utils_email.py
import logging

from email.utils import parseaddr

logger = logging.getLogger('c7n_mailer.utils.email')


def is_email(target):
    if target.startswith('slack://'):
        logger.debug("Slack payload, not an email.")
        return False
    if parseaddr(target)[1] and '@' in target and '.' in target:
        return True
    return False
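A quick illustrative check of the helper above (the targets are made up for the example):

    # Sketch: c7n_mailer resolves each notification target with helpers like this.
    for target in ("dev@example.com", "slack://#alerts", "not-an-address"):
        print(target, is_email(target))
    # dev@example.com True  -- has a mailbox, '@' and '.'
    # slack://#alerts False -- short-circuits on the slack:// prefix
    # not-an-address False  -- parseaddr returns it, but no '@'/'.' present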
session.go
// Package session implements a redis backed user sessions HTTP middleware.
package session

import (
    "context"
    "encoding/json"
    "fmt"
    "net/http"
    "net/textproto"
    "strings"
    "time"

    "github.com/pkg/errors"
    "github.com/sourcegraph/sourcegraph/cmd/frontend/db"
    "github.com/sourcegraph/sourcegraph/internal/actor"
    "github.com/sourcegraph/sourcegraph/internal/conf"
    "github.com/sourcegraph/sourcegraph/internal/env"
    "github.com/sourcegraph/sourcegraph/internal/errcode"
    "github.com/sourcegraph/sourcegraph/internal/redispool"

    "github.com/inconshreveable/log15"

    "github.com/boj/redistore"
    "github.com/gorilla/sessions"
)

var sessionStore sessions.Store
var sessionCookieKey = env.Get("SRC_SESSION_COOKIE_KEY", "", "secret key used for securing the session cookies")

// defaultExpiryPeriod is the default session expiry period (if none is specified explicitly): 90 days.
const defaultExpiryPeriod = 90 * 24 * time.Hour

// cookieName is the name of the HTTP cookie that stores the session ID.
const cookieName = "sgs"

func init() {
    conf.ContributeValidator(func(c conf.Unified) (problems conf.Problems) {
        if c.AuthSessionExpiry == "" {
            return nil
        }
        d, err := time.ParseDuration(c.AuthSessionExpiry)
        if err != nil {
            return conf.NewSiteProblems("auth.sessionExpiry does not conform to the Go time.Duration format (https://golang.org/pkg/time/#ParseDuration). The default of 90 days will be used.")
        }
        if d == 0 {
            return conf.NewSiteProblems("auth.sessionExpiry should be greater than zero. The default of 90 days will be used.")
        }
        return nil
    })
}

// sessionInfo is the information we store in the session. The gorilla/sessions library doesn't appear to
// enforce the maxAge field in its session store implementations, so we include the expiry here.
type sessionInfo struct {
    Actor        *actor.Actor  `json:"actor"`
    LastActive   time.Time     `json:"lastActive"`
    ExpiryPeriod time.Duration `json:"expiryPeriod"`
}

// SetSessionStore sets the backing store used for storing sessions on the server. It should be called exactly once.
func SetSessionStore(s sessions.Store) {
    sessionStore = s
}

// sessionsStore wraps another sessions.Store to dynamically set the values
// of the session.Options.Secure and session.Options.SameSite fields to what
// is returned by the secure closure at invocation time.
type sessionsStore struct {
    sessions.Store
    secure func() bool
}

// Get returns a cached session, setting the secure cookie option dynamically.
func (st *sessionsStore) Get(r *http.Request, name string) (s *sessions.Session, err error) {
    // The deferred call must be a closure: a plain `defer st.setSecureOptions(s)`
    // would evaluate s (still nil) at defer time and never set the options.
    defer func() { st.setSecureOptions(s) }()
    return st.Store.Get(r, name)
}

// New creates and returns a new session with the secure cookie setting option set
// dynamically.
func (st *sessionsStore) New(r *http.Request, name string) (s *sessions.Session, err error) {
    defer func() { st.setSecureOptions(s) }()
    return st.Store.New(r, name)
}

func (st *sessionsStore) setSecureOptions(s *sessions.Session) {
    if s != nil {
        if s.Options == nil {
            s.Options = new(sessions.Options)
        }
        setSessionSecureOptions(s.Options, st.secure())
    }
}

// NewRedisStore creates a new session store backed by Redis.
func NewRedisStore(secureCookie func() bool) sessions.Store {
    rstore, err := redistore.NewRediStoreWithPool(redispool.Store, []byte(sessionCookieKey))
    if err != nil {
        waitForRedis(rstore)
    }

    rstore.Options.Path = "/"
    rstore.Options.HttpOnly = true

    setSessionSecureOptions(rstore.Options, secureCookie())
    return &sessionsStore{
        Store:  rstore,
        secure: secureCookie,
    }
}

// setSessionSecureOptions sets the values of the session.Options.Secure
// and session.Options.SameSite fields depending on the value of the
// secure field.
func setSessionSecureOptions(opts *sessions.Options, secure bool) {
    // if Sourcegraph is running via:
    //  * HTTP:  set "SameSite=Lax" in session cookie - users can sign in, but won't be able to use the
    //           browser extension. Note that users will be able to use the browser extension once they
    //           configure their instance to use HTTPS.
    //  * HTTPS: set "SameSite=None" in session cookie - users can sign in, and will be able to use the
    //           browser extension.
    //
    // See https://github.com/sourcegraph/sourcegraph/issues/6167 for more information.
    opts.SameSite = http.SameSiteLaxMode
    if secure {
        opts.SameSite = http.SameSiteNoneMode
    }
    opts.Secure = secure
}

// Ping attempts to contact Redis and returns a non-nil error upon failure. It is intended to be
// used by health checks.
func Ping() error {
    if sessionStore == nil {
        return errors.New("redis session store is not available")
    }
    rstore, ok := sessionStore.(*redistore.RediStore)
    if !ok {
        // Only try to ping Redis session stores. If we add other types of session stores, add ways
        // to ping them here.
        return nil
    }
    return ping(rstore)
}

func ping(s *redistore.RediStore) error {
    conn := s.Pool.Get()
    defer conn.Close()
    data, err := conn.Do("PING")
    if err != nil {
        return err
    }
    if data != "PONG" {
        return errors.New("no pong received")
    }
    return nil
}

// waitForRedis waits up to a certain timeout for Redis to become reachable, to reduce the
// likelihood of the HTTP handlers starting to serve requests while Redis (and therefore session
// data) is still unavailable. After the timeout has elapsed, if Redis is still unreachable, it
// continues anyway (because that's probably better than the site not coming up at all).
func waitForRedis(s *redistore.RediStore) {
    const timeout = 5 * time.Second
    deadline := time.Now().Add(timeout)
    var err error
    for {
        time.Sleep(150 * time.Millisecond)
        err = ping(s)
        if err == nil {
            return
        }
        if time.Now().After(deadline) {
            log15.Warn("Redis (used for session store) failed to become reachable. Will continue trying to establish connection in background.", "timeout", timeout, "error", err)
            return
        }
    }
}

// SetData sets the session data at the key. The session data is a map of keys to values. If no
// session exists, a new session is created.
//
// The value is JSON-encoded before being stored.
func SetData(w http.ResponseWriter, r *http.Request, key string, value interface{}) error {
    session, err := sessionStore.Get(r, cookieName)
    if err != nil {
        return errors.WithMessage(err, "getting session")
    }
    data, err := json.Marshal(value)
    if err != nil {
        return errors.WithMessage(err, fmt.Sprintf("encoding JSON session data for %q", key))
    }
    session.Values[key] = data
    if err := session.Save(r, w); err != nil {
        return errors.WithMessage(err, "saving session")
    }
    return nil
}

// GetData reads the session data at the key into the data structure addressed by value (which must
// be a pointer).
//
// The value is JSON-decoded from the raw bytes stored by the call to SetData.
func GetData(r *http.Request, key string, value interface{}) error {
    session, err := sessionStore.Get(r, cookieName)
    if err != nil {
        return errors.WithMessage(err, "getting session")
    }
    if data, ok := session.Values[key]; ok {
        if err := json.Unmarshal(data.([]byte), value); err != nil {
            return errors.WithMessage(err, fmt.Sprintf("decoding JSON session data for %q", key))
        }
    }
    return nil
}

// SetActor sets the actor in the session, or removes it if actor == nil. If no session exists, a
// new session is created.
//
// If expiryPeriod is 0, the default expiry period is used.
func SetActor(w http.ResponseWriter, r *http.Request, actor *actor.Actor, expiryPeriod time.Duration) error {
    var value *sessionInfo
    if actor != nil {
        if expiryPeriod == 0 {
            if cfgExpiry, err := time.ParseDuration(conf.Get().AuthSessionExpiry); err == nil {
                expiryPeriod = cfgExpiry
            } else { // if there is no valid session duration, fall back to the default one
                expiryPeriod = defaultExpiryPeriod
            }
        }
        value = &sessionInfo{Actor: actor, ExpiryPeriod: expiryPeriod, LastActive: time.Now()}
    }
    return SetData(w, r, "actor", value)
}

func hasSessionCookie(r *http.Request) bool {
    c, _ := r.Cookie(cookieName)
    return c != nil
}

// deleteSession deletes the current session. If an error occurs, it returns the error but does not
// write an HTTP error response.
//
// It should only be used when there is an unrecoverable, permanent error in the session data. To
// sign out the current user, use SetActor(r, nil).
func deleteSession(w http.ResponseWriter, r *http.Request) error {
    if !hasSessionCookie(r) {
        return nil // nothing to do
    }
    session, err := sessionStore.Get(r, cookieName)
    session.Options.MaxAge = -1 // expire immediately
    if err == nil {
        err = session.Save(r, w)
    }
    if err != nil && hasSessionCookie(r) {
        // Failsafe: delete the client's cookie even if the session store is unavailable.
        http.SetCookie(w, sessions.NewCookie(session.Name(), "", session.Options))
    }
    return errors.WithMessage(err, "deleting session")
}

// CookieMiddleware is an http.Handler middleware that authenticates
// future HTTP requests via cookie.
func CookieMiddleware(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        w.Header().Add("Vary", "Cookie")
        next.ServeHTTP(w, r.WithContext(authenticateByCookie(r, w)))
    })
}

// CookieMiddlewareWithCSRFSafety is a middleware that authenticates HTTP requests using the
// provided cookie (if any), *only if* the request is a non-simple CORS request (see
// https://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0). This relies on the client's
// CORS checks to guarantee that one of the following is true, thereby protecting against CSRF
// attacks:
//
// - The request originates from the same origin. -OR-
//
// - The request is cross-origin but passed the CORS preflight check (because otherwise the
//   preflight OPTIONS response from secureHeadersMiddleware would have caused the browser to refuse
//   to send this HTTP request).
//
// To determine if it's a non-simple CORS request, it checks for the presence of either
// "Content-Type: application/json; charset=utf-8" or a non-empty HTTP request header whose name is
// given in corsAllowHeader.
//
// If the request is a simple CORS request, or if neither of these is true, then the cookie is not
// used to authenticate the request. The request is still allowed to proceed (but will be
// unauthenticated unless some other authentication is provided, such as an access token).
func CookieMiddlewareWithCSRFSafety(next http.Handler, corsAllowHeader string, isTrustedOrigin func(*http.Request) bool) http.Handler {
    corsAllowHeader = textproto.CanonicalMIMEHeaderKey(corsAllowHeader)
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        w.Header().Add("Vary", "Cookie, Authorization, "+corsAllowHeader)
        _, isTrusted := r.Header[corsAllowHeader]
        if !isTrusted {
            isTrusted = isTrustedOrigin(r)
        }
        if !isTrusted {
            contentType := r.Header.Get("Content-Type")
            isTrusted = contentType == "application/json" || contentType == "application/json; charset=utf-8"
        }
        if isTrusted {
            r = r.WithContext(authenticateByCookie(r, w))
        }
        next.ServeHTTP(w, r)
    })
}

func authenticateByCookie(r *http.Request, w http.ResponseWriter) context.Context {
    // If the request is already authenticated from a cookie (and not a token), then do not clobber the request's existing
    // authenticated actor with the actor (if any) derived from the session cookie.
    if a := actor.FromContext(r.Context()); a.IsAuthenticated() && a.FromSessionCookie {
        if hasSessionCookie(r) {
            // Delete the session cookie to avoid confusion. (This occurs most often when switching
            // the auth provider to http-header; in that case, we want to rely on the http-header
            // auth provider for auth, not the user's old session.)
            _ = deleteSession(w, r)
        }
        return r.Context() // unchanged
    }

    var info *sessionInfo
    if err := GetData(r, "actor", &info); err != nil {
        if !strings.Contains(err.Error(), "illegal base64 data at input byte 36") {
            // Skip log if the error message indicates the cookie value was a JWT (which almost
            // certainly means that the cookie was a pre-2.8 SAML cookie, so this error will only
            // occur once and the user will be automatically redirected to the SAML auth flow).
            log15.Warn("Error reading session actor. The session cookie was invalid and will be cleared. This error can be safely ignored unless it persists.", "err", err)
        }
        _ = deleteSession(w, r) // clear the bad value
        return r.Context()
    }
    if info != nil {
        // Check expiry
        if info.LastActive.Add(info.ExpiryPeriod).Before(time.Now()) {
            _ = deleteSession(w, r) // clear the expired session
            return actor.WithActor(r.Context(), &actor.Actor{})
        }

        // Check that the user still exists.
        if _, err := db.Users.GetByID(r.Context(), info.Actor.UID); err != nil {
            if errcode.IsNotFound(err) {
                _ = deleteSession(w, r) // clear the bad value
            } else {
                // Don't delete the session, since the error might be an ephemeral DB error, and we
                // don't want that to cause all active users to be signed out.
                log15.Error("Error looking up user for session.", "uid", info.Actor.UID, "error", err)
            }
            return r.Context() // not authenticated
        }

        // Renew session
        if time.Since(info.LastActive) > 5*time.Minute {
            info.LastActive = time.Now()
            if err := SetData(w, r, "actor", info); err != nil {
                log15.Error("error renewing session", "error", err)
                return r.Context()
            }
        }

        info.Actor.FromSessionCookie = true
        return actor.WithActor(r.Context(), info.Actor)
    }
    return r.Context()
}
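A minimal wiring sketch for the package above. The `appHandler` and server setup are assumptions for illustration, not taken from the Sourcegraph codebase: the store is installed once at startup, then the middleware authenticates each request from its session cookie.

    // Hypothetical wiring (appHandler and the HTTPS check are assumptions).
    func newServer(appHandler http.Handler) *http.Server {
        SetSessionStore(NewRedisStore(func() bool {
            return true // assume the instance is served over HTTPS
        }))
        return &http.Server{
            Addr:    ":3080",
            Handler: CookieMiddleware(appHandler),
        }
    }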
training.py
from itertools import chain
from pathlib import Path
from typing import Tuple

import torch
from accelerate import Accelerator
from torch.utils.data import DataLoader

from saticl.config import Configuration, SSLConfiguration
from saticl.datasets.icl import ICLDataset
from saticl.datasets.transforms import invariance_transforms, inverse_transform, ssl_transforms
from saticl.datasets.wrappers import SSLDataset
from saticl.logging.tensorboard import TensorBoardLogger
from saticl.losses.regularization import AugmentationInvariance
from saticl.models.icl import ICLSegmenter
from saticl.prepare import prepare_dataset, prepare_metrics, prepare_metrics_ssl, prepare_model, prepare_model_ssl
from saticl.tasks import Task
from saticl.trainer.base import Trainer
from saticl.trainer.callbacks import Checkpoint, DisplaySamples, EarlyStopping, EarlyStoppingCriterion
from saticl.trainer.invariance import AugInvarianceTrainer
from saticl.trainer.ssl import SSLStage, SSLTrainer
from saticl.utils.common import flatten_config, get_logger, git_revision_hash, store_config
from saticl.utils.ml import checkpoint_path, init_experiment, seed_everything, seed_worker

LOG = get_logger(__name__)


def init_from_previous_step(config: Configuration, new_model: ICLSegmenter, old_model: ICLSegmenter,
                            model_folder: Path, task: Task) -> Tuple[ICLSegmenter, ICLSegmenter]:
    if task.step == 0:
        LOG.info("Step 0: training from scratch without old model")
        return new_model, old_model

    LOG.info("Loading checkpoint from step: %d", task.step - 1)
    if config.task.step_checkpoint is not None:
        ckpt_path = Path(config.task.step_checkpoint)
    else:
        ckpt_path = checkpoint_path(model_folder, task_name=task.name, step=task.step - 1)
    assert ckpt_path.exists() and ckpt_path.is_file(), \
        f"Checkpoint for step {task.step - 1} not found at {str(ckpt_path)}"
    checkpoint = torch.load(str(ckpt_path), map_location="cpu")
    # load checkpoint into the new model, without strict matching because of ICL heads
    new_model.load_state_dict(checkpoint, strict=False)
    if config.model.init_balanced:
        new_model.init_classifier()
    # load the same checkpoint into the old model, this time strict since it's the very same
    old_model.load_state_dict(checkpoint, strict=True)
    old_model.freeze()
    old_model.eval()
    del checkpoint
    return new_model, old_model


def train(config: Configuration):
    # assertions before starting
    assert config.name is not None or config.task.step == 0, "Specify the experiment name with ICL steps >= 1!"
    assert torch.backends.cudnn.enabled, "AMP requires CUDNN backend to be enabled."

    # prepare accelerator ASAP
    accelerator = Accelerator(fp16=config.trainer.amp, cpu=config.trainer.cpu)

    # Create the directory tree:
    # outputs
    # |-- dataset
    #     |-- task_name
    #         |-- exp_name
    #             |-- models
    #             |-- logs
    accelerator.wait_for_everyone()
    log_name = f"output-{config.task.step}.log"
    exp_id, out_folder, model_folder, logs_folder = init_experiment(config=config, log_name=log_name)
    config_path = out_folder / f"segmenter-config-s{config.task.step}.yaml"
    LOG.info("Run started")
    LOG.info("Experiment ID: %s", exp_id)
    LOG.info("Output folder: %s", out_folder)
    LOG.info("Models folder: %s", model_folder)
    LOG.info("Logs folder: %s", logs_folder)
    LOG.info("Configuration: %s", config_path)

    # seeding everything
    LOG.info("Using seed: %d", config.seed)
    seed_everything(config.seed)

    # prepare datasets
    LOG.info("Loading datasets...")
    train_set, valid_set = prepare_dataset(config=config, partial_transforms=False)
    LOG.info("Full sets - train set: %d samples, validation set: %d samples", len(train_set), len(valid_set))
    add_background = not train_set.has_background()
    task = Task(dataset=config.dataset, name=config.task.name, step=config.task.step, add_background=add_background)
    train_mask, valid_mask = 0, 255
    train_set = ICLDataset(dataset=train_set, task=task, mask_value=train_mask, filter_mode=config.task.filter_mode)
    valid_set = ICLDataset(dataset=valid_set, task=task, mask_value=valid_mask, filter_mode=config.task.filter_mode)
    # construct data loaders
    train_loader = DataLoader(dataset=train_set,
                              batch_size=config.trainer.batch_size,
                              shuffle=True,
                              num_workers=config.trainer.num_workers,
                              worker_init_fn=seed_worker,
                              drop_last=True)
    valid_loader = DataLoader(dataset=valid_set,
                              batch_size=config.trainer.batch_size,
                              shuffle=False,
                              num_workers=config.trainer.num_workers,
                              worker_init_fn=seed_worker)
    LOG.info("ICL sets - Train set: %d samples, validation set: %d samples", len(train_set), len(valid_set))

    # prepare models
    LOG.info("Preparing model...")
    new_model = prepare_model(config=config, task=task)
    new_model = new_model.to(accelerator.device)
    if task.step > 0:
        old_task = Task(dataset=config.dataset, name=config.task.name, step=task.step - 1,
                        add_background=add_background)
        old_model = prepare_model(config=config, task=old_task)
        old_model = old_model.to(accelerator.device)
    else:
        old_model = None
    new_model, old_model = init_from_previous_step(config, new_model, old_model, model_folder, task)
    LOG.info("Done preparing models")

    # prepare optimizer and scheduler
    optimizer = config.optimizer.instantiate(new_model.parameters())
    scheduler = config.scheduler.instantiate(optimizer)

    # prepare losses
    weights = None
    if config.class_weights:
        weights = train_set.load_class_weights(Path(config.class_weights),
                                               device=accelerator.device,
                                               normalize=config.ce.tversky)
        LOG.info("Using class weights: %s", str(weights))
    segment_loss = config.ce.instantiate(ignore_index=255, old_class_count=task.old_class_count(), weight=weights)
    distill_loss = config.kd.instantiate()
    if task.step > 0 and config.ce.unbiased:
        seg_loss_name = str(type(segment_loss))
        kdd_loss_name = str(type(distill_loss))
        if "Unbiased" not in seg_loss_name:
            LOG.warn(f"Non-unbiased segmentation loss '{seg_loss_name}' for step {task.step}!")
        if "Unbiased" not in kdd_loss_name:
            LOG.warn(f"Non-unbiased KD loss '{kdd_loss_name}' for step {task.step}")

    # prepare metrics and logger
    monitored = config.trainer.monitor.name
    train_metrics, valid_metrics = prepare_metrics(task=task, device=accelerator.device)
    logger = TensorBoardLogger(log_folder=logs_folder,
                               filename_suffix=f"step-{task.step}",
                               icl_step=task.step,
                               comment=config.comment)
    # logging configuration to tensorboard
    LOG.debug("Logging flattened config. to TensorBoard")
    logger.log_table("config", flatten_config(config.dict()))

    # prepare trainer
    LOG.info("Visualize: %s, num. batches for visualization: %s", str(config.visualize), str(config.num_samples))
    num_samples = int(config.visualize) * config.num_samples
    # choose trainer class depending on task or regularization
    trainer_class = Trainer
    kwargs = dict()
    if config.aug.apply:
        inv_transforms = invariance_transforms(config.aug)
        LOG.info("Invariance transforms: ")
        LOG.info(str(inv_transforms))
        kwargs.update(aug_criterion=AugmentationInvariance(transform=inv_transforms),
                      aug_lambda=config.aug.factor,
                      aug_lambda_icl=config.aug.factor_icl,
                      temperature=config.trainer.temperature,
                      temp_epochs=config.trainer.temp_epochs)
        trainer_class = AugInvarianceTrainer
    trainer = trainer_class(accelerator=accelerator,
                            task=task,
                            new_model=new_model,
                            old_model=old_model,
                            optimizer=optimizer,
                            scheduler=scheduler,
                            train_metrics=train_metrics,
                            val_metrics=valid_metrics,
                            old_classes=train_set.old_categories(),
                            new_classes=train_set.new_categories(),
                            seg_criterion=segment_loss,
                            kdd_criterion=distill_loss,
                            kde_criterion=None,
                            kdd_lambda=config.kd.decoder_factor,
                            kde_lambda=config.kd.encoder_factor,
                            logger=logger,
                            samples=num_samples,
                            debug=config.debug,
                            **kwargs)
    trainer.add_callback(EarlyStopping(call_every=1,
                                       metric=monitored,
                                       criterion=EarlyStoppingCriterion.maximum,
                                       patience=config.trainer.patience)) \
           .add_callback(Checkpoint(call_every=1,
                                    model_folder=model_folder,
                                    name_format=f"task{task.name}_step-{task.step}",
                                    save_best=True)) \
           .add_callback(DisplaySamples(inverse_transform=inverse_transform(),
                                        color_palette=train_set.palette()))
    # storing config and starting training
    config.version = git_revision_hash()
    store_config(config, path=config_path)
    trainer.fit(train_dataloader=train_loader, val_dataloader=valid_loader, max_epochs=config.trainer.max_epochs)
    LOG.info(f"Training completed at epoch {trainer.current_epoch:<2d} "
             f"(best {monitored}: {trainer.best_score:.4f})")
    LOG.info("Experiment %s (step %d) completed!", exp_id, task.step)


def train_ssl(config: SSLConfiguration):
    # assertions before starting
    assert config.name is not None or config.task.step == 0, "Specify the experiment name with ICL steps >= 1!"
    assert torch.backends.cudnn.enabled, "AMP requires CUDNN backend to be enabled."
    if config.in_channels != 4:
        LOG.warn("Forcing input channels to 4 (previous value: %d)", config.in_channels)
        config.in_channels = 4

    # prepare accelerator ASAP
    accelerator = Accelerator(fp16=config.trainer.amp, cpu=config.trainer.cpu)

    # Create the directory tree:
    # outputs
    # |-- dataset
    #     |-- task_name
    #         |-- exp_name
    #             |-- models
    #             |-- logs
    accelerator.wait_for_everyone()
    log_name = f"output-{config.task.step}.log"
    exp_id, out_folder, model_folder, logs_folder = init_experiment(config=config, log_name=log_name)
    config_path = out_folder / f"segmenter-config-s{config.task.step}.yaml"
    store_config(config, path=config_path)
    LOG.info("Run started")
    LOG.info("Experiment ID: %s", exp_id)
    LOG.info("Output folder: %s", out_folder)
    LOG.info("Models folder: %s", model_folder)
    LOG.info("Logs folder: %s", logs_folder)
    LOG.info("Configuration: %s", config_path)

    # seeding everything
    LOG.info("Using seed: %d", config.seed)
    seed_everything(config.seed)

    # prepare datasets
    LOG.info("Loading datasets...")
    train_set, valid_set = prepare_dataset(config=config)
    train_set = SSLDataset(train_set, transform=ssl_transforms())
    LOG.info("Full sets - train set: %d samples, validation set: %d samples", len(train_set), len(valid_set))
    add_background = not train_set.has_background()
    task = Task(dataset=config.dataset, name=config.task.name, step=config.task.step, add_background=add_background)
    train_mask, valid_mask = 0, 255
    train_set = ICLDataset(dataset=train_set, task=task, mask_value=train_mask, filter_mode=config.task.filter_mode)
    valid_set = ICLDataset(dataset=valid_set, task=task, mask_value=valid_mask, filter_mode=config.task.filter_mode)
    train_loader = DataLoader(dataset=train_set,
                              batch_size=config.trainer.batch_size,
                              shuffle=True,
                              num_workers=config.trainer.num_workers,
                              worker_init_fn=seed_worker,
                              drop_last=True)
    valid_loader = DataLoader(dataset=valid_set,
                              batch_size=config.trainer.batch_size,
                              shuffle=False,
                              num_workers=config.trainer.num_workers,
                              worker_init_fn=seed_worker)
    LOG.info("ICL sets - Train set: %d samples, validation set: %d samples", len(train_set), len(valid_set))

    # prepare models
    LOG.info("Preparing model...")
    new_model, ssl_model = prepare_model_ssl(config=config, task=task)
    new_model = new_model.to(accelerator.device)
    ssl_model = ssl_model.to(accelerator.device)
    if task.step > 0:
        old_task = Task(dataset=config.dataset, name=config.task.name, step=task.step - 1,
                        add_background=add_background)
        old_model = prepare_model(config=config, task=old_task)
        old_model = old_model.to(accelerator.device)
    else:
        old_model = None
    new_model, old_model = init_from_previous_step(config, new_model, old_model, model_folder, task)
    LOG.info("Done preparing models")

    # prepare optimizer and scheduler
    parameters = chain(new_model.parameters(), ssl_model.head.parameters())
    optimizer = config.optimizer.instantiate(parameters)
    scheduler = config.scheduler.instantiate(optimizer)

    # prepare losses, including SSL
    segment_loss = config.ce.instantiate(ignore_index=255, old_class_count=task.old_class_count())
    distill_loss = config.kd.instantiate()
    pretext_loss = config.ssl_loss()
    # asserts to verify their validity
    if task.step > 0 and config.ce.unbiased:
        seg_loss_name = str(type(segment_loss))
        kdd_loss_name = str(type(distill_loss))
        assert "Unbiased" in seg_loss_name, f"Wrong loss '{seg_loss_name}' for step {task.step}"
        assert "Unbiased" in kdd_loss_name, f"Wrong loss '{kdd_loss_name}' for step {task.step}"

    # prepare metrics and logger
    monitored = config.trainer.monitor.name
    train_metrics, valid_metrics = prepare_metrics(task=task, device=accelerator.device)
    ssl_metrics = prepare_metrics_ssl(num_classes=config.model.pretext_classes, device=accelerator.device)
    logger = TensorBoardLogger(log_folder=logs_folder,
                               filename_suffix=f"step-{task.step}",
                               icl_step=task.step,
                               comment=config.comment)
    # logging configuration to tensorboard
    LOG.debug("Logging flattened config. to TensorBoard")
    logger.log_table("config", flatten_config(config.dict()))

    # prepare trainer
    LOG.info("Visualize: %s, num. batches for visualization: %s", str(config.visualize), str(config.num_samples))
    num_samples = int(config.visualize) * config.num_samples
    trainer = SSLTrainer(accelerator=accelerator,
                         task=task,
                         new_model=new_model,
                         old_model=old_model,
                         ssl_model=ssl_model,
                         optimizer=optimizer,
                         scheduler=scheduler,
                         train_metrics=train_metrics,
                         val_metrics=valid_metrics,
                         old_classes=train_set.old_categories(),
                         new_classes=train_set.new_categories(),
                         seg_criterion=segment_loss,
                         ssl_criterion=pretext_loss,
                         kdd_criterion=distill_loss,
                         kde_criterion=None,
                         kdd_lambda=config.kd.decoder_factor,
                         kde_lambda=config.kd.encoder_factor,
                         logger=logger,
                         samples=num_samples,
                         debug=config.debug)
    trainer.add_metrics(SSLStage.ssl, metrics=ssl_metrics)
    trainer.add_callback(EarlyStopping(call_every=1,
                                       metric=monitored,
                                       criterion=EarlyStoppingCriterion.maximum,
                                       patience=config.trainer.patience)) \
           .add_callback(Checkpoint(call_every=1,
                                    model_folder=model_folder,
                                    name_format=f"task{task.name}_step-{task.step}",
                                    save_best=True)) \
           .add_callback(DisplaySamples(inverse_transform=inverse_transform(),
                                        color_palette=train_set.palette()))
    trainer.fit(train_dataloader=train_loader, val_dataloader=valid_loader, max_epochs=config.trainer.max_epochs)
    LOG.info(f"Training completed at epoch {trainer.current_epoch:<2d} "
             f"(best {monitored}: {trainer.best_score:.4f})")
    LOG.info("Experiment %s (step %d) completed!", exp_id, task.step)
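A hypothetical entry point for the script above, shown only to illustrate how `train` takes a `Configuration`. The YAML path is made up, and constructing `Configuration` from keyword arguments is an assumption based on the pydantic-style `config.dict()` call in the module; saticl's real CLI may differ.

    # Hypothetical driver (not part of the module above).
    import yaml

    from saticl.config import Configuration
    from training import train

    if __name__ == "__main__":
        with open("configs/step0.yaml") as f:  # assumed path
            cfg = Configuration(**yaml.safe_load(f))  # assumed constructor
        train(cfg)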
global-styles.ts
import { createGlobalStyle } from 'styled-components';

export const GlobalStyle = createGlobalStyle`
  html,
  body {
    height: 100%;
    width: 100%;
  }

  body {
    font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;
  }

  #root {
    min-height: 100%;
    min-width: 100%;
  }

  p,
  label {
    font-family: Georgia, Times, 'Times New Roman', serif;
    line-height: 1.5em;
  }

  input,
  select {
    font-family: inherit;
    font-size: inherit;
  }
`;
export default GlobalStyle;
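A usage sketch for the style above: `createGlobalStyle` components apply their CSS when rendered, so the component is mounted once at the application root. The `App` component and render target are assumptions for illustration.

    // Hypothetical root render (App is assumed to exist elsewhere).
    import React from 'react';
    import ReactDOM from 'react-dom';
    import GlobalStyle from './global-styles';
    import App from './App';

    ReactDOM.render(
      <React.StrictMode>
        <GlobalStyle />
        <App />
      </React.StrictMode>,
      document.getElementById('root'),
    );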
array_test.go
package pq

import (
    "bytes"
    "database/sql"
    "database/sql/driver"
    "math/rand"
    "reflect"
    "strings"
    "testing"
)

func TestParseArray(t *testing.T) {
    for _, tt := range []struct {
        input string
        delim string
        dims  []int
        elems [][]byte
    }{
        {`{}`, `,`, nil, [][]byte{}},
        {`{NULL}`, `,`, []int{1}, [][]byte{nil}},
        {`{a}`, `,`, []int{1}, [][]byte{{'a'}}},
        {`{a,b}`, `,`, []int{2}, [][]byte{{'a'}, {'b'}}},
        {`{{a,b}}`, `,`, []int{1, 2}, [][]byte{{'a'}, {'b'}}},
        {`{{a},{b}}`, `,`, []int{2, 1}, [][]byte{{'a'}, {'b'}}},
        {`{{{a,b},{c,d},{e,f}}}`, `,`, []int{1, 3, 2}, [][]byte{
            {'a'}, {'b'}, {'c'}, {'d'}, {'e'}, {'f'},
        }},
        {`{""}`, `,`, []int{1}, [][]byte{{}}},
        {`{","}`, `,`, []int{1}, [][]byte{{','}}},
        {`{",",","}`, `,`, []int{2}, [][]byte{{','}, {','}}},
        {`{{",",","}}`, `,`, []int{1, 2}, [][]byte{{','}, {','}}},
        {`{{","},{","}}`, `,`, []int{2, 1}, [][]byte{{','}, {','}}},
        {`{{{",",","},{",",","},{",",","}}}`, `,`, []int{1, 3, 2}, [][]byte{
            {','}, {','}, {','}, {','}, {','}, {','},
        }},
        {`{"\"}"}`, `,`, []int{1}, [][]byte{{'"', '}'}}},
        {`{"\"","\""}`, `,`, []int{2}, [][]byte{{'"'}, {'"'}}},
        {`{{"\"","\""}}`, `,`, []int{1, 2}, [][]byte{{'"'}, {'"'}}},
        {`{{"\""},{"\""}}`, `,`, []int{2, 1}, [][]byte{{'"'}, {'"'}}},
        {`{{{"\"","\""},{"\"","\""},{"\"","\""}}}`, `,`, []int{1, 3, 2}, [][]byte{
            {'"'}, {'"'}, {'"'}, {'"'}, {'"'}, {'"'},
        }},
        {`{axyzb}`, `xyz`, []int{2}, [][]byte{{'a'}, {'b'}}},
    } {
        dims, elems, err := parseArray([]byte(tt.input), []byte(tt.delim))
        if err != nil {
            t.Fatalf("Expected no error for %q, got %q", tt.input, err)
        }
        if !reflect.DeepEqual(dims, tt.dims) {
            t.Errorf("Expected %v dimensions for %q, got %v", tt.dims, tt.input, dims)
        }
        if !reflect.DeepEqual(elems, tt.elems) {
            t.Errorf("Expected %v elements for %q, got %v", tt.elems, tt.input, elems)
        }
    }
}

func TestParseArrayError(t *testing.T) {
    for _, tt := range []struct {
        input, err string
    }{
        {``, "expected '{' at offset 0"},
        {`x`, "expected '{' at offset 0"},
        {`}`, "expected '{' at offset 0"},
        {`{`, "expected '}' at offset 1"},
        {`{{}`, "expected '}' at offset 3"},
        {`{}}`, "unexpected '}' at offset 2"},
        {`{,}`, "unexpected ',' at offset 1"},
        {`{,x}`, "unexpected ',' at offset 1"},
        {`{x,}`, "unexpected '}' at offset 3"},
        {`{x,{`, "unexpected '{' at offset 3"},
        {`{x},`, "unexpected ',' at offset 3"},
        {`{x}}`, "unexpected '}' at offset 3"},
        {`{{x}`, "expected '}' at offset 4"},
        {`{""x}`, "unexpected 'x' at offset 3"},
        {`{{a},{b,c}}`, "multidimensional arrays must have elements with matching dimensions"},
    } {
        _, _, err := parseArray([]byte(tt.input), []byte{','})
        if err == nil {
            t.Fatalf("Expected error for %q, got none", tt.input)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for %q, got %q", tt.err, tt.input, err)
        }
    }
}

func TestArrayScanner(t *testing.T) {
    var s sql.Scanner = Array(&[]bool{})
    if _, ok := s.(*BoolArray); !ok {
        t.Errorf("Expected *BoolArray, got %T", s)
    }
    s = Array(&[]float64{})
    if _, ok := s.(*Float64Array); !ok {
        t.Errorf("Expected *Float64Array, got %T", s)
    }
    s = Array(&[]int64{})
    if _, ok := s.(*Int64Array); !ok {
        t.Errorf("Expected *Int64Array, got %T", s)
    }
    s = Array(&[]string{})
    if _, ok := s.(*StringArray); !ok {
        t.Errorf("Expected *StringArray, got %T", s)
    }
    for _, tt := range []interface{}{
        &[]sql.Scanner{},
        &[][]bool{},
        &[][]float64{},
        &[][]int64{},
        &[][]string{},
    } {
        s = Array(tt)
        if _, ok := s.(GenericArray); !ok {
            t.Errorf("Expected GenericArray for %T, got %T", tt, s)
        }
    }
}

func TestArrayValuer(t *testing.T) {
    var v driver.Valuer = Array([]bool{})
    if _, ok := v.(*BoolArray); !ok {
        t.Errorf("Expected *BoolArray, got %T", v)
    }
    v = Array([]float64{})
    if _, ok := v.(*Float64Array); !ok {
        t.Errorf("Expected *Float64Array, got %T", v)
    }
    v = Array([]int64{})
    if _, ok := v.(*Int64Array); !ok {
        t.Errorf("Expected *Int64Array, got %T", v)
    }
    v = Array([]string{})
    if _, ok := v.(*StringArray); !ok {
        t.Errorf("Expected *StringArray, got %T", v)
    }
    for _, tt := range []interface{}{
        nil,
        []driver.Value{},
        [][]bool{},
        [][]float64{},
        [][]int64{},
        [][]string{},
    } {
        v = Array(tt)
        if _, ok := v.(GenericArray); !ok {
            t.Errorf("Expected GenericArray for %T, got %T", tt, v)
        }
    }
}

func TestBoolArrayScanUnsupported(t *testing.T) {
    var arr BoolArray
    err := arr.Scan(1)
    if err == nil {
        t.Fatal("Expected error when scanning from int")
    }
    if !strings.Contains(err.Error(), "int to BoolArray") {
        t.Errorf("Expected type to be mentioned when scanning, got %q", err)
    }
}

func TestBoolArrayScanEmpty(t *testing.T) {
    var arr BoolArray
    err := arr.Scan(`{}`)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr == nil || len(arr) != 0 {
        t.Errorf("Expected empty, got %#v", arr)
    }
}

func TestBoolArrayScanNil(t *testing.T) {
    arr := BoolArray{true, true, true}
    err := arr.Scan(nil)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr != nil {
        t.Errorf("Expected nil, got %+v", arr)
    }
}

var BoolArrayStringTests = []struct {
    str string
    arr BoolArray
}{
    {`{}`, BoolArray{}},
    {`{t}`, BoolArray{true}},
    {`{f,t}`, BoolArray{false, true}},
}

func TestBoolArrayScanBytes(t *testing.T) {
    for _, tt := range BoolArrayStringTests {
        bytes := []byte(tt.str)
        arr := BoolArray{true, true, true}
        err := arr.Scan(bytes)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", bytes, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, bytes, arr)
        }
    }
}

func BenchmarkBoolArrayScanBytes(b *testing.B) {
    var a BoolArray
    var x interface{} = []byte(`{t,f,t,f,t,f,t,f,t,f}`)
    for i := 0; i < b.N; i++ {
        a = BoolArray{}
        a.Scan(x)
    }
}

func TestBoolArrayScanString(t *testing.T) {
    for _, tt := range BoolArrayStringTests {
        arr := BoolArray{true, true, true}
        err := arr.Scan(tt.str)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", tt.str, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, tt.str, arr)
        }
    }
}

func TestBoolArrayScanError(t *testing.T) {
    for _, tt := range []struct {
        input, err string
    }{
        {``, "unable to parse array"},
        {`{`, "unable to parse array"},
        {`{{t},{f}}`, "cannot convert ARRAY[2][1] to BoolArray"},
        {`{NULL}`, `could not parse boolean array index 0: invalid boolean ""`},
        {`{a}`, `could not parse boolean array index 0: invalid boolean "a"`},
        {`{t,b}`, `could not parse boolean array index 1: invalid boolean "b"`},
        {`{t,f,cd}`, `could not parse boolean array index 2: invalid boolean "cd"`},
    } {
        arr := BoolArray{true, true, true}
        err := arr.Scan(tt.input)
        if err == nil {
            t.Fatalf("Expected error for %q, got none", tt.input)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for %q, got %q", tt.err, tt.input, err)
        }
        if !reflect.DeepEqual(arr, BoolArray{true, true, true}) {
            t.Errorf("Expected destination not to change for %q, got %+v", tt.input, arr)
        }
    }
}

func TestBoolArrayValue(t *testing.T) {
    result, err := BoolArray(nil).Value()
    if err != nil {
        t.Fatalf("Expected no error for nil, got %v", err)
    }
    if result != nil {
        t.Errorf("Expected nil, got %q", result)
    }

    result, err = BoolArray([]bool{}).Value()
    if err != nil {
        t.Fatalf("Expected no error for empty, got %v", err)
    }
    if expected := `{}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected empty, got %q", result)
    }

    result, err = BoolArray([]bool{false, true, false}).Value()
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if expected := `{f,t,f}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected %q, got %q", expected, result)
    }
}

func BenchmarkBoolArrayValue(b *testing.B) {
    rand.Seed(1)
    x := make([]bool, 10)
    for i := 0; i < len(x); i++ {
        x[i] = rand.Intn(2) == 0
    }
    a := BoolArray(x)
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func TestByteaArrayScanUnsupported(t *testing.T) {
    var arr ByteaArray
    err := arr.Scan(1)
    if err == nil {
        t.Fatal("Expected error when scanning from int")
    }
    if !strings.Contains(err.Error(), "int to ByteaArray") {
        t.Errorf("Expected type to be mentioned when scanning, got %q", err)
    }
}

func TestByteaArrayScanEmpty(t *testing.T) {
    var arr ByteaArray
    err := arr.Scan(`{}`)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr == nil || len(arr) != 0 {
        t.Errorf("Expected empty, got %#v", arr)
    }
}

func TestByteaArrayScanNil(t *testing.T) {
    arr := ByteaArray{{2}, {6}, {0, 0}}
    err := arr.Scan(nil)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr != nil {
        t.Errorf("Expected nil, got %+v", arr)
    }
}

var ByteaArrayStringTests = []struct {
    str string
    arr ByteaArray
}{
    {`{}`, ByteaArray{}},
    {`{NULL}`, ByteaArray{nil}},
    {`{"\\xfeff"}`, ByteaArray{{'\xFE', '\xFF'}}},
    {`{"\\xdead","\\xbeef"}`, ByteaArray{{'\xDE', '\xAD'}, {'\xBE', '\xEF'}}},
}

func TestByteaArrayScanBytes(t *testing.T) {
    for _, tt := range ByteaArrayStringTests {
        bytes := []byte(tt.str)
        arr := ByteaArray{{2}, {6}, {0, 0}}
        err := arr.Scan(bytes)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", bytes, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, bytes, arr)
        }
    }
}

func BenchmarkByteaArrayScanBytes(b *testing.B) {
    var a ByteaArray
    var x interface{} = []byte(`{"\\xfe","\\xff","\\xdead","\\xbeef","\\xfe","\\xff","\\xdead","\\xbeef","\\xfe","\\xff"}`)
    for i := 0; i < b.N; i++ {
        a = ByteaArray{}
        a.Scan(x)
    }
}

func TestByteaArrayScanString(t *testing.T) {
    for _, tt := range ByteaArrayStringTests {
        arr := ByteaArray{{2}, {6}, {0, 0}}
        err := arr.Scan(tt.str)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", tt.str, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, tt.str, arr)
        }
    }
}

func TestByteaArrayScanError(t *testing.T) {
    for _, tt := range []struct {
        input, err string
    }{
        {``, "unable to parse array"},
        {`{`, "unable to parse array"},
        {`{{"\\xfeff"},{"\\xbeef"}}`, "cannot convert ARRAY[2][1] to ByteaArray"},
        {`{"\\abc"}`, "could not parse bytea array index 0: could not parse bytea value"},
    } {
        arr := ByteaArray{{2}, {6}, {0, 0}}
        err := arr.Scan(tt.input)
        if err == nil {
            t.Fatalf("Expected error for %q, got none", tt.input)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for %q, got %q", tt.err, tt.input, err)
        }
        if !reflect.DeepEqual(arr, ByteaArray{{2}, {6}, {0, 0}}) {
            t.Errorf("Expected destination not to change for %q, got %+v", tt.input, arr)
        }
    }
}

func TestByteaArrayValue(t *testing.T) {
    result, err := ByteaArray(nil).Value()
    if err != nil {
        t.Fatalf("Expected no error for nil, got %v", err)
    }
    if result != nil {
        t.Errorf("Expected nil, got %q", result)
    }

    result, err = ByteaArray([][]byte{}).Value()
    if err != nil {
        t.Fatalf("Expected no error for empty, got %v", err)
    }
    if expected := `{}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected empty, got %q", result)
    }

    result, err = ByteaArray([][]byte{{'\xDE', '\xAD', '\xBE', '\xEF'}, {'\xFE', '\xFF'}, {}}).Value()
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if expected := `{"\\xdeadbeef","\\xfeff","\\x"}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected %q, got %q", expected, result)
    }
}

func BenchmarkByteaArrayValue(b *testing.B) {
    rand.Seed(1)
    x := make([][]byte, 10)
    for i := 0; i < len(x); i++ {
        x[i] = make([]byte, len(x))
        for j := 0; j < len(x); j++ {
            x[i][j] = byte(rand.Int())
        }
    }
    a := ByteaArray(x)
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func TestFloat64ArrayScanUnsupported(t *testing.T) {
    var arr Float64Array
    err := arr.Scan(true)
    if err == nil {
        t.Fatal("Expected error when scanning from bool")
    }
    if !strings.Contains(err.Error(), "bool to Float64Array") {
        t.Errorf("Expected type to be mentioned when scanning, got %q", err)
    }
}

func TestFloat64ArrayScanEmpty(t *testing.T) {
    var arr Float64Array
    err := arr.Scan(`{}`)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr == nil || len(arr) != 0 {
        t.Errorf("Expected empty, got %#v", arr)
    }
}

func TestFloat64ArrayScanNil(t *testing.T) {
    arr := Float64Array{5, 5, 5}
    err := arr.Scan(nil)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr != nil {
        t.Errorf("Expected nil, got %+v", arr)
    }
}

var Float64ArrayStringTests = []struct {
    str string
    arr Float64Array
}{
    {`{}`, Float64Array{}},
    {`{1.2}`, Float64Array{1.2}},
    {`{3.456,7.89}`, Float64Array{3.456, 7.89}},
    {`{3,1,2}`, Float64Array{3, 1, 2}},
}

func TestFloat64ArrayScanBytes(t *testing.T) {
    for _, tt := range Float64ArrayStringTests {
        bytes := []byte(tt.str)
        arr := Float64Array{5, 5, 5}
        err := arr.Scan(bytes)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", bytes, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, bytes, arr)
        }
    }
}

func BenchmarkFloat64ArrayScanBytes(b *testing.B) {
    var a Float64Array
    var x interface{} = []byte(`{1.2,3.4,5.6,7.8,9.01,2.34,5.67,8.90,1.234,5.678}`)
    for i := 0; i < b.N; i++ {
        a = Float64Array{}
        a.Scan(x)
    }
}

func TestFloat64ArrayScanString(t *testing.T) {
    for _, tt := range Float64ArrayStringTests {
        arr := Float64Array{5, 5, 5}
        err := arr.Scan(tt.str)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", tt.str, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, tt.str, arr)
        }
    }
}

func TestFloat64ArrayScanError(t *testing.T) {
    for _, tt := range []struct {
        input, err string
    }{
        {``, "unable to parse array"},
        {`{`, "unable to parse array"},
        {`{{5.6},{7.8}}`, "cannot convert ARRAY[2][1] to Float64Array"},
        {`{NULL}`, "parsing array element index 0:"},
        {`{a}`, "parsing array element index 0:"},
        {`{5.6,a}`, "parsing array element index 1:"},
        {`{5.6,7.8,a}`, "parsing array element index 2:"},
    } {
        arr := Float64Array{5, 5, 5}
        err := arr.Scan(tt.input)
        if err == nil {
            t.Fatalf("Expected error for %q, got none", tt.input)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for %q, got %q", tt.err, tt.input, err)
        }
        if !reflect.DeepEqual(arr, Float64Array{5, 5, 5}) {
            t.Errorf("Expected destination not to change for %q, got %+v", tt.input, arr)
        }
    }
}

func TestFloat64ArrayValue(t *testing.T) {
    result, err := Float64Array(nil).Value()
    if err != nil {
        t.Fatalf("Expected no error for nil, got %v", err)
    }
    if result != nil {
        t.Errorf("Expected nil, got %q", result)
    }

    result, err = Float64Array([]float64{}).Value()
    if err != nil {
        t.Fatalf("Expected no error for empty, got %v", err)
    }
    if expected := `{}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected empty, got %q", result)
    }

    result, err = Float64Array([]float64{1.2, 3.4, 5.6}).Value()
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if expected := `{1.2,3.4,5.6}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected %q, got %q", expected, result)
    }
}

func BenchmarkFloat64ArrayValue(b *testing.B) {
    rand.Seed(1)
    x := make([]float64, 10)
    for i := 0; i < len(x); i++ {
        x[i] = rand.NormFloat64()
    }
    a := Float64Array(x)
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func TestInt64ArrayScanUnsupported(t *testing.T) {
    var arr Int64Array
    err := arr.Scan(true)
    if err == nil {
        t.Fatal("Expected error when scanning from bool")
    }
    if !strings.Contains(err.Error(), "bool to Int64Array") {
        t.Errorf("Expected type to be mentioned when scanning, got %q", err)
    }
}

func TestInt64ArrayScanEmpty(t *testing.T) {
    var arr Int64Array
    err := arr.Scan(`{}`)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr == nil || len(arr) != 0 {
        t.Errorf("Expected empty, got %#v", arr)
    }
}

func TestInt64ArrayScanNil(t *testing.T) {
    arr := Int64Array{5, 5, 5}
    err := arr.Scan(nil)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr != nil {
        t.Errorf("Expected nil, got %+v", arr)
    }
}

var Int64ArrayStringTests = []struct {
    str string
    arr Int64Array
}{
    {`{}`, Int64Array{}},
    {`{12}`, Int64Array{12}},
    {`{345,678}`, Int64Array{345, 678}},
}

func TestInt64ArrayScanBytes(t *testing.T) {
    for _, tt := range Int64ArrayStringTests {
        bytes := []byte(tt.str)
        arr := Int64Array{5, 5, 5}
        err := arr.Scan(bytes)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", bytes, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, bytes, arr)
        }
    }
}

func BenchmarkInt64ArrayScanBytes(b *testing.B) {
    var a Int64Array
    var x interface{} = []byte(`{1,2,3,4,5,6,7,8,9,0}`)
    for i := 0; i < b.N; i++ {
        a = Int64Array{}
        a.Scan(x)
    }
}

func TestInt64ArrayScanString(t *testing.T) {
    for _, tt := range Int64ArrayStringTests {
        arr := Int64Array{5, 5, 5}
        err := arr.Scan(tt.str)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", tt.str, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, tt.str, arr)
        }
    }
}

func TestInt64ArrayScanError(t *testing.T) {
    for _, tt := range []struct {
        input, err string
    }{
        {``, "unable to parse array"},
        {`{`, "unable to parse array"},
        {`{{5},{6}}`, "cannot convert ARRAY[2][1] to Int64Array"},
        {`{NULL}`, "parsing array element index 0:"},
        {`{a}`, "parsing array element index 0:"},
        {`{5,a}`, "parsing array element index 1:"},
        {`{5,6,a}`, "parsing array element index 2:"},
    } {
        arr := Int64Array{5, 5, 5}
        err := arr.Scan(tt.input)
        if err == nil {
            t.Fatalf("Expected error for %q, got none", tt.input)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for %q, got %q", tt.err, tt.input, err)
        }
        if !reflect.DeepEqual(arr, Int64Array{5, 5, 5}) {
            t.Errorf("Expected destination not to change for %q, got %+v", tt.input, arr)
        }
    }
}

func TestInt64ArrayValue(t *testing.T) {
    result, err := Int64Array(nil).Value()
    if err != nil {
        t.Fatalf("Expected no error for nil, got %v", err)
    }
    if result != nil {
        t.Errorf("Expected nil, got %q", result)
    }

    result, err = Int64Array([]int64{}).Value()
    if err != nil {
        t.Fatalf("Expected no error for empty, got %v", err)
    }
    if expected := `{}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected empty, got %q", result)
    }

    result, err = Int64Array([]int64{1, 2, 3}).Value()
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if expected := `{1,2,3}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected %q, got %q", expected, result)
    }
}

func BenchmarkInt64ArrayValue(b *testing.B) {
    rand.Seed(1)
    x := make([]int64, 10)
    for i := 0; i < len(x); i++ {
        x[i] = rand.Int63()
    }
    a := Int64Array(x)
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func TestStringArrayScanUnsupported(t *testing.T) {
    var arr StringArray
    err := arr.Scan(true)
    if err == nil {
        t.Fatal("Expected error when scanning from bool")
    }
    if !strings.Contains(err.Error(), "bool to StringArray") {
        t.Errorf("Expected type to be mentioned when scanning, got %q", err)
    }
}

func TestStringArrayScanEmpty(t *testing.T) {
    var arr StringArray
    err := arr.Scan(`{}`)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr == nil || len(arr) != 0 {
        t.Errorf("Expected empty, got %#v", arr)
    }
}

func TestStringArrayScanNil(t *testing.T) {
    arr := StringArray{"x", "x", "x"}
    err := arr.Scan(nil)
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if arr != nil {
        t.Errorf("Expected nil, got %+v", arr)
    }
}

var StringArrayStringTests = []struct {
    str string
    arr StringArray
}{
    {`{}`, StringArray{}},
    {`{t}`, StringArray{"t"}},
    {`{f,1}`, StringArray{"f", "1"}},
    {`{"a\\b","c d",","}`, StringArray{"a\\b", "c d", ","}},
}

func TestStringArrayScanBytes(t *testing.T) {
    for _, tt := range StringArrayStringTests {
        bytes := []byte(tt.str)
        arr := StringArray{"x", "x", "x"}
        err := arr.Scan(bytes)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", bytes, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, bytes, arr)
        }
    }
}

func BenchmarkStringArrayScanBytes(b *testing.B) {
    var a StringArray
    var x interface{} = []byte(`{a,b,c,d,e,f,g,h,i,j}`)
    var y interface{} = []byte(`{"\a","\b","\c","\d","\e","\f","\g","\h","\i","\j"}`)
    for i := 0; i < b.N; i++ {
        a = StringArray{}
        a.Scan(x)
        a = StringArray{}
        a.Scan(y)
    }
}

func TestStringArrayScanString(t *testing.T) {
    for _, tt := range StringArrayStringTests {
        arr := StringArray{"x", "x", "x"}
        err := arr.Scan(tt.str)
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", tt.str, err)
        }
        if !reflect.DeepEqual(arr, tt.arr) {
            t.Errorf("Expected %+v for %q, got %+v", tt.arr, tt.str, arr)
        }
    }
}

func TestStringArrayScanError(t *testing.T) {
    for _, tt := range []struct {
        input, err string
    }{
        {``, "unable to parse array"},
        {`{`, "unable to parse array"},
        {`{{a},{b}}`, "cannot convert ARRAY[2][1] to StringArray"},
        {`{NULL}`, "parsing array element index 0: cannot convert nil to string"},
        {`{a,NULL}`, "parsing array element index 1: cannot convert nil to string"},
        {`{a,b,NULL}`, "parsing array element index 2: cannot convert nil to string"},
    } {
        arr := StringArray{"x", "x", "x"}
        err := arr.Scan(tt.input)
        if err == nil {
            t.Fatalf("Expected error for %q, got none", tt.input)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for %q, got %q", tt.err, tt.input, err)
        }
        if !reflect.DeepEqual(arr, StringArray{"x", "x", "x"}) {
            t.Errorf("Expected destination not to change for %q, got %+v", tt.input, arr)
        }
    }
}

func TestStringArrayValue(t *testing.T) {
    result, err := StringArray(nil).Value()
    if err != nil {
        t.Fatalf("Expected no error for nil, got %v", err)
    }
    if result != nil {
        t.Errorf("Expected nil, got %q", result)
    }

    result, err = StringArray([]string{}).Value()
    if err != nil {
        t.Fatalf("Expected no error for empty, got %v", err)
    }
    if expected := `{}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected empty, got %q", result)
    }

    result, err = StringArray([]string{`a`, `\b`, `c"`, `d,e`}).Value()
    if err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if expected := `{"a","\\b","c\"","d,e"}`; !reflect.DeepEqual(result, expected) {
        t.Errorf("Expected %q, got %q", expected, result)
    }
}

func BenchmarkStringArrayValue(b *testing.B) {
    x := make([]string, 10)
    for i := 0; i < len(x); i++ {
        x[i] = strings.Repeat(`abc"def\ghi`, 5)
    }
    a := StringArray(x)
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func TestGenericArrayScanUnsupported(t *testing.T) {
    var s string
    var ss []string
    var nsa [1]sql.NullString

    for _, tt := range []struct {
        src, dest interface{}
        err       string
    }{
        {nil, nil, "destination <nil> is not a pointer to array or slice"},
        {nil, true, "destination bool is not a pointer to array or slice"},
        {nil, &s, "destination *string is not a pointer to array or slice"},
        {nil, ss, "destination []string is not a pointer to array or slice"},
        {nil, &nsa, "<nil> to [1]sql.NullString"},
        {true, &ss, "bool to []string"},
        {`{{x}}`, &ss, "multidimensional ARRAY[1][1] is not implemented"},
        {`{{x},{x}}`, &ss, "multidimensional ARRAY[2][1] is not implemented"},
        {`{x}`, &ss, "scanning to string is not implemented"},
    } {
        err := GenericArray{tt.dest}.Scan(tt.src)
        if err == nil {
            t.Fatalf("Expected error for [%#v %#v]", tt.src, tt.dest)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for [%#v %#v], got %q", tt.err, tt.src, tt.dest, err)
        }
    }
}

func TestGenericArrayScanScannerArrayBytes(t *testing.T) {
    src, expected, nsa := []byte(`{NULL,abc,"\""}`),
        [3]sql.NullString{{}, {String: `abc`, Valid: true}, {String: `"`, Valid: true}},
        [3]sql.NullString{{String: ``, Valid: true}, {}, {}}

    if err := (GenericArray{&nsa}).Scan(src); err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if !reflect.DeepEqual(nsa, expected) {
        t.Errorf("Expected %v, got %v", expected, nsa)
    }
}

func TestGenericArrayScanScannerArrayString(t *testing.T) {
    src, expected, nsa := `{NULL,"\"",xyz}`,
        [3]sql.NullString{{}, {String: `"`, Valid: true}, {String: `xyz`, Valid: true}},
        [3]sql.NullString{{String: ``, Valid: true}, {}, {}}

    if err := (GenericArray{&nsa}).Scan(src); err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if !reflect.DeepEqual(nsa, expected) {
        t.Errorf("Expected %v, got %v", expected, nsa)
    }
}

func TestGenericArrayScanScannerSliceEmpty(t *testing.T) {
    var nss []sql.NullString
    if err := (GenericArray{&nss}).Scan(`{}`); err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if nss == nil || len(nss) != 0 {
        t.Errorf("Expected empty, got %#v", nss)
    }
}

func TestGenericArrayScanScannerSliceNil(t *testing.T) {
    nss := []sql.NullString{{String: ``, Valid: true}, {}}
    if err := (GenericArray{&nss}).Scan(nil); err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if nss != nil {
        t.Errorf("Expected nil, got %+v", nss)
    }
}

func TestGenericArrayScanScannerSliceBytes(t *testing.T) {
    src, expected, nss := []byte(`{NULL,abc,"\""}`),
        []sql.NullString{{}, {String: `abc`, Valid: true}, {String: `"`, Valid: true}},
        []sql.NullString{{String: ``, Valid: true}, {}, {}, {}, {}}

    if err := (GenericArray{&nss}).Scan(src); err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if !reflect.DeepEqual(nss, expected) {
        t.Errorf("Expected %v, got %v", expected, nss)
    }
}

func BenchmarkGenericArrayScanScannerSliceBytes(b *testing.B) {
    var a GenericArray
    var x interface{} = []byte(`{a,b,c,d,e,f,g,h,i,j}`)
    var y interface{} = []byte(`{"\a","\b","\c","\d","\e","\f","\g","\h","\i","\j"}`)
    for i := 0; i < b.N; i++ {
        a = GenericArray{new([]sql.NullString)}
        a.Scan(x)
        a = GenericArray{new([]sql.NullString)}
        a.Scan(y)
    }
}

func TestGenericArrayScanScannerSliceString(t *testing.T) {
    src, expected, nss := `{NULL,"\"",xyz}`,
        []sql.NullString{{}, {String: `"`, Valid: true}, {String: `xyz`, Valid: true}},
        []sql.NullString{{String: ``, Valid: true}, {}, {}}

    if err := (GenericArray{&nss}).Scan(src); err != nil {
        t.Fatalf("Expected no error, got %v", err)
    }
    if !reflect.DeepEqual(nss, expected) {
        t.Errorf("Expected %v, got %v", expected, nss)
    }
}

type TildeNullInt64 struct{ sql.NullInt64 }

func (TildeNullInt64) ArrayDelimiter() string { return "~" }

func TestGenericArrayScanDelimiter(t *testing.T) {
    src, expected, tnis := `{12~NULL~76}`,
        []TildeNullInt64{{sql.NullInt64{Int64: 12, Valid: true}}, {}, {sql.NullInt64{Int64: 76, Valid: true}}},
        []TildeNullInt64{{sql.NullInt64{Int64: 0, Valid: true}}, {}}

    if err := (GenericArray{&tnis}).Scan(src); err != nil {
        t.Fatalf("Expected no error for %#v, got %v", src, err)
    }
    if !reflect.DeepEqual(tnis, expected) {
        t.Errorf("Expected %v for %#v, got %v", expected, src, tnis)
    }
}

func TestGenericArrayScanErrors(t *testing.T) {
    var sa [1]string
    var nis []sql.NullInt64
    var pss *[]string

    for _, tt := range []struct {
        src, dest interface{}
        err       string
    }{
        {nil, pss, "destination *[]string is nil"},
        {`{`, &sa, "unable to parse"},
        {`{}`, &sa, "cannot convert ARRAY[0] to [1]string"},
        {`{x,x}`, &sa, "cannot convert ARRAY[2] to [1]string"},
        {`{x}`, &nis, `parsing array element index 0: converting`},
    } {
        err := GenericArray{tt.dest}.Scan(tt.src)
        if err == nil {
            t.Fatalf("Expected error for [%#v %#v]", tt.src, tt.dest)
        }
        if !strings.Contains(err.Error(), tt.err) {
            t.Errorf("Expected error to contain %q for [%#v %#v], got %q", tt.err, tt.src, tt.dest, err)
        }
    }
}

func TestGenericArrayValueUnsupported(t *testing.T) {
    _, err := GenericArray{true}.Value()
    if err == nil {
        t.Fatal("Expected error for bool")
    }
    if !strings.Contains(err.Error(), "bool to array") {
        t.Errorf("Expected type to be mentioned, got %q", err)
    }
}

type ByteArrayValuer [1]byte
type ByteSliceValuer []byte
type FuncArrayValuer struct {
    delimiter func() string
    value     func() (driver.Value, error)
}

func (a ByteArrayValuer) Value() (driver.Value, error) { return a[:], nil }
func (b ByteSliceValuer) Value() (driver.Value, error) { return []byte(b), nil }
func (f FuncArrayValuer) ArrayDelimiter() string       { return f.delimiter() }
func (f FuncArrayValuer) Value() (driver.Value, error) { return f.value() }

func TestGenericArrayValue(t *testing.T) {
    result, err := GenericArray{nil}.Value()
    if err != nil {
        t.Fatalf("Expected no error for nil, got %v", err)
    }
    if result != nil {
        t.Errorf("Expected nil, got %q", result)
    }

    for _, tt := range []interface{}{
        []bool(nil),
        [][]int(nil),
        []*int(nil),
        []sql.NullString(nil),
    } {
        result, err := GenericArray{tt}.Value()
        if err != nil {
            t.Fatalf("Expected no error for %#v, got %v", tt, err)
        }
        if result != nil {
            t.Errorf("Expected nil for %#v, got %q", tt, result)
        }
    }

    Tilde := func(v driver.Value) FuncArrayValuer {
        return FuncArrayValuer{
            func() string { return "~" },
            func() (driver.Value, error) { return v, nil }}
    }

    for _, tt := range []struct {
        result string
        input  interface{}
    }{
        {`{}`, []bool{}},
        {`{true}`, []bool{true}},
        {`{true,false}`, []bool{true, false}},
        {`{true,false}`, [2]bool{true, false}},

        {`{}`, [][]int{{}}},
        {`{}`, [][]int{{}, {}}},
        {`{{1}}`, [][]int{{1}}},
        {`{{1},{2}}`, [][]int{{1}, {2}}},
        {`{{1,2},{3,4}}`, [][]int{{1, 2}, {3, 4}}},
        {`{{1,2},{3,4}}`, [2][2]int{{1, 2}, {3, 4}}},

        {`{"a","\\b","c\"","d,e"}`, []string{`a`, `\b`, `c"`, `d,e`}},
        {`{"a","\\b","c\"","d,e"}`, [][]byte{{'a'}, {'\\', 'b'}, {'c', '"'}, {'d', ',', 'e'}}},

        {`{NULL}`, []*int{nil}},
        {`{0,NULL}`, []*int{new(int), nil}},

        {`{NULL}`, []sql.NullString{{}}},
        {`{"\"",NULL}`, []sql.NullString{{String: `"`, Valid: true}, {}}},

        {`{"a","b"}`, []ByteArrayValuer{{'a'}, {'b'}}},
        {`{{"a","b"},{"c","d"}}`, [][]ByteArrayValuer{{{'a'}, {'b'}}, {{'c'}, {'d'}}}},

        {`{"e","f"}`, []ByteSliceValuer{{'e'}, {'f'}}},
        {`{{"e","f"},{"g","h"}}`, [][]ByteSliceValuer{{{'e'}, {'f'}}, {{'g'}, {'h'}}}},

        {`{1~2}`, []FuncArrayValuer{Tilde(int64(1)), Tilde(int64(2))}},
        {`{{1~2}~{3~4}}`, [][]FuncArrayValuer{{Tilde(int64(1)), Tilde(int64(2))}, {Tilde(int64(3)), Tilde(int64(4))}}},
    } {
        result, err := GenericArray{tt.input}.Value()
        if err != nil {
            t.Fatalf("Expected no error for %q, got %v", tt.input, err)
        }
        if !reflect.DeepEqual(result, tt.result) {
            t.Errorf("Expected %q for %q, got %q", tt.result, tt.input, result)
        }
    }
}

func TestGenericArrayValueErrors(t *testing.T) {
    v := []interface{}{func() {}}
    if _, err := (GenericArray{v}).Value(); err == nil {
        t.Errorf("Expected error for %q, got nil", v)
    }
    v = []interface{}{nil, func() {}}
    if _, err := (GenericArray{v}).Value(); err == nil {
        t.Errorf("Expected error for %q, got nil", v)
    }
}

func BenchmarkGenericArrayValueBools(b *testing.B) {
    rand.Seed(1)
    x := make([]bool, 10)
    for i := 0; i < len(x); i++ {
        x[i] = rand.Intn(2) == 0
    }
    a := GenericArray{x}
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func BenchmarkGenericArrayValueFloat64s(b *testing.B) {
    rand.Seed(1)
    x := make([]float64, 10)
    for i := 0; i < len(x); i++ {
        x[i] = rand.NormFloat64()
    }
    a := GenericArray{x}
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func BenchmarkGenericArrayValueInt64s(b *testing.B) {
    rand.Seed(1)
    x := make([]int64, 10)
    for i := 0; i < len(x); i++ {
        x[i] = rand.Int63()
    }
    a := GenericArray{x}
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func BenchmarkGenericArrayValueByteSlices(b *testing.B) {
    x := make([][]byte, 10)
    for i := 0; i < len(x); i++ {
        x[i] = bytes.Repeat([]byte(`abc"def\ghi`), 5)
    }
    a := GenericArray{x}
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func BenchmarkGenericArrayValueStrings(b *testing.B) {
    x := make([]string, 10)
    for i := 0; i < len(x); i++ {
        x[i] = strings.Repeat(`abc"def\ghi`, 5)
    }
    a := GenericArray{x}
    for i := 0; i < b.N; i++ {
        a.Value()
    }
}

func TestArrayScanBackend(t *testing.T) {
    db := openTestConn(t)
    defer db.Close()

    for _, tt := range []struct {
        s string
        d sql.Scanner
        e interface{}
    }{
        {`ARRAY[true, false]`, new(BoolArray), &BoolArray{true, false}},
        {`ARRAY[E'\\xdead', E'\\xbeef']`, new(ByteaArray), &ByteaArray{{'\xDE', '\xAD'}, {'\xBE', '\xEF'}}},
        {`ARRAY[1.2, 3.4]`, new(Float64Array), &Float64Array{1.2, 3.4}},
        {`ARRAY[1, 2, 3]`, new(Int64Array), &Int64Array{1, 2, 3}},
        {`ARRAY['a', E'\\b', 'c"', 'd,e']`, new(StringArray), &StringArray{`a`, `\b`, `c"`, `d,e`}},
    } {
        err := db.QueryRow(`SELECT ` + tt.s).Scan(tt.d)
        if err != nil {
            t.Errorf("Expected no error when scanning %s into %T, got %v", tt.s, tt.d, err)
        }
        if !reflect.DeepEqual(tt.d, tt.e) {
            t.Errorf("Expected %v when scanning %s into %T, got %v", tt.e, tt.s, tt.d, tt.d)
        }
    }
}

func TestArrayValueBackend(t *testing.T) {
    db := openTestConn(t)
    defer db.Close()

    for _, tt := range []struct {
        s string
        v driver.Valuer
    }{
        {`ARRAY[true, false]`, BoolArray{true, false}},
        {`ARRAY[E'\\xdead', E'\\xbeef']`, ByteaArray{{'\xDE', '\xAD'}, {'\xBE', '\xEF'}}},
        {`ARRAY[1.2, 3.4]`, Float64Array{1.2, 3.4}},
        {`ARRAY[1, 2, 3]`, Int64Array{1, 2, 3}},
        {`ARRAY['a', E'\\b', 'c"', 'd,e']`, StringArray{`a`, `\b`, `c"`, `d,e`}},
    } {
        var x int
        err := db.QueryRow(`SELECT 1 WHERE `+tt.s+` <> $1`, tt.v).Scan(&x)
        if err != sql.ErrNoRows {
            t.Errorf("Expected %v to equal %s, got %v", tt.v, tt.s, err)
        }
    }
}
zipkinv1_to_protospan_test.go
// Copyright 2019, OpenTelemetry Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package zipkin import ( "encoding/json" "io/ioutil" "reflect" "sort" "strconv" "testing" commonpb "github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1" tracepb "github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1" "github.com/golang/protobuf/ptypes/timestamp" zipkinmodel "github.com/openzipkin/zipkin-go/model" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/consumer/consumerdata" tracetranslator "go.opentelemetry.io/collector/translator/trace" ) func Test_hexIDToOCID(t *testing.T) { tests := []struct { name string hexStr string want []byte wantErr error }{ { name: "empty hex string", hexStr: "", want: nil, wantErr: errHexIDWrongLen, }, { name: "wrong length", hexStr: "0000", want: nil, wantErr: errHexIDWrongLen, }, { name: "parse error", hexStr: "000000000000000-", want: nil, wantErr: errHexIDParsing, }, { name: "all zero", hexStr: "0000000000000000", want: nil, wantErr: errHexIDZero, }, { name: "happy path", hexStr: "0706050400010203", want: []byte{7, 6, 5, 4, 0, 1, 2, 3}, wantErr: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := hexIDToOCID(tt.hexStr) if tt.wantErr != nil && tt.wantErr != err { t.Errorf("hexIDToOCID() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("hexIDToOCID() = %v, want %v", got, tt.want) } }) } } func Test_hexTraceIDToOCTraceID(t *testing.T) { tests := []struct { name string hexStr string want []byte wantErr error }{ { name: "empty hex string", hexStr: "", want: nil, wantErr: errHexTraceIDWrongLen, }, { name: "wrong length", hexStr: "000000000000000010", want: nil, wantErr: errHexTraceIDWrongLen, }, { name: "parse error", hexStr: "000000000000000X0000000000000000", want: nil, wantErr: errHexTraceIDParsing, }, { name: "all zero", hexStr: "00000000000000000000000000000000", want: nil, wantErr: errHexTraceIDZero, }, { name: "happy path", hexStr: "00000000000000010000000000000002", want: []byte{0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2}, wantErr: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := hexTraceIDToOCTraceID(tt.hexStr) if tt.wantErr != nil && tt.wantErr != err { t.Errorf("hexTraceIDToOCTraceID() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("hexTraceIDToOCTraceID() = %v, want %v", got, tt.want) } }) } } func TestZipkinJSONFallbackToLocalComponent(t *testing.T) { blob, err := ioutil.ReadFile("./testdata/zipkin_v1_local_component.json") if err != nil { t.Fatalf("failed to load test data: %v", err) } reqs, err := V1JSONBatchToOCProto(blob) if err != nil { t.Fatalf("failed to translate zipkinv1 to OC proto: %v", err) } if len(reqs) != 2 { t.Fatalf("got %d trace service request(s), want 2", len(reqs)) } // Ensure the order of nodes sort.Slice(reqs, func(i, j int) bool { return reqs[i].Node.ServiceInfo.Name < 
reqs[j].Node.ServiceInfo.Name })

	// First span didn't have a host/endpoint to give a service name; use the local component.
	got := reqs[0].Node.ServiceInfo.Name
	want := "myLocalComponent"
	if got != want {
		t.Fatalf("got %q for service name, want %q", got, want)
	}

	// Second span has a host/endpoint to give a service name; do not use the local component.
	got = reqs[1].Node.ServiceInfo.Name
	want = "myServiceName"
	if got != want
} func TestSingleJSONV1BatchToOCProto(t *testing.T) { blob, err := ioutil.ReadFile("./testdata/zipkin_v1_single_batch.json") if err != nil { t.Fatalf("failed to load test data: %v", err) } got, err := V1JSONBatchToOCProto(blob) if err != nil { t.Fatalf("failed to translate zipkinv1 to OC proto: %v", err) } want := ocBatchesFromZipkinV1 sortTraceByNodeName(want) sortTraceByNodeName(got) if !reflect.DeepEqual(got, want) { t.Fatalf("Unsuccessful conversion\nGot:\n\t%v\nWant:\n\t%v", got, want) } } func TestMultipleJSONV1BatchesToOCProto(t *testing.T) { blob, err := ioutil.ReadFile("./testdata/zipkin_v1_multiple_batches.json") if err != nil { t.Fatalf("failed to load test data: %v", err) } var batches []interface{} if err := json.Unmarshal(blob, &batches); err != nil { t.Fatalf("failed to load the batches: %v", err) } nodeToTraceReqs := make(map[string]*consumerdata.TraceData) var got []consumerdata.TraceData for _, batch := range batches { jsonBatch, err := json.Marshal(batch) if err != nil { t.Fatalf("failed to marshal interface back to blob: %v", err) } g, err := V1JSONBatchToOCProto(jsonBatch) if err != nil { t.Fatalf("failed to translate zipkinv1 to OC proto: %v", err) } // Coalesce the nodes otherwise they will differ due to multiple // nodes representing same logical service for _, tsr := range g { key := tsr.Node.String() if pTsr, ok := nodeToTraceReqs[key]; ok { pTsr.Spans = append(pTsr.Spans, tsr.Spans...) } else { nodeToTraceReqs[key] = &tsr } } } for _, tsr := range nodeToTraceReqs { got = append(got, *tsr) } want := ocBatchesFromZipkinV1 sortTraceByNodeName(want) sortTraceByNodeName(got) if !reflect.DeepEqual(got, want) { t.Fatalf("Unsuccessful conversion\nGot:\n\t%v\nWant:\n\t%v", got, want) } } func sortTraceByNodeName(trace []consumerdata.TraceData) { sort.Slice(trace, func(i, j int) bool { return trace[i].Node.ServiceInfo.Name < trace[j].Node.ServiceInfo.Name }) } func TestZipkinAnnotationsToOCStatus(t *testing.T) { type test struct { haveTags []*binaryAnnotation wantAttributes *tracepb.Span_Attributes wantStatus *tracepb.Status } cases := []test{ // only status.code tag { haveTags: []*binaryAnnotation{{ Key: "status.code", Value: "13", }}, wantAttributes: nil, wantStatus: &tracepb.Status{ Code: 13, }, }, // only status.message tag { haveTags: []*binaryAnnotation{{ Key: "status.message", Value: "Forbidden", }}, wantAttributes: nil, wantStatus: nil, }, // both status.code and status.message { haveTags: []*binaryAnnotation{ { Key: "status.code", Value: "13", }, { Key: "status.message", Value: "Forbidden", }, }, wantAttributes: nil, wantStatus: &tracepb.Status{ Code: 13, Message: "Forbidden", }, }, // http status.code { haveTags: []*binaryAnnotation{ { Key: "http.status_code", Value: "404", }, { Key: "http.status_message", Value: "NotFound", }, }, wantAttributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ tracetranslator.TagHTTPStatusCode: { Value: &tracepb.AttributeValue_IntValue{ IntValue: 404, }, }, tracetranslator.TagHTTPStatusMsg: { Value: &tracepb.AttributeValue_StringValue{ StringValue: &tracepb.TruncatableString{Value: "NotFound"}, }, }, }, }, wantStatus: &tracepb.Status{ Code: 5, Message: "NotFound", }, }, // http and oc { haveTags: []*binaryAnnotation{ { Key: "http.status_code", Value: "404", }, { Key: "http.status_message", Value: "NotFound", }, { Key: "status.code", Value: "13", }, { Key: "status.message", Value: "Forbidden", }, }, wantAttributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ 
tracetranslator.TagHTTPStatusCode: { Value: &tracepb.AttributeValue_IntValue{ IntValue: 404, }, }, tracetranslator.TagHTTPStatusMsg: { Value: &tracepb.AttributeValue_StringValue{ StringValue: &tracepb.TruncatableString{Value: "NotFound"}, }, }, }, }, wantStatus: &tracepb.Status{ Code: 13, Message: "Forbidden", }, }, // http and only oc code { haveTags: []*binaryAnnotation{ { Key: "http.status_code", Value: "404", }, { Key: "http.status_message", Value: "NotFound", }, { Key: "status.code", Value: "14", }, }, wantAttributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ tracetranslator.TagHTTPStatusCode: { Value: &tracepb.AttributeValue_IntValue{ IntValue: 404, }, }, tracetranslator.TagHTTPStatusMsg: { Value: &tracepb.AttributeValue_StringValue{ StringValue: &tracepb.TruncatableString{Value: "NotFound"}, }, }, }, }, wantStatus: &tracepb.Status{ Code: 14, }, }, // http and only oc message { haveTags: []*binaryAnnotation{ { Key: "http.status_code", Value: "404", }, { Key: "http.status_message", Value: "NotFound", }, { Key: "status.message", Value: "Forbidden", }, }, wantAttributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ tracetranslator.TagHTTPStatusCode: { Value: &tracepb.AttributeValue_IntValue{ IntValue: 404, }, }, tracetranslator.TagHTTPStatusMsg: { Value: &tracepb.AttributeValue_StringValue{ StringValue: &tracepb.TruncatableString{Value: "NotFound"}, }, }, }, }, wantStatus: &tracepb.Status{ Code: 5, Message: "NotFound", }, }, // census tags { haveTags: []*binaryAnnotation{ { Key: "census.status_code", Value: "10", }, { Key: "census.status_description", Value: "RPCError", }, }, wantAttributes: nil, wantStatus: &tracepb.Status{ Code: 10, Message: "RPCError", }, }, // census tags priority over others { haveTags: []*binaryAnnotation{ { Key: "census.status_code", Value: "10", }, { Key: "census.status_description", Value: "RPCError", }, { Key: "http.status_code", Value: "404", }, { Key: "http.status_message", Value: "NotFound", }, { Key: "status.message", Value: "Forbidden", }, { Key: "status.code", Value: "7", }, }, wantAttributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ tracetranslator.TagHTTPStatusCode: { Value: &tracepb.AttributeValue_IntValue{ IntValue: 404, }, }, tracetranslator.TagHTTPStatusMsg: { Value: &tracepb.AttributeValue_StringValue{ StringValue: &tracepb.TruncatableString{Value: "NotFound"}, }, }, }, }, wantStatus: &tracepb.Status{ Code: 10, Message: "RPCError", }, }, } fakeTraceID := "00000000000000010000000000000002" fakeSpanID := "0000000000000001" for i, c := range cases { zSpans := []*zipkinV1Span{{ ID: fakeSpanID, TraceID: fakeTraceID, BinaryAnnotations: c.haveTags, }} zBytes, err := json.Marshal(zSpans) if err != nil { t.Errorf("#%d: Unexpected error: %v", i, err) continue } gb, err := V1JSONBatchToOCProto(zBytes) if err != nil { t.Errorf("#%d: Unexpected error: %v", i, err) continue } gs := gb[0].Spans[0] if !reflect.DeepEqual(gs.Attributes, c.wantAttributes) { t.Fatalf("Unsuccessful conversion\nGot:\n\t%v\nWant:\n\t%v", gs.Attributes, c.wantAttributes) } if !reflect.DeepEqual(gs.Status, c.wantStatus) { t.Fatalf("Unsuccessful conversion: %d\nGot:\n\t%v\nWant:\n\t%v", i, gs.Status, c.wantStatus) } } } func TestJSONHTTPToGRPCStatusCode(t *testing.T) { fakeTraceID := "00000000000000010000000000000002" fakeSpanID := "0000000000000001" for i := int32(100); i <= 600; i++ { wantStatus := tracetranslator.OCStatusCodeFromHTTP(i) zBytes, err := json.Marshal([]*zipkinV1Span{{ ID: 
fakeSpanID, TraceID: fakeTraceID, BinaryAnnotations: []*binaryAnnotation{ { Key: "http.status_code", Value: strconv.Itoa(int(i)), }, }, }}) if err != nil { t.Errorf("#%d: Unexpected error: %v", i, err) continue } gb, err := V1JSONBatchToOCProto(zBytes) if err != nil { t.Errorf("#%d: Unexpected error: %v", i, err) continue } gs := gb[0].Spans[0] if !reflect.DeepEqual(gs.Status.Code, wantStatus) { t.Fatalf("Unsuccessful conversion: %d\nGot:\n\t%v\nWant:\n\t%v", i, gs.Status, wantStatus) } } } // ocBatches has the OpenCensus proto batches used in the test. They are hard coded because // structs like tracepb.AttributeMap cannot be ready from JSON. var ocBatchesFromZipkinV1 = []consumerdata.TraceData{ { Node: &commonpb.Node{ ServiceInfo: &commonpb.ServiceInfo{Name: "front-proxy"}, Attributes: map[string]string{"ipv4": "172.31.0.2"}, }, Spans: []*tracepb.Span{ { TraceId: []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, SpanId: []byte{0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, ParentSpanId: nil, Name: &tracepb.TruncatableString{Value: "checkAvailability"}, Kind: tracepb.Span_CLIENT, StartTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 446743000}, EndTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 459699000}, TimeEvents: &tracepb.Span_TimeEvents{ TimeEvent: []*tracepb.Span_TimeEvent{ { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 446743000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "cs": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "front-proxy"}}, }, }, }, }, }, }, { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 460510000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "cr": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "front-proxy"}}, }, }, }, }, }, }, }, }, }, }, }, { Node: &commonpb.Node{ ServiceInfo: &commonpb.ServiceInfo{Name: "service1"}, Attributes: map[string]string{"ipv4": "172.31.0.4"}, }, Spans: []*tracepb.Span{ { TraceId: []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, SpanId: []byte{0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, ParentSpanId: nil, Name: &tracepb.TruncatableString{Value: "checkAvailability"}, Kind: tracepb.Span_SERVER, StartTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 448081000}, EndTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 460102000}, TimeEvents: &tracepb.Span_TimeEvents{ TimeEvent: []*tracepb.Span_TimeEvent{ { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 448081000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "sr": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "service1"}}, }, }, }, }, }, }, { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 460102000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "ss": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "service1"}}, 
}, }, }, }, }, }, }, }, }, { TraceId: []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, SpanId: []byte{0xf9, 0xeb, 0xb6, 0xe6, 0x48, 0x80, 0x61, 0x2a}, ParentSpanId: []byte{0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, Name: &tracepb.TruncatableString{Value: "checkStock"}, Kind: tracepb.Span_CLIENT, StartTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 453923000}, EndTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 457663000}, TimeEvents: &tracepb.Span_TimeEvents{ TimeEvent: []*tracepb.Span_TimeEvent{ { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 453923000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "cs": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "service1"}}, }, }, }, }, }, }, { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 457717000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "cr": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "service1"}}, }, }, }, }, }, }, }, }, }, }, }, { Node: &commonpb.Node{ ServiceInfo: &commonpb.ServiceInfo{Name: "service2"}, Attributes: map[string]string{"ipv4": "172.31.0.7"}, }, Spans: []*tracepb.Span{ { TraceId: []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, SpanId: []byte{0xf9, 0xeb, 0xb6, 0xe6, 0x48, 0x80, 0x61, 0x2a}, ParentSpanId: []byte{0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, Name: &tracepb.TruncatableString{Value: "checkStock"}, Kind: tracepb.Span_SERVER, StartTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 454487000}, EndTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 457320000}, Status: &tracepb.Status{ Code: 0, }, Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "http.status_code": { Value: &tracepb.AttributeValue_IntValue{IntValue: 200}, }, "http.url": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "http://localhost:9000/trace/2"}}, }, "success": { Value: &tracepb.AttributeValue_BoolValue{BoolValue: true}, }, }, }, TimeEvents: &tracepb.Span_TimeEvents{ TimeEvent: []*tracepb.Span_TimeEvent{ { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 454487000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "sr": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "service2"}}, }, }, }, }, }, }, { Time: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 457320000}, Value: &tracepb.Span_TimeEvent_Annotation_{ Annotation: &tracepb.Span_TimeEvent_Annotation{ Attributes: &tracepb.Span_Attributes{ AttributeMap: map[string]*tracepb.AttributeValue{ "ss": { Value: &tracepb.AttributeValue_StringValue{StringValue: &tracepb.TruncatableString{Value: "service2"}}, }, }, }, }, }, }, }, }, }, }, }, { Node: &commonpb.Node{ ServiceInfo: &commonpb.ServiceInfo{Name: "unknown-service"}, }, Spans: []*tracepb.Span{ { TraceId: []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, SpanId: []byte{0xfe, 0x35, 0x1a, 0x05, 0x3f, 0xbc, 0xac, 0x1f}, 
ParentSpanId: []byte{0x0e, 0xd2, 0xe6, 0x3c, 0xbe, 0x71, 0xf5, 0xa8}, Name: &tracepb.TruncatableString{Value: "checkStock"}, Kind: tracepb.Span_SPAN_KIND_UNSPECIFIED, StartTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 453923000}, EndTime: &timestamp.Timestamp{Seconds: 1544805927, Nanos: 457663000}, Attributes: nil, }, }, }, } func TestSpanKindTranslation(t *testing.T) { tests := []struct { zipkinV1Kind string zipkinV2Kind zipkinmodel.Kind ocKind tracepb.Span_SpanKind ocAttrSpanKind tracetranslator.OpenTracingSpanKind jaegerSpanKind string }{ { zipkinV1Kind: "cr", zipkinV2Kind: zipkinmodel.Client, ocKind: tracepb.Span_CLIENT, jaegerSpanKind: "client", }, { zipkinV1Kind: "sr", zipkinV2Kind: zipkinmodel.Server, ocKind: tracepb.Span_SERVER, jaegerSpanKind: "server", }, { zipkinV1Kind: "ms", zipkinV2Kind: zipkinmodel.Producer, ocKind: tracepb.Span_SPAN_KIND_UNSPECIFIED, ocAttrSpanKind: tracetranslator.OpenTracingSpanKindProducer, jaegerSpanKind: "producer", }, { zipkinV1Kind: "mr", zipkinV2Kind: zipkinmodel.Consumer, ocKind: tracepb.Span_SPAN_KIND_UNSPECIFIED, ocAttrSpanKind: tracetranslator.OpenTracingSpanKindConsumer, jaegerSpanKind: "consumer", }, } for _, test := range tests { t.Run(test.zipkinV1Kind, func(t *testing.T) { // Create Zipkin V1 span. zSpan := &zipkinV1Span{ TraceID: "1234567890123456", ID: "0123456789123456", Annotations: []*annotation{ {Value: test.zipkinV1Kind}, // note that only first annotation matters. {Value: "cr"}, // this will have no effect. }, } // Translate to OC and verify that span kind is correctly translated. ocSpan, parsedAnnotations, err := zipkinV1ToOCSpan(zSpan) assert.NoError(t, err) assert.EqualValues(t, test.ocKind, ocSpan.Kind) assert.NotNil(t, parsedAnnotations) if test.ocAttrSpanKind != "" { require.NotNil(t, ocSpan.Attributes) // This is a special case, verify that TagSpanKind attribute is set. expected := &tracepb.AttributeValue{ Value: &tracepb.AttributeValue_StringValue{ StringValue: &tracepb.TruncatableString{Value: string(test.ocAttrSpanKind)}, }, } assert.EqualValues(t, expected, ocSpan.Attributes.AttributeMap[tracetranslator.TagSpanKind]) } // Translate to Zipkin V2 (which is used for internal representation by Zipkin exporter). zSpanTranslated, err := OCSpanProtoToZipkin(nil, nil, ocSpan, "") assert.NoError(t, err) assert.EqualValues(t, test.zipkinV2Kind, zSpanTranslated.Kind) }) } }
{ t.Fatalf("got %q for service name, want %q", got, want) }
tf_ops.py
import gym
from gym.spaces import Discrete, MultiDiscrete
import numpy as np
import tree

from ray.rllib.utils.framework import try_import_tf

tf1, tf, tfv = try_import_tf()


def convert_to_non_tf_type(stats):
    """Converts values in `stats` to non-Tensor numpy or python types.

    Args:
        stats (any): Any (possibly nested) struct, the values in which will be
            converted and returned as a new struct with all tf (eager) tensors
            being converted to numpy types.

    Returns:
        Any: A new struct with the same structure as `stats`, but with all
            values converted to non-tf Tensor types.
    """

    # The mapping function used to convert (eager) tf Tensors to numpy arrays.
    def mapping(item):
        if isinstance(item, (tf.Tensor, tf.Variable)):
            return item.numpy()
        else:
            return item

    return tree.map_structure(mapping, stats)


def explained_variance(y, pred):
    _, y_var = tf.nn.moments(y, axes=[0])
    _, diff_var = tf.nn.moments(y - pred, axes=[0])
    return tf.maximum(-1.0, 1 - (diff_var / y_var))


def get_placeholder(*, space=None, value=None, name=None, time_axis=False):
    from ray.rllib.models.catalog import ModelCatalog

    if space is not None:
        if isinstance(space, (gym.spaces.Dict, gym.spaces.Tuple)):
            return ModelCatalog.get_action_placeholder(space, None)
        return tf1.placeholder(
            shape=(None, ) + ((None, ) if time_axis else ()) + space.shape,
            dtype=tf.float32 if space.dtype == np.float64 else space.dtype,
            name=name,
        )
    else:
        assert value is not None
        shape = value.shape[1:]
        return tf1.placeholder(
            shape=(None, ) + ((None, ) if time_axis else ()) +
            (shape if isinstance(shape, tuple) else tuple(shape.as_list())),
            dtype=tf.float32 if value.dtype == np.float64 else value.dtype,
            name=name,
        )


def huber_loss(x, delta=1.0):
    """Reference: https://en.wikipedia.org/wiki/Huber_loss"""
    return tf.where(
        tf.abs(x) < delta,
        tf.math.square(x) * 0.5,
        delta * (tf.abs(x) - 0.5 * delta))


def one_hot(x, space):
    if isinstance(space, Discrete):
        return tf.one_hot(x, space.n)
    elif isinstance(space, MultiDiscrete):
        return tf.concat(
            [tf.one_hot(x[:, i], n) for i, n in enumerate(space.nvec)],
            axis=-1)
    else:
        raise ValueError("Unsupported space for `one_hot`: {}".format(space))


def reduce_mean_ignore_inf(x, axis):
    """Same as tf.reduce_mean() but ignores -inf values."""
    mask = tf.not_equal(x, tf.float32.min)
    x_zeroed = tf.where(mask, x, tf.zeros_like(x))
    return (tf.reduce_sum(x_zeroed, axis) /
            tf.reduce_sum(tf.cast(mask, tf.float32), axis))


def minimize_and_clip(optimizer, objective, var_list, clip_val=10.0):
    """Minimizes `objective` using `optimizer` w.r.t. variables in `var_list`,
    while ensuring the norm of the gradients for each variable is clipped to
    `clip_val`.
    """
    # Accidentally passing values < 0.0 will break all gradients.
    assert clip_val > 0.0, clip_val

    if tf.executing_eagerly():
        tape = optimizer.tape
        grads_and_vars = list(
            zip(list(tape.gradient(objective, var_list)), var_list))
    else:
        grads_and_vars = optimizer.compute_gradients(
            objective, var_list=var_list)

    for i, (grad, var) in enumerate(grads_and_vars):
        if grad is not None:
            grads_and_vars[i] = (tf.clip_by_norm(grad, clip_val), var)
    return grads_and_vars


def make_tf_callable(session_or_none, dynamic_shape=False):
    """Returns a function that can be executed in either graph or eager mode.

    The function must take only positional args.

    If eager is enabled, this will act as just a function. Otherwise, it
    will build a function that executes a session run with placeholders
    internally.

    Args:
        session_or_none (tf.Session): tf.Session if in graph mode, else None.
        dynamic_shape (bool): True if the placeholders should have a dynamic
            batch dimension.
            Otherwise they will be fixed shape.

    Returns:
        A Python function that can be called in either mode.
    """

    if tf.executing_eagerly():
        assert session_or_none is None
    else:
        assert session_or_none is not None

    def make_wrapper(fn):
        # Static-graph mode: Create placeholders and make a session call each
        # time the wrapped function is called. Return this session call's
        # outputs.
        if session_or_none is not None:
            args_placeholders = []
            kwargs_placeholders = {}
            symbolic_out = [None]

            def call(*args, **kwargs):
                args_flat = []
                for a in args:
                    if type(a) is list:
                        args_flat.extend(a)
                    else:
                        args_flat.append(a)
                args = args_flat
                if symbolic_out[0] is None:
                    with session_or_none.graph.as_default():
                        for i, v in enumerate(args):
                            if dynamic_shape:
                                if len(v.shape) > 0:
                                    shape = (None, ) + v.shape[1:]
                                else:
                                    shape = ()
                            else:
                                shape = v.shape
                            args_placeholders.append(
                                tf1.placeholder(
                                    dtype=v.dtype,
                                    shape=shape,
                                    name="arg_{}".format(i)))
                        for k, v in kwargs.items():
                            if dynamic_shape:
                                if len(v.shape) > 0:
                                    shape = (None, ) + v.shape[1:]
                                else:
                                    shape = ()
                            else:
                                shape = v.shape
                            kwargs_placeholders[k] = \
                                tf1.placeholder(
                                    dtype=v.dtype,
                                    shape=shape,
                                    name="kwarg_{}".format(k))
                        symbolic_out[0] = fn(*args_placeholders,
                                             **kwargs_placeholders)
                feed_dict = dict(zip(args_placeholders, args))
                feed_dict.update(
                    {kwargs_placeholders[k]: kwargs[k]
                     for k in kwargs.keys()})
                ret = session_or_none.run(symbolic_out[0], feed_dict)
                return ret

            return call
        # Eager mode (call function as is).
        else:
            return fn

    return make_wrapper


def scope_vars(scope, trainable_only=False):
    """Get variables inside a scope. The scope can be specified as a string.

    Parameters
    ----------
    scope: str or VariableScope
        scope in which the variables reside.
    trainable_only: bool
        whether or not to return only the variables that were marked as
        trainable.

    Returns
    -------
    vars: [tf.Variable]
        list of variables in `scope`.
    """
    return tf1.get_collection(
        tf1.GraphKeys.TRAINABLE_VARIABLES
        if trainable_only else tf1.GraphKeys.VARIABLES,
        scope=scope if isinstance(scope, str) else scope.name)
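A minimal usage sketch for make_tf_callable (not from the source; the demo function and session setup are illustrative). In graph mode the wrapper builds placeholders on the first call and feeds them through the supplied session; in eager mode the function is returned unchanged:

import numpy as np

def demo():
    if tf.executing_eagerly():
        wrap = make_tf_callable(None)
    else:
        sess = tf1.Session()
        wrap = make_tf_callable(sess, dynamic_shape=True)

    @wrap
    def times_two(x):
        return 2 * x

    # Works for any batch size when dynamic_shape=True.
    print(times_two(np.array([1.0, 2.0, 3.0], dtype=np.float32)))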
version.go
package version

import (
	"bytes"
	"fmt"
)

var (
	// The git commit that was compiled. This will be filled in by the compiler.
	GitCommit   string
	GitDescribe string

	// The main version number that is being run at the moment.
	Version = "0.1.0"

	// A pre-release marker for the version. If this is "" (empty string)
	// then it means that it is a final release. Otherwise, this is a pre-release
	// such as "dev" (in development), "beta", "rc1", etc.
	VersionPrerelease = "rc1"

	// VersionMetadata is metadata further describing the build type.
	VersionMetadata = ""
)

// VersionInfo holds the version details of a build.
type VersionInfo struct {
	Revision          string
	Version           string
	VersionPrerelease string
	VersionMetadata   string
}

func
() *VersionInfo {
	ver := Version
	rel := VersionPrerelease
	md := VersionMetadata
	if GitDescribe != "" {
		ver = GitDescribe
	}
	// If this is not a tagged build and no pre-release marker is set,
	// mark it as a dev build.
	if GitDescribe == "" && rel == "" {
		rel = "dev"
	}

	return &VersionInfo{
		Revision:          GitCommit,
		Version:           ver,
		VersionPrerelease: rel,
		VersionMetadata:   md,
	}
}

func (c *VersionInfo) VersionNumber() string {
	version := c.Version

	if c.VersionPrerelease != "" {
		version = fmt.Sprintf("%s-%s", version, c.VersionPrerelease)
	}

	if c.VersionMetadata != "" {
		version = fmt.Sprintf("%s+%s", version, c.VersionMetadata)
	}

	return version
}

func (c *VersionInfo) FullVersionNumber(rev bool) string {
	var versionString bytes.Buffer

	fmt.Fprintf(&versionString, "aqueduct v%s", c.Version)
	if c.VersionPrerelease != "" {
		fmt.Fprintf(&versionString, "-%s", c.VersionPrerelease)
	}

	if c.VersionMetadata != "" {
		fmt.Fprintf(&versionString, "+%s", c.VersionMetadata)
	}

	if rev && c.Revision != "" {
		fmt.Fprintf(&versionString, " (%s)", c.Revision)
	}

	return versionString.String()
}
GetVersion
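A small usage sketch for the completed GetVersion (the main package and import path are assumptions, not part of the source):

package main

import (
	"fmt"

	"example.com/aqueduct/version" // import path is illustrative
)

func main() {
	v := version.GetVersion()
	fmt.Println(v.FullVersionNumber(true)) // prints e.g. "aqueduct v0.1.0-rc1"
}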
path_finder.py
directions = ['up', 'down', 'left', 'right']


def get_air_distance_between_two_points(point1, point2):
    x1 = point1['x']
    y1 = point1['y']
    x2 = point2['x']
    y2 = point2['y']
    distance = pow(pow((x2 - x1), 2) + pow((y2 - y1), 2), 0.5)
    return distance


def not_deadly_location_on_board(goal, deadly_locations, width, height):
    if goal[0] < 0 or goal[0] >= width or goal[1] < 0 or goal[1] >= height:
        return False
    if goal in deadly_locations:
        return False
    return True


def get_neighbours(tile):
def find_shortest_path(start, goal):
    pass


def list_of_reachable_tiles(start, deadly_locations, width, height):
    visited = []
    queue = [start]
    while queue:
        cur = queue.pop(0)
        visited.append(cur)
        for d in directions:
            cur_neighbour = next_field_with_tupel(d, cur)
            if cur_neighbour not in visited and cur_neighbour not in queue:
                if not_deadly_location_on_board(cur_neighbour, deadly_locations, width, height):
                    queue.append(cur_neighbour)
    return visited
pass
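list_of_reachable_tiles depends on next_field_with_tupel, which is not defined in this module. A minimal sketch of what it plausibly looks like, assuming tiles are (x, y) tuples and 'up' decreases y; the real game's coordinate convention may differ:

def next_field_with_tupel(direction, tile):
    # Hypothetical helper, not part of the original module.
    offsets = {'up': (0, -1), 'down': (0, 1), 'left': (-1, 0), 'right': (1, 0)}
    dx, dy = offsets[direction]
    return (tile[0] + dx, tile[1] + dy)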
delete_many_relations.rs
use query_engine_tests::*; #[test_suite] mod delete_many_rels { use indoc::indoc; use query_engine_tests::{run_query, Runner}; use query_test_macros::relation_link_test; // "a P1 to C1 relation " should "succeed when trying to delete the parent" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", id_only = true)] async fn p1_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" childOpt: { create: { c: "c1", c_1: "foo", c_2: "bar" } } }){ p childOpt{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a P1 to C1 relation " should "succeed when trying to delete the parent if there are no children" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt")] async fn p1_c1_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "lol" p_2: "zoop" }){ p childOpt{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a PM to C1! relation " should "succeed if no child exists that requires the parent" #[relation_link_test(on_parent = "ToMany", on_child = "ToOneReq")] async fn pm_c1_req_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "p1" p_2: "p2" }){ childrenOpt{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a P1 to C1! relation " should "succeed when trying to delete the parent if there is no child" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneReq")] async fn p1_c1_req_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "p1" p_2: "p2" }){ p } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a PM to C1 " should "succeed in deleting the parent" #[relation_link_test(on_parent = "ToMany", on_child = "ToOneOpt")] async fn pm_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" childrenOpt: { create: [{c: "c1", c_1: "foo", c_2: "bar"}, {c: "c2", c_1: "fqe", c_2: "asd"}] } }){ childrenOpt{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a PM to C1 " should "succeed in deleting the parent if there is no child" #[relation_link_test(on_parent = "ToMany", on_child = "ToOneOpt")] async fn pm_c1_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" }){ p } } "# }) .await? 
.assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a P1! to CM relation" should "should succeed in deleting the parent " #[relation_link_test(on_parent = "ToOneReq", on_child = "ToMany")] async fn p1_req_cm_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" childReq: { create: { c: "c1" c_1: "c_1" c_2: "c_2" } } }){ childReq{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a P1 to CM relation " should " should succeed in deleting the parent" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany")] async fn p1_cm(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" childOpt: { create: {c: "c1", c_1: "foo", c_2: "bar"} } }){ childOpt{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### );
} // "a P1 to CM relation " should " should succeed in deleting the parent if there is no child" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany")] async fn p1_cm_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" }){ p } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a PM to CM relation" should "succeed in deleting the parent" #[relation_link_test(on_parent = "ToMany", on_child = "ToMany")] async fn pm_cm(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" childrenOpt: { create: [{c: "c1", c_1: "foo", c_2: "bar"},{c: "c2", c_1: "q23", c_2: "lk"}] } }){ childrenOpt{ c } } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } // "a PM to CM relation" should "succeed in deleting the parent if there is no child" #[relation_link_test(on_parent = "ToMany", on_child = "ToMany")] async fn pm_cm_no_children(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" mutation { createOneParent(data: { p: "p1" p_1: "1" p_2: "2" }){ p } } "# }) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } fn additional_schema() -> String { let schema = indoc! { r#" model Parent { #id(id, Int, @id) #m2m(childrenOpt, Child[], Int) p String @unique stepChildId Int? stepChildOpt StepChild? @relation(fields: [stepChildId], references: [id]) } model Child { #id(id, Int, @id) #m2m(parentsOpt, Parent[], Int) c String @unique } model StepChild { #id(id, Int, @id) s String @unique parentOpt Parent? }"# }; schema.to_owned() } // "a PM to CM relation" should "delete the parent from other relations as well" #[connector_test(schema(additional_schema))] async fn pm_cm_other_relations(runner: Runner) -> TestResult<()> { runner .query( r#"mutation { createOneParent(data: { id: 1, p: "p1" childrenOpt: { create: [{id: 1, c: "c1"},{id: 2, c: "c2"}] } stepChildOpt: { create: {id: 1, s: "s1"} } }){ p } }"#, ) .await? .assert_success(); insta::assert_snapshot!( run_query!(runner, r#"mutation { deleteManyParent(where: { p: "p1" }) { count }}"#), @r###"{"data":{"deleteManyParent":{"count":1}}}"### ); Ok(()) } }
Ok(())
set.ts
import { any, resource } from '../../lib';

export const set = resource({
  type: any,
  async resolver(arr: Array<any>, index: number, value: any) {
    const newArr = arr.slice();
    newArr[index] = value;
    return newArr;
  },
});
heatmapSeriesData.d.ts
import { StoreModule } from "../../types/store/store";
declare const heatmapSeriesData: StoreModule;
export default heatmapSeriesData;
test_auth.py
import json
import stat
import pathlib
import platform

import globus_sdk
import requests

from dkist.net.globus.auth import (ensure_globus_authorized, get_cache_contents,
                                   get_cache_file_path, get_refresh_token_authorizer,
                                   save_auth_cache, start_local_server)


def test_http_server():
    server = start_local_server()
    redirect_uri = "http://{a[0]}:{a[1]}".format(a=server.server_address)
    inp_code = "wibble"

    requests.get(redirect_uri + f"?code={inp_code}")

    code = server.wait_for_code()

    assert code == inp_code


def test_get_cache_file_path(mocker):
    mocker.patch("appdirs.user_cache_dir", return_value="/tmp/test/")
    path = get_cache_file_path()
    assert isinstance(path, pathlib.Path)
    assert path.parent == pathlib.Path("/tmp/test")
    assert path.name == "globus_auth_cache.json"


def test_get_no_cache(mocker, tmpdir):
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    # Test the case where the cache file does not exist
    cache = get_cache_contents()
    assert isinstance(cache, dict)
    assert not cache


def test_get_cache(mocker, tmpdir):
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    with open(tmpdir / "globus_auth_cache.json", "w") as fd:
        json.dump({"hello": "world"}, fd)
    cache = get_cache_contents()
    assert isinstance(cache, dict)
    assert len(cache) == 1
    assert cache == {"hello": "world"}


def test_get_cache_not_json(mocker, tmpdir):
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    with open(tmpdir / "globus_auth_cache.json", "w") as fd:
        fd.write("aslkjdasdjjdlsajdjklasjdj, akldjaskldjasd, lkjasdkljasldkjas")
    cache = get_cache_contents()
    assert isinstance(cache, dict)
    assert not cache


def
(mocker, tmpdir):
    filename = tmpdir / "globus_auth_cache.json"
    assert not filename.exists()  # Sanity check
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    save_auth_cache({"hello": "world"})
    assert filename.exists()
    statinfo = filename.stat()

    # Test that the user can read and write
    assert bool(statinfo.mode & stat.S_IRUSR)
    assert bool(statinfo.mode & stat.S_IWUSR)
    if platform.system() != 'Windows':
        # Test that neither "Group" nor "Other" has read permissions
        assert not bool(statinfo.mode & stat.S_IRGRP)
        assert not bool(statinfo.mode & stat.S_IROTH)


def test_get_refresh_token_authorizer(mocker):
    # An example cache without real tokens
    cache = {
        "transfer.api.globus.org": {
            "scope": "urn:globus:auth:scope:transfer.api.globus.org:all",
            "access_token": "buscVeATmhfB0v1tzu8VmTfFRB1nwlF8bn1R9rQTI3Q",
            "refresh_token": "YSbLZowAHfmhxehUqeOF3lFvoC0FlTT11QGupfWAOX4",
            "token_type": "Bearer",
            "expires_at_seconds": 1553362861,
            "resource_server": "transfer.api.globus.org"
        }
    }
    mocker.patch("dkist.net.globus.auth.get_cache_contents", return_value=cache)
    auth = get_refresh_token_authorizer()['transfer.api.globus.org']
    assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
    assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]

    mocker.patch("dkist.net.globus.auth.do_native_app_authentication",
                 return_value=cache)
    auth = get_refresh_token_authorizer(force_reauth=True)['transfer.api.globus.org']
    assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
    assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]


def test_ensure_auth_decorator(mocker):
    error = globus_sdk.AuthAPIError(mocker.MagicMock())
    mocker.patch.object(error, "http_status", 400)
    mocker.patch.object(error, "message", "invalid_grant")
    reauth = mocker.patch("dkist.net.globus.auth.get_refresh_token_authorizer")
    called = [False]

    @ensure_globus_authorized
    def test_func():
        if not called[0]:
            called[0] = True
            raise error
        return True

    assert test_func()
    reauth.assert_called_once_with(force_reauth=True)
test_save_auth_cache
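For context, a sketch of how ensure_globus_authorized is presumably applied in client code; the wrapped function and its arguments are illustrative, not from the source. On an invalid_grant AuthAPIError the decorator can re-authenticate and retry the call:

@ensure_globus_authorized
def list_endpoint_files(transfer_client, endpoint_id):
    # transfer_client is assumed to be a globus_sdk.TransferClient.
    return transfer_client.operation_ls(endpoint_id)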
swiper-vue.esm.js
/**
 * Swiper Vue 6.8.3
 * Most modern mobile touch slider and framework with hardware accelerated transitions
 * https://swiperjs.com
 *
 * Copyright 2014-2021 Vladimir Kharlampidi
 *
 * Released under the MIT License
 *
 * Released on: August 20, 2021
 */

import { Swiper } from './esm/vue/swiper';
import { SwiperSlide } from './esm/vue/swiper-slide';
export { Swiper, SwiperSlide };
testservice_grpcproxy.pb.go
// Auto generated code by protoc-gen-go-grpcproxy // DO NOT EDIT // Adds OneMany versions of RPC methods for use by proxy clients package testdata import ( context "context" proxy "github.com/Snowflake-Labs/sansshell/proxy/proxy" grpc "google.golang.org/grpc" ) import ( "fmt" "io" ) // TestServiceClientProxy is the superset of TestServiceClient which additionally includes the OneMany proxy methods type TestServiceClientProxy interface { TestServiceClient TestUnaryOneMany(ctx context.Context, in *TestRequest, opts ...grpc.CallOption) (<-chan *TestUnaryManyResponse, error) TestServerStreamOneMany(ctx context.Context, in *TestRequest, opts ...grpc.CallOption) (TestService_TestServerStreamClientProxy, error) TestClientStreamOneMany(ctx context.Context, opts ...grpc.CallOption) (TestService_TestClientStreamClientProxy, error) TestBidiStreamOneMany(ctx context.Context, opts ...grpc.CallOption) (TestService_TestBidiStreamClientProxy, error) } // Embed the original client inside of this so we get the other generated methods automatically. type testServiceClientProxy struct { *testServiceClient } // NewTestServiceClientProxy creates a TestServiceClientProxy for use in proxied connections. // NOTE: This takes a proxy.Conn instead of a generic ClientConnInterface as the methods here are only valid in proxy.Conn contexts. func
(cc *proxy.Conn) TestServiceClientProxy { return &testServiceClientProxy{NewTestServiceClient(cc).(*testServiceClient)} } // TestUnaryManyResponse encapsulates a proxy data packet. // It includes the target, index, response and possible error returned. type TestUnaryManyResponse struct { Target string // As targets can be duplicated this is the index into the slice passed to proxy.Conn. Index int Resp *TestResponse Error error } // TestUnaryOneMany provides the same API as TestUnary but sends the same request to N destinations at once. // N can be a single destination. // // NOTE: The returned channel must be read until it closes in order to avoid leaking goroutines. func (c *testServiceClientProxy) TestUnaryOneMany(ctx context.Context, in *TestRequest, opts ...grpc.CallOption) (<-chan *TestUnaryManyResponse, error) { conn := c.cc.(*proxy.Conn) ret := make(chan *TestUnaryManyResponse) // If this is a single case we can just use Invoke and marshal it onto the channel once and be done. if len(conn.Targets) == 1 { go func() { out := &TestUnaryManyResponse{ Target: conn.Targets[0], Index: 0, Resp: &TestResponse{}, } err := conn.Invoke(ctx, "/Testdata.TestService/TestUnary", in, out.Resp, opts...) if err != nil { out.Error = err } // Send and close. ret <- out close(ret) }() return ret, nil } manyRet, err := conn.InvokeOneMany(ctx, "/Testdata.TestService/TestUnary", in, opts...) if err != nil { return nil, err } // A goroutine to retrive untyped responses and convert them to typed ones. go func() { for { typedResp := &TestUnaryManyResponse{ Resp: &TestResponse{}, } resp, ok := <-manyRet if !ok { // All done so we can shut down. close(ret) return } typedResp.Target = resp.Target typedResp.Index = resp.Index typedResp.Error = resp.Error if resp.Error == nil { if err := resp.Resp.UnmarshalTo(typedResp.Resp); err != nil { typedResp.Error = fmt.Errorf("can't decode any response - %v. Original Error - %v", err, resp.Error) } } ret <- typedResp } }() return ret, nil } // TestServerStreamManyResponse encapsulates a proxy data packet. // It includes the target, index, response and possible error returned. type TestServerStreamManyResponse struct { Target string // As targets can be duplicated this is the index into the slice passed to proxy.Conn. Index int Resp *TestResponse Error error } type TestService_TestServerStreamClientProxy interface { Recv() ([]*TestServerStreamManyResponse, error) grpc.ClientStream } type testServiceClientTestServerStreamClientProxy struct { cc *proxy.Conn directDone bool grpc.ClientStream } func (x *testServiceClientTestServerStreamClientProxy) Recv() ([]*TestServerStreamManyResponse, error) { var ret []*TestServerStreamManyResponse // If this is a direct connection the RecvMsg call is to a standard grpc.ClientStream // and not our proxy based one. This means we need to receive a typed response and // convert it into a single slice entry return. This ensures the OneMany style calls // can be used by proxy with 1:N targets and non proxy with 1 target without client changes. if x.cc.Direct() { // Check if we're done. Just return EOF now. Any real error was already sent inside // of a ManyResponse. if x.directDone { return nil, io.EOF } m := &TestResponse{} err := x.ClientStream.RecvMsg(m) ret = append(ret, &TestServerStreamManyResponse{ Resp: m, Error: err, Target: x.cc.Targets[0], Index: 0, }) // An error means we're done so set things so a later call now gets an EOF. 
if err != nil { x.directDone = true } return ret, nil } m := []*proxy.Ret{} if err := x.ClientStream.RecvMsg(&m); err != nil { return nil, err } for _, r := range m { typedResp := &TestServerStreamManyResponse{ Resp: &TestResponse{}, } typedResp.Target = r.Target typedResp.Index = r.Index typedResp.Error = r.Error if r.Error == nil { if err := r.Resp.UnmarshalTo(typedResp.Resp); err != nil { typedResp.Error = fmt.Errorf("can't decode any response - %v. Original Error - %v", err, r.Error) } } ret = append(ret, typedResp) } return ret, nil } // TestServerStreamOneMany provides the same API as TestServerStream but sends the same request to N destinations at once. // N can be a single destination. // // NOTE: The returned channel must be read until it closes in order to avoid leaking goroutines. func (c *testServiceClientProxy) TestServerStreamOneMany(ctx context.Context, in *TestRequest, opts ...grpc.CallOption) (TestService_TestServerStreamClientProxy, error) { stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[0], "/Testdata.TestService/TestServerStream", opts...) if err != nil { return nil, err } x := &testServiceClientTestServerStreamClientProxy{c.cc.(*proxy.Conn), false, stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } if err := x.ClientStream.CloseSend(); err != nil { return nil, err } return x, nil } // TestClientStreamManyResponse encapsulates a proxy data packet. // It includes the target, index, response and possible error returned. type TestClientStreamManyResponse struct { Target string // As targets can be duplicated this is the index into the slice passed to proxy.Conn. Index int Resp *TestResponse Error error } type TestService_TestClientStreamClientProxy interface { Send(*TestRequest) error CloseAndRecv() ([]*TestClientStreamManyResponse, error) grpc.ClientStream } type testServiceClientTestClientStreamClientProxy struct { cc *proxy.Conn directDone bool grpc.ClientStream } func (x *testServiceClientTestClientStreamClientProxy) Send(m *TestRequest) error { return x.ClientStream.SendMsg(m) } func (x *testServiceClientTestClientStreamClientProxy) CloseAndRecv() ([]*TestClientStreamManyResponse, error) { if err := x.ClientStream.CloseSend(); err != nil { return nil, err } var ret []*TestClientStreamManyResponse // If this is a direct connection the RecvMsg call is to a standard grpc.ClientStream // and not our proxy based one. This means we need to receive a typed response and // convert it into a single slice entry return. This ensures the OneMany style calls // can be used by proxy with 1:N targets and non proxy with 1 target without client changes. if x.cc.Direct() { // Check if we're done. Just return EOF now. Any real error was already sent inside // of a ManyResponse. if x.directDone { return nil, io.EOF } m := &TestResponse{} err := x.ClientStream.RecvMsg(m) ret = append(ret, &TestClientStreamManyResponse{ Resp: m, Error: err, Target: x.cc.Targets[0], Index: 0, }) // An error means we're done so set things so a later call now gets an EOF. if err != nil { x.directDone = true } return ret, nil } eof := make(map[int]bool) for i := range x.cc.Targets { eof[i] = false } for { // Need to allow all client channels to return state before we return since // no more Recv's will ever be called. 
done := true for _, v := range eof { if !v { done = false } } if done { break } m := []*proxy.Ret{} if err := x.ClientStream.RecvMsg(&m); err != nil { return nil, err } for _, r := range m { typedResp := &TestClientStreamManyResponse{ Resp: &TestResponse{}, } typedResp.Target = r.Target typedResp.Index = r.Index typedResp.Error = r.Error if r.Error == nil { if err := r.Resp.UnmarshalTo(typedResp.Resp); err != nil { typedResp.Error = fmt.Errorf("can't decode any response - %v. Original Error - %v", err, r.Error) } } ret = append(ret, typedResp) eof[r.Index] = true } } return ret, nil } // TestClientStreamOneMany provides the same API as TestClientStream but sends the same request to N destinations at once. // N can be a single destination. // // NOTE: The returned channel must be read until it closes in order to avoid leaking goroutines. func (c *testServiceClientProxy) TestClientStreamOneMany(ctx context.Context, opts ...grpc.CallOption) (TestService_TestClientStreamClientProxy, error) { stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[1], "/Testdata.TestService/TestClientStream", opts...) if err != nil { return nil, err } x := &testServiceClientTestClientStreamClientProxy{c.cc.(*proxy.Conn), false, stream} return x, nil } // TestBidiStreamManyResponse encapsulates a proxy data packet. // It includes the target, index, response and possible error returned. type TestBidiStreamManyResponse struct { Target string // As targets can be duplicated this is the index into the slice passed to proxy.Conn. Index int Resp *TestResponse Error error } type TestService_TestBidiStreamClientProxy interface { Send(*TestRequest) error Recv() ([]*TestBidiStreamManyResponse, error) grpc.ClientStream } type testServiceClientTestBidiStreamClientProxy struct { cc *proxy.Conn directDone bool grpc.ClientStream } func (x *testServiceClientTestBidiStreamClientProxy) Send(m *TestRequest) error { return x.ClientStream.SendMsg(m) } func (x *testServiceClientTestBidiStreamClientProxy) Recv() ([]*TestBidiStreamManyResponse, error) { var ret []*TestBidiStreamManyResponse // If this is a direct connection the RecvMsg call is to a standard grpc.ClientStream // and not our proxy based one. This means we need to receive a typed response and // convert it into a single slice entry return. This ensures the OneMany style calls // can be used by proxy with 1:N targets and non proxy with 1 target without client changes. if x.cc.Direct() { // Check if we're done. Just return EOF now. Any real error was already sent inside // of a ManyResponse. if x.directDone { return nil, io.EOF } m := &TestResponse{} err := x.ClientStream.RecvMsg(m) ret = append(ret, &TestBidiStreamManyResponse{ Resp: m, Error: err, Target: x.cc.Targets[0], Index: 0, }) // An error means we're done so set things so a later call now gets an EOF. if err != nil { x.directDone = true } return ret, nil } m := []*proxy.Ret{} if err := x.ClientStream.RecvMsg(&m); err != nil { return nil, err } for _, r := range m { typedResp := &TestBidiStreamManyResponse{ Resp: &TestResponse{}, } typedResp.Target = r.Target typedResp.Index = r.Index typedResp.Error = r.Error if r.Error == nil { if err := r.Resp.UnmarshalTo(typedResp.Resp); err != nil { typedResp.Error = fmt.Errorf("can't decode any response - %v. Original Error - %v", err, r.Error) } } ret = append(ret, typedResp) } return ret, nil } // TestBidiStreamOneMany provides the same API as TestBidiStream but sends the same request to N destinations at once. // N can be a single destination. 
// // NOTE: The returned channel must be read until it closes in order to avoid leaking goroutines. func (c *testServiceClientProxy) TestBidiStreamOneMany(ctx context.Context, opts ...grpc.CallOption) (TestService_TestBidiStreamClientProxy, error) { stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[2], "/Testdata.TestService/TestBidiStream", opts...) if err != nil { return nil, err } x := &testServiceClientTestBidiStreamClientProxy{c.cc.(*proxy.Conn), false, stream} return x, nil }
NewTestServiceClientProxy
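Since the generated comments stress that the OneMany channel must be read until it closes, here is a minimal consumption sketch (assumed to live alongside the generated package; the function name is illustrative):

func callTestUnaryEverywhere(ctx context.Context, c TestServiceClientProxy) error {
	respCh, err := c.TestUnaryOneMany(ctx, &TestRequest{})
	if err != nil {
		return err
	}
	// Read until the channel closes; stopping early leaks the sender goroutine.
	for r := range respCh {
		if r.Error != nil {
			fmt.Printf("target %s (index %d) failed: %v\n", r.Target, r.Index, r.Error)
			continue
		}
		fmt.Printf("target %s (index %d): %v\n", r.Target, r.Index, r.Resp)
	}
	return nil
}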
token.rs
use super::frontend_prelude::*; use crate::models::ApiToken; use crate::schema::api_tokens; use crate::util::read_fill; use crate::views::EncodableApiTokenWithToken; use conduit::{Body, Response}; use serde_json as json; /// Handles the `GET /me/tokens` route. pub fn list(req: &mut dyn RequestExt) -> EndpointResult { let authenticated_user = req.authenticate()?.forbid_api_token_auth()?; let conn = req.db_conn()?; let user = authenticated_user.user(); let tokens = ApiToken::belonging_to(&user) .filter(api_tokens::revoked.eq(false)) .order(api_tokens::created_at.desc()) .load(&*conn)?; #[derive(Serialize)] struct R { api_tokens: Vec<ApiToken>, } Ok(req.json(&R { api_tokens: tokens })) } /// Handles the `PUT /me/tokens` route. pub fn new(req: &mut dyn RequestExt) -> EndpointResult { /// The incoming serialization format for the `ApiToken` model. #[derive(Deserialize, Serialize)] struct NewApiToken { name: String, } /// The incoming serialization format for the `ApiToken` model. #[derive(Deserialize, Serialize)] struct NewApiTokenRequest { api_token: NewApiToken, } let max_size = 2000; let length = req .content_length() .chain_error(|| bad_request("missing header: Content-Length"))?; if length > max_size { return Err(bad_request(&format!("max content length is: {}", max_size))); } let mut json = vec![0; length as usize]; read_fill(req.body(), &mut json)?; let json = String::from_utf8(json).map_err(|_| bad_request(&"json body was not valid utf-8"))?; let new: NewApiTokenRequest = json::from_str(&json) .map_err(|e| bad_request(&format!("invalid new token request: {:?}", e)))?; let name = &new.api_token.name; if name.is_empty() { return Err(bad_request("name must have a value")); } let authenticated_user = req.authenticate()?; if authenticated_user.api_token_id().is_some() { return Err(bad_request( "cannot use an API token to create a new API token", )); } let conn = req.db_conn()?; let user = authenticated_user.user(); let max_token_per_user = 500; let count: i64 = ApiToken::belonging_to(&user).count().get_result(&*conn)?; if count >= max_token_per_user { return Err(bad_request(&format!( "maximum tokens per user is: {}", max_token_per_user ))); } let api_token = ApiToken::insert(&*conn, user.id, name)?;
} Ok(req.json(&R { api_token: api_token.into(), })) } /// Handles the `DELETE /me/tokens/:id` route. pub fn revoke(req: &mut dyn RequestExt) -> EndpointResult { let id = req.params()["id"] .parse::<i32>() .map_err(|e| bad_request(&format!("invalid token id: {:?}", e)))?; let authenticated_user = req.authenticate()?; let conn = req.db_conn()?; let user = authenticated_user.user(); diesel::update(ApiToken::belonging_to(&user).find(id)) .set(api_tokens::revoked.eq(true)) .execute(&*conn)?; #[derive(Serialize)] struct R {} Ok(req.json(&R {})) } /// Handles the `DELETE /tokens/current` route. pub fn revoke_current(req: &mut dyn RequestExt) -> EndpointResult { let authenticated_user = req.authenticate()?; let api_token_id = authenticated_user .api_token_id() .ok_or_else(|| bad_request("token not provided"))?; let conn = req.db_conn()?; diesel::update(api_tokens::table.filter(api_tokens::id.eq(api_token_id))) .set(api_tokens::revoked.eq(true)) .execute(&*conn)?; Ok(Response::builder().status(204).body(Body::empty()).unwrap()) }
#[derive(Serialize)] struct R { api_token: EncodableApiTokenWithToken,
route.go
package route import ( "github.com/gorilla/mux" textbox "github.com/oms-services/machinebox-textbox/textbox"
) //Route struct type Route struct { Name string Method string Pattern string HandlerFunc http.HandlerFunc } //Routes array type Routes []Route var routes = Routes{ Route{ "TextAnalyze", "POST", "/textAnalyze", textbox.TextAnalyze, }, } //NewRouter route func NewRouter() *mux.Router { router := mux.NewRouter().StrictSlash(true) for _, route := range routes { var handler http.Handler log.Println(route.Name) handler = route.HandlerFunc router. Methods(route.Method). Path(route.Pattern). Name(route.Name). Handler(handler) } return router }
"log" "net/http"
base.py
""" MySQL database backend for Django. Requires MySQLdb: http://sourceforge.net/projects/mysql-python """ import re try: import MySQLdb as Database except ImportError, e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e) # We want version (1, 2, 1, 'final', 2) or later. We can't just use # lexicographic ordering in this check because then (1, 2, 1, 'gamma') # inadvertently passes the version test. version = Database.version_info if (version < (1,2,1) or (version[:3] == (1, 2, 1) and (len(version) < 5 or version[3] != 'final' or version[4] < 2))): from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__) from MySQLdb.converters import conversions from MySQLdb.constants import FIELD_TYPE, FLAG, CLIENT from django.db.backends import * from django.db.backends.signals import connection_created from django.db.backends.mysql.client import DatabaseClient from django.db.backends.mysql.creation import DatabaseCreation from django.db.backends.mysql.introspection import DatabaseIntrospection from django.db.backends.mysql.validation import DatabaseValidation from django.utils.safestring import SafeString, SafeUnicode # Raise exceptions for database warnings if DEBUG is on from django.conf import settings if settings.DEBUG: from warnings import filterwarnings filterwarnings("error", category=Database.Warning) DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError # MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like # timedelta in terms of actual behavior as they are signed and include days -- # and Django expects time, so we still need to override that. We also need to # add special handling for SafeUnicode and SafeString as MySQLdb's type # checking is too tight to catch those (see Django ticket #6052). django_conversions = conversions.copy() django_conversions.update({ FIELD_TYPE.TIME: util.typecast_time, FIELD_TYPE.DECIMAL: util.typecast_decimal, FIELD_TYPE.NEWDECIMAL: util.typecast_decimal, }) # This should match the numerical portion of the version numbers (we can treat # versions like 5.0.24 and 5.0.24a as the same). Based on the list of version # at http://dev.mysql.com/doc/refman/4.1/en/news.html and # http://dev.mysql.com/doc/refman/5.0/en/news.html . server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})') # MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on # MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the # point is to raise Warnings as exceptions, this can be done with the Python # warning module, and this is setup when the connection is created, and the # standard util.CursorDebugWrapper can be used. Also, using sql_mode # TRADITIONAL will automatically cause most warnings to be treated as errors. class CursorWrapper(object): """ A thin wrapper around MySQLdb's normal cursor class so that we can catch particular exception instances and reraise them with the right types. Implemented as a wrapper, rather than a subclass, so that we aren't stuck to the particular underlying representation returned by Connection.cursor(). 
""" codes_for_integrityerror = (1048,) def __init__(self, cursor): self.cursor = cursor def execute(self, query, args=None): try: return self.cursor.execute(query, args) except Database.OperationalError, e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e[0] in self.codes_for_integrityerror: raise Database.IntegrityError(tuple(e)) raise def executemany(self, query, args): try: return self.cursor.executemany(query, args) except Database.OperationalError, e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e[0] in self.codes_for_integrityerror: raise Database.IntegrityError(tuple(e)) raise def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] else: return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () update_can_self_select = False allows_group_by_pk = True related_fields_match_type = True class DatabaseOperations(BaseDatabaseOperations): def date_extract_sql(self, lookup_type, field_name): # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html if lookup_type == 'week_day': # DAYOFWEEK() returns an integer, 1-7, Sunday=1. # Note: WEEKDAY() returns 0-6, Monday=0. return "DAYOFWEEK(%s)" % field_name else: return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name): fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape. format_def = ('0000-', '01', '-01', ' 00:', '00', ':00') try: i = fields.index(lookup_type) + 1 except ValueError: sql = field_name else: format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]]) sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql def drop_foreignkey_sql(self):
def force_no_ordering(self): """ "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped columns. If no ordering would otherwise be applied, we don't want any implicit sorting going on. """ return ["NULL"] def fulltext_search_sql(self, field_name): return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name def no_limit_value(self): # 2**64 - 1, as recommended by the MySQL documentation return 18446744073709551615L def quote_name(self, name): if name.startswith("`") and name.endswith("`"): return name # Quoting once is enough. return "`%s`" % name def random_function_sql(self): return 'RAND()' def sql_flush(self, style, tables, sequences): # NB: The generated SQL below is specific to MySQL # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements # to clear all tables of all data if tables: sql = ['SET FOREIGN_KEY_CHECKS = 0;'] for table in tables: sql.append('%s %s;' % (style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table)))) sql.append('SET FOREIGN_KEY_CHECKS = 1;') # 'ALTER TABLE table AUTO_INCREMENT = 1;'... style SQL statements # to reset sequence indices sql.extend(["%s %s %s %s %s;" % \ (style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_TABLE(self.quote_name(sequence['table'])), style.SQL_KEYWORD('AUTO_INCREMENT'), style.SQL_FIELD('= 1'), ) for sequence in sequences]) return sql else: return [] def value_to_db_datetime(self, value): if value is None: return None # MySQL doesn't support tz-aware datetimes if value.tzinfo is not None: raise ValueError("MySQL backend does not support timezone-aware datetimes.") # MySQL doesn't support microseconds return unicode(value.replace(microsecond=0)) def value_to_db_time(self, value): if value is None: return None # MySQL doesn't support tz-aware datetimes if value.tzinfo is not None: raise ValueError("MySQL backend does not support timezone-aware datetimes.") # MySQL doesn't support microseconds return unicode(value.replace(microsecond=0)) def year_lookup_bounds(self, value): # Again, no microseconds first = '%s-01-01 00:00:00' second = '%s-12-31 23:59:59.99' return [first % value, second % value] class DatabaseWrapper(BaseDatabaseWrapper): operators = { 'exact': '= %s', 'iexact': 'LIKE %s', 'contains': 'LIKE BINARY %s', 'icontains': 'LIKE %s', 'regex': 'REGEXP BINARY %s', 'iregex': 'REGEXP %s', 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': 'LIKE BINARY %s', 'endswith': 'LIKE BINARY %s', 'istartswith': 'LIKE %s', 'iendswith': 'LIKE %s', } def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.server_version = None self.features = DatabaseFeatures() self.ops = DatabaseOperations() self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) self.validation = DatabaseValidation() def _valid_connection(self): if self.connection is not None: try: self.connection.ping() return True except DatabaseError: self.connection.close() self.connection = None return False def _cursor(self): if not self._valid_connection(): kwargs = { 'conv': django_conversions, 'charset': 'utf8', 'use_unicode': True, } settings_dict = self.settings_dict if settings_dict['DATABASE_USER']: kwargs['user'] = settings_dict['DATABASE_USER'] if settings_dict['DATABASE_NAME']: kwargs['db'] = settings_dict['DATABASE_NAME'] if settings_dict['DATABASE_PASSWORD']: kwargs['passwd'] = settings_dict['DATABASE_PASSWORD'] if settings_dict['DATABASE_HOST'].startswith('/'): kwargs['unix_socket'] = 
settings_dict['DATABASE_HOST'] elif settings_dict['DATABASE_HOST']: kwargs['host'] = settings_dict['DATABASE_HOST'] if settings_dict['DATABASE_PORT']: kwargs['port'] = int(settings_dict['DATABASE_PORT']) # We need the number of potentially affected rows after an # "UPDATE", not the number of changed rows. kwargs['client_flag'] = CLIENT.FOUND_ROWS kwargs.update(settings_dict['DATABASE_OPTIONS']) self.connection = Database.connect(**kwargs) self.connection.encoders[SafeUnicode] = self.connection.encoders[unicode] self.connection.encoders[SafeString] = self.connection.encoders[str] connection_created.send(sender=self.__class__) cursor = CursorWrapper(self.connection.cursor()) return cursor def _rollback(self): try: BaseDatabaseWrapper._rollback(self) except Database.NotSupportedError: pass def get_server_version(self): if not self.server_version: if not self._valid_connection(): self.cursor() m = server_version_re.match(self.connection.get_server_info()) if not m: raise Exception('Unable to determine MySQL version from version string %r' % self.connection.get_server_info()) self.server_version = tuple([int(x) for x in m.groups()]) return self.server_version
return "DROP FOREIGN KEY"
code.py
#!/bin/python3 import math import os import random import re import sys from functools import cache import time from bisect import bisect,insort @cache def get_sub_sum(temp_sum,removed,added,modulo):
def maximumSum_iter_2(a, m, a_sum): if len(a) == 0: return 0 if len(a) == 1: return a[0]%m first = a[0] last = a[-1] return max( a_sum, maximumSum_iter_2(a[1:],m,get_sub_sum(a_sum,first,0,m)), maximumSum_iter_2(a[0:-1],m,get_sub_sum(a_sum,last,0,m)) ) def maximumSum_2(a, m): return maximumSum_iter_2(a,m,sum(a)%m) def maximumSum_iter_3(a, m, a_sum, do_left=True): if len(a) == 0: return 0 if len(a) == 1: return a[0]%m first = a[0] last = a[-1] return max( a_sum, maximumSum_iter_3(a[1:],m,get_sub_sum(a_sum,first,0,m)) if do_left else a_sum, maximumSum_iter_3(a[0:-1],m,get_sub_sum(a_sum,last,0,m),do_left=False) ) def maximumSum_3(a, m): return maximumSum_iter_3(a,m,sum(a)%m) def maxSubarray(a,m): N = len(a) cumulative_sums = [] sum_so_far = 0 max_sum = 0 for i in range(N): sum_so_far = (sum_so_far + a[i]) % m pos = bisect(cumulative_sums, sum_so_far) d = 0 if pos == i else cumulative_sums[pos] max_sum = max(max_sum, (sum_so_far + m - d) % m) insort(cumulative_sums, sum_so_far) return max_sum def maximumSum_1(a, m): best_sub_a_sum = 0 for l in range(1,len(a)+1): temp_sum = sum(a[0:l])%m if temp_sum>best_sub_a_sum: best_sub_a_sum = temp_sum for i in range(1,len(a)-l+1): temp_sum = get_sub_sum(temp_sum,a[i-1],a[i+l-1],m) if temp_sum>best_sub_a_sum: best_sub_a_sum = temp_sum return best_sub_a_sum if __name__ == '__main__': with open("./test_cases/case_1.txt") as test_case: with open("./test_cases/case_1_solutions.txt") as solutions: q = int(test_case.readline().strip()) print("tot cases: ",q) max_1_time = 0 max_2_time = 0 max_3_time = 0 for i in range(q): first_multiple_input = test_case.readline().rstrip().split() n = int(first_multiple_input[0]) m = int(first_multiple_input[1]) a = list(map(int, test_case.readline().rstrip().split())) solution = int(solutions.readline().rstrip()) start_time = time.time() r1 = maximumSum_1(a, m) time_1 = time.time()-start_time max_1_time += time_1 start_time = time.time() r2= maxSubarray(a, m) time_2 = time.time()-start_time max_2_time += time_2 start_time = time.time() r3= maximumSum_3(a, m) time_3 = time.time()-start_time max_3_time += time_3 if (time_1 > 0.5 or time_2 > 0.5 or time_3 > 0.5): print(f"{i} {time_1} {time_2} {time_3}") if (r1 != solution or r2 != solution or r3 != solution): print(f"{i} {r1} {r2} {r3} {solution}") print("1:{} 2:{} 3:{}".format(max_1_time/q,max_2_time/q,max_3_time/q))
return (temp_sum-removed+added)%modulo
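maxSubarray above is the O(n log n) solution to the maximum-subarray-sum-modulo-m problem: it keeps the running prefix sums mod m in a sorted list and, for each new prefix s, looks up the smallest earlier prefix strictly greater than s, since subtracting that one maximizes (s - d) mod m. A short worked trace of the same loop on an assumed sample input:

from bisect import bisect, insort

a, m = [3, 3, 9, 9, 5], 7           # sample input; prefix sums mod 7 are 3, 6, 1, 3, 1
prefixes, s, best = [], 0, 0
for i, x in enumerate(a):
    s = (s + x) % m
    pos = bisect(prefixes, s)       # index of the smallest earlier prefix > s, if any
    d = 0 if pos == i else prefixes[pos]   # d = 0 when no earlier prefix exceeds s
    best = max(best, (s + m - d) % m)
    insort(prefixes, s)             # keep the prefix list sorted for the next bisect
print(best)                         # 6, e.g. the subarray [3, 3] gives 6 % 7 == 6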
kex2provisionee2.go
// Auto-generated to Go types and interfaces using avdl-compiler v1.4.2 (https://github.com/keybase/node-avdl-compiler) // Input file: avdl/keybase1/kex2provisionee2.avdl package keybase1 import ( "github.com/keybase/go-framed-msgpack-rpc/rpc" context "golang.org/x/net/context" ) type Hello2Res struct { EncryptionKey KID `codec:"encryptionKey" json:"encryptionKey"` SigPayload HelloRes `codec:"sigPayload" json:"sigPayload"` DeviceEkKID KID `codec:"deviceEkKID" json:"deviceEkKID"` } func (o Hello2Res) DeepCopy() Hello2Res { return Hello2Res{ EncryptionKey: o.EncryptionKey.DeepCopy(), SigPayload: o.SigPayload.DeepCopy(), DeviceEkKID: o.DeviceEkKID.DeepCopy(), } } type PerUserKeyBox struct { Generation PerUserKeyGeneration `codec:"generation" json:"generation"` Box string `codec:"box" json:"box"` ReceiverKID KID `codec:"receiverKID" json:"receiver_kid"` } func (o PerUserKeyBox) DeepCopy() PerUserKeyBox { return PerUserKeyBox{ Generation: o.Generation.DeepCopy(), Box: o.Box, ReceiverKID: o.ReceiverKID.DeepCopy(), } } type Hello2Arg struct { Uid UID `codec:"uid" json:"uid"` Token SessionToken `codec:"token" json:"token"` Csrf CsrfToken `codec:"csrf" json:"csrf"` SigBody string `codec:"sigBody" json:"sigBody"` } type DidCounterSign2Arg struct { Sig []byte `codec:"sig" json:"sig"` PpsEncrypted string `codec:"ppsEncrypted" json:"ppsEncrypted"` PukBox *PerUserKeyBox `codec:"pukBox,omitempty" json:"pukBox,omitempty"` UserEkBox *UserEkBoxed `codec:"userEkBox,omitempty" json:"userEkBox,omitempty"` } type Kex2Provisionee2Interface interface { Hello2(context.Context, Hello2Arg) (Hello2Res, error) DidCounterSign2(context.Context, DidCounterSign2Arg) error } func
(i Kex2Provisionee2Interface) rpc.Protocol { return rpc.Protocol{ Name: "keybase.1.Kex2Provisionee2", Methods: map[string]rpc.ServeHandlerDescription{ "hello2": { MakeArg: func() interface{} { var ret [1]Hello2Arg return &ret }, Handler: func(ctx context.Context, args interface{}) (ret interface{}, err error) { typedArgs, ok := args.(*[1]Hello2Arg) if !ok { err = rpc.NewTypeError((*[1]Hello2Arg)(nil), args) return } ret, err = i.Hello2(ctx, typedArgs[0]) return }, }, "didCounterSign2": { MakeArg: func() interface{} { var ret [1]DidCounterSign2Arg return &ret }, Handler: func(ctx context.Context, args interface{}) (ret interface{}, err error) { typedArgs, ok := args.(*[1]DidCounterSign2Arg) if !ok { err = rpc.NewTypeError((*[1]DidCounterSign2Arg)(nil), args) return } err = i.DidCounterSign2(ctx, typedArgs[0]) return }, }, }, } } type Kex2Provisionee2Client struct { Cli rpc.GenericClient } func (c Kex2Provisionee2Client) Hello2(ctx context.Context, __arg Hello2Arg) (res Hello2Res, err error) { err = c.Cli.Call(ctx, "keybase.1.Kex2Provisionee2.hello2", []interface{}{__arg}, &res) return } func (c Kex2Provisionee2Client) DidCounterSign2(ctx context.Context, __arg DidCounterSign2Arg) (err error) { err = c.Cli.Call(ctx, "keybase.1.Kex2Provisionee2.didCounterSign2", []interface{}{__arg}, nil) return }
Kex2Provisionee2Protocol
fixtures.go
/* Copyright © 2021 NAME HERE <EMAIL ADDRESS> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "github.com/wwwillian/codepix-go/domain/model" "github.com/wwwillian/codepix-go/infrastructure/db" "github.com/wwwillian/codepix-go/infrastructure/repository" "os" "github.com/spf13/cobra" ) // fixturesCmd represents the fixtures command var fixturesCmd = &cobra.Command{ Use: "fixtures", Short: "Run fixture for fake data generation", Run: func(cmd *cobra.Command, args []string) { database := db.ConnectDB(os.Getenv("env")) defer database.Close() pixRepository := repository.PixKeyRepositoryDb{Db: database} bankNubank, _ := model.NewBank("111", "Nubank") pixRepository.AddBank(bankNubank) bankItau, _ := model.NewBank("222", "Itau") pixRepository.AddBank(bankItau) accountA, _ := model.NewAccount(bankNubank, "1010", "Willian A") accountA.ID = "6e4635ce-88d1-4e58-9597-d13fc446ee47" pixRepository.AddAccount(accountA) accountB, _ := model.NewAccount(bankNubank, "2020", "Maria B") accountB.ID = "51a720b2-5144-4d7f-921d-57023b1e24c1" pixRepository.AddAccount(accountB) accountC, _ := model.NewAccount(bankItau, "3030", "User CTER 1") accountC.ID = "103cc632-78e7-4476-ab63-d5ad3a562d26" pixRepository.AddAccount(accountC) accountD, _ := model.NewAccount(bankItau, "4040", "User CTER 2") accountD.ID = "463b1b2a-b5fa-4b88-9c31-e5c894a20ae3" pixRepository.AddAccount(accountD) }, } func init() { rootCmd.AddCommand(fixturesCmd)
// Here you will define your flags and configuration settings. // Cobra supports Persistent Flags which will work for this command // and all subcommands, e.g.: // fixturesCmd.PersistentFlags().String("foo", "", "A help for foo") // Cobra supports local flags which will only run when this command // is called directly, e.g.: // fixturesCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") }
awsparameterstore_loader_test.py
"""Unit tests for aws parameter store interactions""" import importlib import sys from typing import Generator from unittest.mock import patch import pytest from secretbox import awsparameterstore_loader as ssm_loader_module from secretbox.awsparameterstore_loader import AWSParameterStore from tests.conftest import TEST_LIST from tests.conftest import TEST_PATH from tests.conftest import TEST_REGION from tests.conftest import TEST_STORE from tests.conftest import TEST_STORE2 from tests.conftest import TEST_STORE3 from tests.conftest import TEST_VALUE @pytest.fixture def
() -> Generator[AWSParameterStore, None, None]: """Create a fixture to test with""" clazz = AWSParameterStore() assert not clazz.loaded_values yield clazz @pytest.mark.usefixtures("mask_aws_creds", "parameterstore") def test_boto3_not_installed_auto_load(loader: AWSParameterStore) -> None: """Silently skip loading AWS parameter store if no boto3""" with patch.object(ssm_loader_module, "boto3", None): assert not loader.loaded_values assert not loader.load_values(aws_sstore=TEST_PATH, aws_region=TEST_REGION) assert not loader.loaded_values def test_boto3_missing_import_catch() -> None: """Reload loadenv without boto3""" with patch.dict(sys.modules, {"boto3": None}): importlib.reload(ssm_loader_module) assert ssm_loader_module.boto3 is None # Reload after test to avoid pollution importlib.reload(ssm_loader_module) @pytest.mark.parametrize( ("prefix", "region", "expectedCnt"), ( (TEST_PATH, TEST_REGION, 33), # correct, root node (f"{TEST_PATH}{TEST_STORE}/", TEST_REGION, 30), # correct, child node (TEST_STORE, TEST_REGION, 0), # wrong prefix (None, TEST_REGION, 0), # no prefix (TEST_PATH, "us-east-2", 0), # wrong region (TEST_PATH, None, 0), # no region ), ) @pytest.mark.usefixtures("mask_aws_creds", "parameterstore") def test_count_parameters( loader: AWSParameterStore, prefix: str, region: str, expectedCnt: int, ) -> None: """Load a parameter from mocked loader""" # nothing has been loaded assert loader.loaded_values.get(prefix) is None # loading succeeded if expectedCnt > 0: # don't assert this if we're trying to make it fail! assert loader.load_values(aws_sstore_name=prefix, aws_region_name=region) else: loader.load_values(aws_sstore_name=prefix, aws_region_name=region) # loaded the proper number of parameters assert len(loader.loaded_values) == expectedCnt @pytest.mark.usefixtures("mask_aws_creds", "parameterstore") def test_parameter_values( loader: AWSParameterStore, ) -> None: """compare parameters from mocked loader to what we put in there""" # loading succeeded assert loader.load_values(aws_sstore_name=TEST_PATH, aws_region_name=TEST_REGION) # both our parameters exist and have the expected value assert loader.loaded_values.get(TEST_STORE) == TEST_VALUE assert loader.loaded_values.get(TEST_STORE2) == TEST_VALUE assert loader.loaded_values.get(TEST_STORE3) == TEST_LIST
loader
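test_boto3_missing_import_catch above relies on a general trick: placing None under a module's name in sys.modules makes any subsequent import of it raise ImportError, so reloading a module whose import is wrapped in try/except drives it down its fallback path. A minimal sketch of the pattern, where mymodule is a hypothetical module that guards its boto3 import:

import importlib
import sys
from unittest.mock import patch

import mymodule  # hypothetical; contains: try: import boto3 / except ImportError: boto3 = None

def test_degrades_without_boto3():
    with patch.dict(sys.modules, {"boto3": None}):
        importlib.reload(mymodule)   # re-runs the guarded import, which now fails
        assert mymodule.boto3 is None
    importlib.reload(mymodule)       # reload again so other tests see the real module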
main.py
from scripts.downloader import * import fiona from shapely.geometry import shape import geopandas as gpd import matplotlib.pyplot as plt from pprint import pprint import requests import json import time import os # Constant variables input_min_lat = 50.751797561 input_min_lon = 5.726110232 input_max_lat = 50.938216069 input_max_lon = 6.121604582 route_search_url = "https://api.routeyou.com/2.0/json/Route/k-9aec2fc1705896b901c3ea17d6223f0a/mapSearch" route_search_headers = {"Accept": "*/*", "Accept-Encoding": "gzip, deflate, br", "Accept-Language": "nl,en-US;q=0.7,en;q=0.3", "Connection": "keep-alive", "Content-Length": "331", "Content-Type": "text/plain;charset=UTF-8", "DNT": "1", "Host": "api.routeyou.com", "Origin": "https://www.routeyou.com", "Referer": "https://www.routeyou.com/route/search/2/walking-route-search", "TE": "Trailers", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0"} default_headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", "Accept-Encoding": "gzip, deflate, br", "Accept-Language": "nl,en-US;q=0.7,en;q=0.3", "Connection": "test", "Cookie": "rtysid=5gf59rik6gf8o7b5an7nalcsh0; " "_ga=GA1.2.1811204879.1553438381; _" "gid=GA1.2.1815573989.1553438381; __" "gads=ID=fab95f7aaf65227e:T=1553438384:S=ALNI_MaIjkdo1dKpYiyQKfWZEymqT7HgUQ", "Host": "download.routeyou.com", "Referer": "https://www.routeyou.com/nl-be/route/view/5653357/wandelroute/" "in-het-spoor-van-napoleon-kasteel-reinhardstein-en-de-stuwdam-van-robertville", "TE": "Trailers", "Upgrade-Insecure-Requests": "1", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:65.0) Gecko/20100101 Firefox/65.0"} # # Setup script # bounding_boxes_list = create_bounding_boxes(input_min_lat, input_min_lon, input_max_lat, input_max_lon, # nr_of_rows=12, nr_of_columns=12) # for index, bounding_box in enumerate(bounding_boxes_list): # route_search_data = '{"jsonrpc":"2.0","id":"3","method":"searchAdvanced","params":' \ # '[{"bounds":{"min":{"lat":%s,"lon":%s},"max":{"lat":%s,"lon":%s}},' \ # '"type.id":2,"score.min":0.5,"bounds.comparator":"geometry"},null,100,0,' \ # '{"clusters":false,"addLanguage":"en","media":false,"description":false}]}' \ # % (bounding_box['min_lat'], bounding_box['min_lon'], bounding_box['max_lat'], bounding_box['max_lon']) # response = requests.post(url=route_search_url, headers=route_search_headers, # data=route_search_data) # with open("D:/Wandelroutes/Text/routes_{}.txt".format(index), "wb") as file: # file.write(response.content) # data = json.loads(response.content) # print("Index / routes count / total routes: ", index, "/", len(data['result']['routes']), "/", data['result']['total']) # # for route in data['result']['routes']: # time.sleep(0.5) # route_url = "https://download.routeyou.com/k-9aec2fc1705896b901c3ea17d6223f0a/route/{}.gpx?language=nl".format(route['id']) # filepath = "D:/Wandelroutes/GPX/{}.gpx".format(route['id']) # download_to_file(route_url, default_headers, filepath) dir_filepath = "D:/Wandelroutes/GPX" filenames = os.listdir(dir_filepath) rows_list = [] for filename in filenames: layer = fiona.open(os.path.join(dir_filepath, filename), layer='tracks') geom = layer[0] route_name = geom['properties']['name'] route_geodata = {'type': 'MultiLineString', 'coordinates': geom['geometry']['coordinates']} route_geometry = shape(route_geodata) route_id = os.path.splitext(os.path.basename(filename))[0] route_dict = {'id': str(route_id), 'name': route_name, 'url': 
"https://www.routeyou.com/nl-nl/route/view/" + str(route_id), 'geometry': route_geometry} rows_list.append(route_dict) routes_gdf = gpd.GeoDataFrame(rows_list) routes_gdf.crs = {'init': 'epsg:4326', 'no_defs': True}
routes_gdf.to_file("D:/Wandelroutes/walking_routes.shp")
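create_bounding_boxes comes from scripts.downloader and is not shown here; judging by how the commented-out setup script indexes its result, it tiles the overall box into an nr_of_rows by nr_of_columns grid so each mapSearch request stays under the API's 100-result page. A sketch of what such a helper could look like (the implementation is assumed; only the dict keys are taken from the usage above):

def create_bounding_boxes(min_lat, min_lon, max_lat, max_lon,
                          nr_of_rows=12, nr_of_columns=12):
    """Split one bounding box into a grid of smaller, non-overlapping boxes."""
    lat_step = (max_lat - min_lat) / nr_of_rows
    lon_step = (max_lon - min_lon) / nr_of_columns
    boxes = []
    for row in range(nr_of_rows):
        for col in range(nr_of_columns):
            boxes.append({
                'min_lat': min_lat + row * lat_step,
                'min_lon': min_lon + col * lon_step,
                'max_lat': min_lat + (row + 1) * lat_step,
                'max_lon': min_lon + (col + 1) * lon_step,
            })
    return boxes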
RunThis.py
import subprocess import time from VpnConnect import VpnConnects vpc = VpnConnects() while True: time.sleep(10) processf2 = subprocess.Popen(["python", "Targetaccountstatus.py"]) time.sleep(3600) processf2.kill() # Scrape User
print("Opened The SubProcesses") process1 = subprocess.Popen(["python", "Manager1.py"]) process2 = subprocess.Popen(["python", "Manager2.py"]) process3 = subprocess.Popen(["python", "Manager3.py"]) process4 = subprocess.Popen(["python", "Manager4.py"]) process5 = subprocess.Popen(["python", "Manager5.py"]) time.sleep(6000) processmain.kill() process1.kill() process2.kill() process3.kill() process4.kill() process5.kill() print("Killed the Managers") time.sleep(10) subprocess.call("TASKKILL /f /IM CHROME.EXE") subprocess.call("TASKKILL /f /IM CHROMEDRIVER.EXE") time.sleep(45)
processmain = subprocess.Popen(["python", "ManagerAdmin.py"]) time.sleep(45)
interface.ts
import { Locale } from 'date-fns'; export interface I18nInterface { example: string; locale: Locale; dateFormat: 'MM/dd/yyyy' | 'dd/MM/yyyy' | 'yyyy/MM/dd' | 'yyyy/dd/MM'; timeFormat: '12' | '24'; months: { long: { january: string; february: string; march: string; april: string; may: string; june: string; july: string; august: string; september: string; october: string; november: string; december: string; }; short: { january: string; february: string; march: string; april: string; may: string; june: string; july: string; august: string; september: string; october: string; november: string; december: string; }; }; days: { long: { monday: string; tuesday: string; wednesday: string; thursday: string; friday: string; saturday: string; sunday: string; }; short: { monday: string; tuesday: string; wednesday: string; thursday: string; friday: string; saturday: string; sunday: string; }; min: { monday: string; tuesday: string; wednesday: string; thursday: string; friday: string; saturday: string; sunday: string; }; }; emptyItemsListText: string; alert: {
dismissAriaLabel: string; }; breadcrumb: { navAriaLabel: string; }; combobox: { clearIndicatorAriaLabel: string; createLabel: string; }; datePicker: { startOfWeek: | 'sunday' | 'monday' | 'tuesday' | 'wednesday' | 'thursday' | 'friday' | 'saturday'; calendarIconAriaLabel: string; calendarOpenAnnounce: string; calendarCloseAriaLabel: string; previousMonthAriaLabel: string; nextMonthAriaLabel: string; disabledDayAriaLabel: string; selectedDayAriaLabel: string; todayAriaLabel: string; helpModal: { header: string; helpButtonAriaLabel: string; enter: { ariaLabel: string; explanation: string; }; rightAndLeftArrowKeys: { ariaLabel: string; explanation: string; }; upAndDownArrowKeys: { ariaLabel: string; explanation: string; }; pageUpAndPageDownKeys: { ariaLabel: string; displayValue: string; explanation: string; }; homeAndEndKeys: { ariaLabel: string; displayValue: string; explanation: string; }; escape: { ariaLabel: string; displayValue: string; explanation: string; }; questionMark: { ariaLabel: string; explanation: string; }; }; }; dropdown: { menuItemSelectedAriaLabel: string; toggleMenuAriaLabel: string; }; fileUploader: { browseFiles: string; dragMessage: string; errors: { [key: string]: { header?: string; message: string; }; } files: string; }; header: { navigationButtonLabel: string; search: { input: { ariaLabel: string; placeholder: string; }; iconAriaLabel: string; }; }; indeterminateCheckbox: { isCheckedAnnounce: string; isIndeterminateAnnounce: string; isUncheckedAnnounce: string; }; input: { isClearableAriaLabel: string; }; loadingIndicator: { progressBar: { messages: { first: string; second: string; third: string; }; }; spinner: { messages: { first: string; second: string; third: string; }; }; }; modal: { closeAriaLabel: string; }; multiCombobox: { selectedItemButtonAriaLabel: string; }; multiSelect: { placeholder: string; selectedItemButtonAriaLabel: string; }; pagination: { nextButtonLabel: string; previousButtonLabel: string; pageButtonLabel: string; }; password: { shown: { ariaLabel: string; buttonText: string; announce: string; }; hidden: { ariaLabel: string; buttonText: string; announce: string; }; }; search: { input: { ariaLabel: string; placeholder: string; }; iconAriaLabel: string; }; select: { placeholder: string; clearIndicatorAriaLabel: string; }; skipLink: { buttonText: string; }; spinner: { ariaLabel: string; }; table: { pagination: { ofLabel: string; nextAriaLabel: string; previousAriaLabel: string; rowsPerPageLabel: string; }; }; tabs: { horizontalTabsInstructions: string; verticalTabsInstructions: string; previousButtonLabel: string; nextButtonLabel: string; }; tag: { deleteAriaLabel: string; } timePicker: { hoursAriaLabel: string; minutesAriaLabel: string; amButtonAriaLabel: string; pmButtonAriaLabel: string; amSelectedAnnounce: string; pmSelectedAnnounce: string; }; wizard: { navigationLabel: string; optional: string; actions: { previous: string; next: string; submit: string; cancel: string; }; }; }
HeaderController.js
angular.module('app').controller('HeaderController', ['$scope', '$timeout', '$http', '$location', '$rootScope','BASE_URL', '$state', function($scope, $timeout, $http, $location, $rootScope, BASE_URL, $state){ $scope.po_code_header= 'abc'; $scope.po_create= false; $scope.po_update= false; $scope.purchase_orders= []; $scope.createInit= function(){ var post_information= {}; $http.post(BASE_URL + '/home/createInit', post_information) .success(function(data) { if(data.success) { $scope.purchase_orders= data.purchase_orders; } else{ $state.go('404'); } }) .error(function(data, status, headers, config) { $state.go('404'); }); }; $scope.createInit();
$scope.poProcess= function(){ if($scope.po_create){ console.log($scope.po_create); console.log($scope.po_update); alert('create'); return false; $state.go('purchase-order-create'); } else if($scope.po_update){ console.log($scope.po_create); console.log($scope.po_update); alert('update'); return false; jQuery('#processPO').modal('show'); } else{} }; $scope.number_po_code_change= 0; $scope.focus_po_code= false; $scope.blur_po_code= false; //$scope.$watchCollection('[po_code, blur_po_code]', $scope.$watch('po_code_header', function(){ $scope.number_po_code_change++; console.log($scope.number_po_code_change); //if($scope.number_po_code_change>1){ //if($scope.blur_po_code== true){ var in_array= false; $.each($scope.purchase_orders, function(key, purchase_order_value){ if(purchase_order_value.po_code== $scope.po_code_header){ $rootScope.view_detail_purchase_order_id= purchase_order_value.id; in_array= true; $scope.po_update= true; $scope.po_create= false; console.log(purchase_order_value); return false; } }); if(in_array== false){ $scope.po_update= false; $scope.po_create= true; } //} //} }, true ); }]);
wikipedia.go
package summarize import ( "fmt" "framagit.org/andinus/indus/fetch" "framagit.org/andinus/indus/notification" ) // Wikipedia returns struct notification.Notif with notification info // from the wikipedia summary. func
(w fetch.Wiki) (notification.Notif, error) { n := notification.Notif{} var err error // Continue only if the page's type is standard. TODO: Work // with other page types to get summary. switch w.Type { case "standard": n.Title = fmt.Sprintf("%s", w.Title) n.Message = w.Extract default: err = fmt.Errorf("summarizing wikipedia response failed") } return n, err }
Wikipedia
uniform_distribution.rs
use rand::distributions::{Distribution, Uniform}; fn main()
{ let mut rng = rand::thread_rng(); let die = Uniform::from(1..7); loop { let throw = die.sample(&mut rng); println!("Roll the die: {throw}"); if throw == 6 { break; } } }
views.py
from django.shortcuts import render, redirect from django.http import HttpResponse from django.views import View from .forms import TwitterForm from .models import * from .tasks import get_tweets #from twython import Twython # Create your views here. def twitter_view(request): if request.method == 'GET': form = TwitterForm() return render( request, 'tweets.html', { 'twitter_form':form } ) if request.method == 'POST': form = TwitterForm(request.POST) if form.is_valid(): hashtag=form.cleaned_data['hashtag'] hashtag= "hariharaselvam" tweets = get_tweets.delay(hashtag) #product = form.save() #tw = Twython( # "yJ9GXtYiLH2yMXukUijR6R3dH", # "Ad8ZMpJNZvYe1CulUDUHPJiw1lg9pgalcLSFdWUQQRemP7jKhz", # "239795044-XqQ5P6tYWIZJip5EaWWO2Q8mPVwJVZ6hWJ4N9pEO", # "uC9cjPyNtUPg1ekJvWZCCMwtLojpFA7d6dyzoMAyfIlQQ" #) #tweets = tw.search(q=hashtag,count=10) print tweets return redirect('timeline_view') #return HttpResponse(tweets) def
(request): if request.method == 'GET': statuses = Status.objects.all() return render( request, 'timeline.html', { 'status_list':statuses } )
timeline_view
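One subtlety in twitter_view above: get_tweets.delay(hashtag) is a Celery call that returns an AsyncResult immediately, so the printed value is a task handle, not the tweets themselves. A minimal sketch, assuming get_tweets is declared as a task in .tasks roughly like this:

from celery import shared_task

@shared_task
def get_tweets(hashtag):
    ...  # fetch tweets for the hashtag and persist Status rows
    return hashtag

result = get_tweets.delay("hariharaselvam")   # returns at once with a handle
print(result.id)                              # the task id, not the tweets
# result.get(timeout=10)                      # blocks for the return value (needs a result backend)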
util.rs
// Copyright 2014 The html5ever Project Developers. See the // COPYRIGHT file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::io; use std::path::Path; pub fn
( src_dir: Path, subdir: &'static str, ext: &'static str, mk: |path_str: &str, file: io::File|) { let test_dir_path = src_dir.join_many(["html5lib-tests", subdir]); let test_files = io::fs::readdir(&test_dir_path).ok().expect("can't open dir"); for path in test_files.into_iter() { let path_str = path.filename_str().unwrap(); if path_str.ends_with(ext) { let file = io::File::open(&path).ok().expect("can't open file"); mk(path_str, file); } } }
foreach_html5lib_test
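foreach_html5lib_test above (written against a pre-1.0 Rust standard library) simply sweeps html5lib-tests/<subdir> and hands every file with the right extension to a callback. The same fixture-directory sweep, sketched in Python for illustration:

from pathlib import Path

def foreach_html5lib_test(src_dir, subdir, ext, mk):
    """Call mk(file_name, open_file) for each fixture file whose name ends with ext."""
    test_dir = Path(src_dir) / "html5lib-tests" / subdir
    for path in test_dir.iterdir():
        if path.name.endswith(ext):
            with path.open() as file:
                mk(path.name, file)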
tempFile.ts
import * as os from "os"; import * as path from "path";
import { iconv } from "./vscodeModules"; import { configuration } from "./configuration"; import { pathExists } from "../util"; export const tempdir = path.join( os.tmpdir(), `vscode-svn-${os.userInfo().uid.toString()}` ); export async function createTempFile( svnUri: Uri, revision: string, payload: string, ext?: string ): Promise<Uri> { if (!pathExists(tempdir)) { mkdirSync(tempdir, { mode: 0o770 }); } let fname = `r${revision}_${path.basename(svnUri.fsPath)}`; if (ext) { fname = fname + "." + ext; } const hash = crypto.createHash("md5"); const data = hash.update(svnUri.path); const filePathHash = data.digest("hex"); const encoding = configuration.get<string>("default.encoding"); if (!pathExists(path.join(tempdir, filePathHash))) { mkdirSync(path.join(tempdir, filePathHash), { mode: 0o770 }); } const fpath = path.join(tempdir, filePathHash, fname); if (encoding) { const encodedPayload = iconv.encode(payload, encoding); writeFileSync(fpath, encodedPayload); } else { writeFileSync(fpath, payload); } return Uri.file(fpath); }
import { Uri } from "vscode"; import { writeFileSync, mkdirSync } from "fs"; import * as crypto from "crypto";
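createTempFile above namespaces temp files by an md5 of the source path, so two files with the same base name but different repository paths never collide. The same layout, sketched in Python for illustration (assumes a POSIX system for os.getuid; names are mine, not from the source):

import hashlib
import os
import tempfile

def temp_path_for(svn_path, revision, ext=None):
    """Return a collision-free temp path for one revision of one repository file."""
    root = os.path.join(tempfile.gettempdir(), f"vscode-svn-{os.getuid()}")
    bucket = hashlib.md5(svn_path.encode()).hexdigest()   # one subdirectory per source path
    fname = f"r{revision}_{os.path.basename(svn_path)}"
    if ext:
        fname += f".{ext}"
    os.makedirs(os.path.join(root, bucket), mode=0o770, exist_ok=True)
    return os.path.join(root, bucket, fname)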
map3d.js
define('echarts-x/chart/map3d', [ 'require', 'zrender/tool/util', 'zrender/config', 'echarts/util/ecData', 'echarts/util/mapData/params', 'echarts/util/mapData/geoCoord', 'echarts/util/mapData/textFixed', 'zrender/shape/Polygon', 'zrender/shape/ShapeBundle', 'zrender/shape/Text', 'qtek/Node', 'qtek/Mesh', 'qtek/geometry/Sphere', 'qtek/Material', 'qtek/Shader', 'qtek/Texture2D', 'qtek/math/Vector3', 'qtek/math/Matrix4', 'qtek/core/glenum', '../config', './base3d', '../util/OrbitControl', '../surface/ZRenderSurface', '../surface/VectorFieldParticleSurface', 'qtek/core/LRU', 'echarts/chart' ], function (require) { var zrUtil = require('zrender/tool/util'); var zrConfig = require('zrender/config'); var ecData = require('echarts/util/ecData'); var mapParams = require('echarts/util/mapData/params').params; var geoCoordMap = require('echarts/util/mapData/geoCoord'); var textFixedMap = require('echarts/util/mapData/textFixed'); var PolygonShape = require('zrender/shape/Polygon'); var ShapeBundle = require('zrender/shape/ShapeBundle'); var TextShape = require('zrender/shape/Text'); var Node = require('qtek/Node'); var Mesh = require('qtek/Mesh'); var SphereGeometry = require('qtek/geometry/Sphere'); var Material = require('qtek/Material'); var Shader = require('qtek/Shader'); var Texture2D = require('qtek/Texture2D'); var Vector3 = require('qtek/math/Vector3'); var Matrix4 = require('qtek/math/Matrix4'); var glenum = require('qtek/core/glenum'); var ecConfig = require('../config'); var ChartBase3D = require('./base3d'); var OrbitControl = require('../util/OrbitControl'); var ZRenderSurface = require('../surface/ZRenderSurface'); var VectorFieldParticleSurface = require('../surface/VectorFieldParticleSurface'); var LRU = require('qtek/core/LRU'); var formatGeoPoint = function (p) { return [ p[0] < -168.5 && p[1] > 63.8 ? p[0] + 360 : p[0], p[1] ]; }; function
(ecTheme, messageCenter, zr, option, myChart) { ChartBase3D.call(this, ecTheme, messageCenter, zr, option, myChart); if (!this.baseLayer.renderer) { return; } this._earthRadius = 100; this._baseTextureSize = 2048; this._globeNode = null; this._orbitControl = null; this._mapDataMap = {}; this._nameMap = {}; this._globeSurface = null; this._surfaceLayerRoot = null; this._albedoShader = new Shader({ vertex: Shader.source('ecx.albedo.vertex'), fragment: Shader.source('ecx.albedo.fragment') }); this._albedoShader.enableTexture('diffuseMap'); this._albedoShaderPA = this._albedoShader.clone(); this._albedoShaderPA.define('fragment', 'PREMULTIPLIED_ALPHA'); this._sphereGeometry = new SphereGeometry({ widthSegments: 40, heightSegments: 40 }); this._imageCache = new LRU(5); this._vfParticleSurfaceList = []; this.refresh(option); } Map3D.prototype = { type: ecConfig.CHART_TYPE_MAP3D, constructor: Map3D, _init: function () { var legend = this.component.legend; var series = this.series; this.selectedMap = {}; this.beforeBuildMark(); for (var i = 0; i < series.length; i++) { if (series[i].type === ecConfig.CHART_TYPE_MAP3D) { series[i] = this.reformOption(series[i]); var seriesName = series[i].name; var mapType = series[i].mapType; this.selectedMap[seriesName] = legend ? legend.isSelected(seriesName) : true; if (series[i].geoCoord) { zrUtil.merge(geoCoordMap, series[i].geoCoord, true); } if (series[i].textFixed) { zrUtil.merge(textFixedMap, series[i].textFixed, true); } if (series[i].nameMap) { this._nameMap[mapType] = this._nameMap[mapType] || {}; zrUtil.merge(this._nameMap[mapType], series[i].nameMap, true); } } } var seriesGroupByMapType = this._groupSeriesByMapType(series); var dataMap = this._mergeSeriesData(series); for (var mapType in dataMap) { var seriesGroup = seriesGroupByMapType[mapType]; var mapQuality = this.deepQuery(seriesGroup, 'baseLayer.quality'); if (isNaN(mapQuality)) { switch (mapQuality) { case 'low': this._baseTextureSize = 1024; break; case 'high': this._baseTextureSize = 4096; break; case 'medium': default: this._baseTextureSize = 2048; break; } } else { this._baseTextureSize = mapQuality; } if (!this._globeNode) { this._createGlob(seriesGroup); this._initGlobeHandlers(); } this._updateGlobe(mapType, dataMap[mapType], seriesGroup); this._setViewport(seriesGroup); break; } var camera = this.baseLayer.camera; camera.position.y = 0; camera.position.z = this._earthRadius * 2.5; camera.lookAt(Vector3.ZERO); this.afterBuildMark(); }, _setViewport: function (seriesGroup) { var mapLocation = this.deepQuery(seriesGroup, 'mapLocation') || {}; var x = mapLocation.x; var y = mapLocation.y; var width = mapLocation.width; var height = mapLocation.height; var zrWidth = this.zr.getWidth(); var zrHeight = this.zr.getHeight(); x = this.parsePercent(x, zrWidth); y = this.parsePercent(y, zrHeight); width = this.parsePercent(width, zrWidth); height = this.parsePercent(height, zrHeight); x = isNaN(x) ? 0 : x; y = isNaN(y) ? 0 : x; width = isNaN(width) ? zrWidth : width; height = isNaN(height) ? 
zrHeight : height; this.baseLayer.setViewport(x, y, width, height); }, _groupSeriesByMapType: function (series) { var seriesGroupByMapType = {}; for (var i = 0; i < series.length; i++) { if (series[i].type === ecConfig.CHART_TYPE_MAP3D && this.selectedMap[series[i].name]) { var mapType = series[i].mapType; seriesGroupByMapType[mapType] = seriesGroupByMapType[mapType] || []; seriesGroupByMapType[mapType].push(series[i]); } } return seriesGroupByMapType; }, _mergeSeriesData: function (series) { var dataMap = {}; for (var i = 0; i < series.length; i++) { if (series[i].type === ecConfig.CHART_TYPE_MAP3D && this.selectedMap[series[i].name]) { var mapType = series[i].mapType; dataMap[mapType] = dataMap[mapType] || {}; var data = series[i].data || []; for (var j = 0; j < data.length; j++) { var name = data[j].name || ''; dataMap[mapType][name] = dataMap[mapType][name] || { seriesIdx: [], value: 0 }; dataMap[mapType][name].seriesIdx.push(i); for (var key in data[j]) { var val = data[j][key]; if (key === 'value') { if (!isNaN(val)) { dataMap[mapType][name].value += +val; } } else { dataMap[mapType][name][key] = val; } } } } } return dataMap; }, _updateGlobe: function (mapType, data, seriesGroup) { var globeSurface = this._globeSurface; var self = this; globeSurface.resize(this._baseTextureSize, this._baseTextureSize); var bgColor = this.deepQuery(seriesGroup, 'baseLayer.backgroundColor'); var bgImage = this.deepQuery(seriesGroup, 'baseLayer.backgroundImage'); globeSurface.backgroundColor = this._isValueNone(bgColor) ? '' : bgColor; if (!this._isValueNone(bgImage)) { if (typeof bgImage == 'string') { var img = new Image(); img.onload = function () { globeSurface.backgroundImage = img; globeSurface.refresh(); }; img.src = bgImage; } else { globeSurface.backgroundImage = bgImage; } } else { globeSurface.backgroundImage = null; } if (this._mapDataMap[mapType]) { this._updateMapPolygonShapes(data, this._mapDataMap[mapType], seriesGroup); globeSurface.refresh(); } else if (mapParams[mapType].getGeoJson) { mapParams[mapType].getGeoJson(function (mapData) { if (self._disposed) { return; } self._mapDataMap[mapType] = mapData; self._updateMapPolygonShapes(data, mapData, seriesGroup); globeSurface.refresh(); }); } else { globeSurface.refresh(); } if (this._surfaceLayerRoot) { this.baseLayer.renderer.disposeNode(this._surfaceLayerRoot, false, true); } this._surfaceLayerRoot = new Node({ name: 'surfaceLayers' }); this._globeNode.add(this._surfaceLayerRoot); for (var i = 0; i < this._vfParticleSurfaceList.length; i++) { this._vfParticleSurfaceList[i].dispose(); } this._vfParticleSurfaceList = []; seriesGroup.forEach(function (serie) { var sIdx = this.series.indexOf(serie); this.buildMark(sIdx, this._globeNode); this._createSurfaceLayers(sIdx); }, this); }, _createSurfaceLayers: function (seriesIdx) { var serie = this.series[seriesIdx]; for (var i = 0; i < serie.surfaceLayers.length; i++) { var surfaceLayer = serie.surfaceLayers[i]; var surfaceMesh = new Mesh({ name: 'surfaceLayer' + i, geometry: this._sphereGeometry, ignorePicking: true }); var distance = surfaceLayer.distance; if (distance == null) { distance = i + 1; } var r = this._earthRadius + distance; surfaceMesh.scale.set(r, r, r); switch (surfaceLayer.type) { case 'particle': this._createParticleSurfaceLayer(seriesIdx, surfaceLayer, surfaceMesh); break; case 'texture': default: this._createTextureSurfaceLayer(seriesIdx, surfaceLayer, surfaceMesh); break; } this._surfaceLayerRoot.add(surfaceMesh); } }, _createTextureSurfaceLayer: function (seriesIdx, 
surfaceLayerCfg, surfaceMesh) { var self = this; surfaceMesh.material = new Material({ shader: this._albedoShader, transparent: true, depthMask: false }); var serie = this.series[seriesIdx]; var image = surfaceLayerCfg.image; var canvas = document.createElement('canvas'); canvas.width = 1; canvas.height = 1; var texture = new Texture2D({ anisotropic: 32, image: canvas }); surfaceMesh.material.set('diffuseMap', texture); if (typeof image === 'string') { var src = image; image = this._imageCache.get(src); if (!image) { image = new Image(); image.onload = function () { texture.image = image; texture.dirty(); self.zr.refreshNextFrame(); self._imageCache.put(src, image); }; image.src = src; } else { texture.image = image; } } else if (this._isValueImage(image)) { texture.image = image; } }, _createParticleSurfaceLayer: function (seriesIdx, surfaceLayerCfg, surfaceMesh) { var self = this; var serie = this.series[seriesIdx]; var data = this.query(surfaceLayerCfg, 'particle.vectorField'); surfaceMesh.material = new Material({ shader: this._albedoShaderPA, transparent: true, depthMask: false }); var vfParticleSurface = new VectorFieldParticleSurface(this.baseLayer.renderer, data); var width = 0; var height = 0; var vfImage; if (data instanceof Array) { vfImage = this._createCanvasFromDataMatrix(data); width = vfImage.width; height = vfImage.height; if (!vfImage) { return false; } } else if (this._isValueImage(data)) { width = data.width; height = data.height; vfImage = data; } else { return false; } if (!width || !height) { return; } var textureSize = this.query(surfaceLayerCfg, 'size'); if (typeof textureSize === 'number') { textureSize = [ textureSize, textureSize ]; } else if (!textureSize) { textureSize = [ 2048, 1024 ]; } var particleSizeScaling = this.query(surfaceLayerCfg, 'particle.sizeScaling') || 1; var particleSpeedScaling = this.query(surfaceLayerCfg, 'particle.speedScaling'); if (particleSpeedScaling == null) { particleSpeedScaling = 1; } var particleColor = this.query(surfaceLayerCfg, 'particle.color') || 'white'; var particleNumber = this.query(surfaceLayerCfg, 'particle.number'); if (particleNumber == null) { particleNumber = 256 * 256; } ; var motionBlurFactor = this.query(surfaceLayerCfg, 'particle.motionBlurFactor'); if (motionBlurFactor == null) { motionBlurFactor = 0.99; } vfParticleSurface.vectorFieldTexture = new Texture2D({ image: vfImage, flipY: true }); vfParticleSurface.surfaceTexture = new Texture2D({ width: textureSize[0], height: textureSize[1], anisotropic: 32 }); vfParticleSurface.particleSizeScaling = particleSizeScaling; vfParticleSurface.particleSpeedScaling = particleSpeedScaling; vfParticleSurface.particleColor = this.parseColor(particleColor); vfParticleSurface.motionBlurFactor = motionBlurFactor; var size = Math.round(Math.sqrt(particleNumber)); vfParticleSurface.init(size, size); vfParticleSurface.surfaceMesh = surfaceMesh; this._vfParticleSurfaceList.push(vfParticleSurface); }, _createCanvasFromDataMatrix: function (data) { var height = data.length; if (!(data[0] instanceof Array)) { return null; } var width = data[0].length; if (!(data[0][0] instanceof Array)) { return null; } var vfImage = document.createElement('canvas'); vfImage.width = width; vfImage.height = height; var ctx = vfImage.getContext('2d'); var imageData = ctx.getImageData(0, 0, width, height); var p = 0; for (var j = 0; j < height; j++) { for (var i = 0; i < width; i++) { var item = data[j][i]; var u = item.x == null ? item[0] : item.x; var v = item.y == null ? 
item[1] : item.y; imageData.data[p++] = u * 128 + 128; imageData.data[p++] = v * 128 + 128; imageData.data[p++] = 0; imageData.data[p++] = 255; } } ctx.putImageData(imageData, 0, 0); return vfImage; }, _createGlob: function (seriesGroup) { var zr = this.zr; var self = this; this._globeNode = new Node({ name: 'globe' }); var earthMesh = new Mesh({ name: 'earth', geometry: this._sphereGeometry, material: new Material({ shader: this._albedoShader, transparent: true }) }); var radius = this._earthRadius; earthMesh.scale.set(radius, radius, radius); this._globeNode.add(earthMesh); var scene = this.baseLayer.scene; scene.add(this._globeNode); this._orbitControl = new OrbitControl(this._globeNode, this.zr, this.baseLayer); this._orbitControl.init(); this._orbitControl.autoRotate = this.deepQuery(seriesGroup, 'autoRotate'); var globeSurface = new ZRenderSurface(this._baseTextureSize, this._baseTextureSize); this._globeSurface = globeSurface; earthMesh.material.set('diffuseMap', globeSurface.getTexture()); globeSurface.onrefresh = function () { zr.refreshNextFrame(); }; }, _updateMapPolygonShapes: function (data, mapData, seriesGroup) { this._globeSurface.clearElements(); var self = this; var dataRange = this.component.dataRange; var scaleX = this._baseTextureSize / 360; var scaleY = this._baseTextureSize / 180; var mapType = this.deepQuery(seriesGroup, 'mapType'); var nameMap = this._nameMap[mapType] || {}; for (var i = 0; i < mapData.features.length; i++) { var feature = mapData.features[i]; var name = feature.properties.name; name = nameMap[name] || name; var dataItem = data[name]; var value; var queryTarget = []; var seriesName = []; if (dataItem) { queryTarget.push(dataItem); for (var j = 0; j < dataItem.seriesIdx.length; j++) { var sIdx = dataItem.seriesIdx[j]; seriesName.push(this.series[sIdx].name); queryTarget.push(this.series[sIdx]); } value = dataItem.value; } else { dataItem = '-'; value = '-'; queryTarget = seriesGroup; } seriesName = seriesName.join(' '); var color = this.deepQuery(queryTarget, 'itemStyle.normal.areaStyle.color'); color = dataRange && !isNaN(value) ? 
dataRange.getColor(value) : color; var shape = new ShapeBundle({ name: name, zlevel: 0, cp: feature.properties.cp, style: { shapeList: [], brushType: 'both', color: color, strokeColor: this.deepQuery(queryTarget, 'itemStyle.normal.borderColor'), lineWidth: this.deepQuery(queryTarget, 'itemStyle.normal.borderWidth'), opacity: this.deepQuery(queryTarget, 'itemStyle.normal.opacity') }, highlightStyle: { color: this.deepQuery(queryTarget, 'itemStyle.emphasis.areaStyle.color'), strokeColor: this.deepQuery(queryTarget, 'itemStyle.emphasis.borderColor'), lineWidth: this.deepQuery(queryTarget, 'itemStyle.emphasis.borderWidth'), opacity: this.deepQuery(queryTarget, 'itemStyle.emphasis.opacity') } }); ecData.pack(shape, { name: seriesName, tooltip: this.deepQuery(queryTarget, 'tooltip') }, 0, dataItem, 0, name); if (feature.type == 'Feature') { createGeometry(feature.geometry, shape); } else if (feature.type == 'GeometryCollection') { for (var j = 0; j < feature.geometries; j++) { createGeometry(feature.geometries[j], shape); } } this._globeSurface.addElement(shape); var cp = this._getTextPosition(shape); var lat = (0.5 - cp[1] / this._baseTextureSize) * Math.PI; var textScaleX = 1 / Math.cos(lat); var baseScale = this._baseTextureSize / 2048; var textShape = new TextShape({ zlevel: 1, position: cp, scale: [ 0.5 * textScaleX * baseScale, baseScale ], style: { x: 0, y: 0, brushType: 'fill', text: this._getMapLabelText(name, value, queryTarget, 'normal'), textAlign: 'center', color: this.deepQuery(queryTarget, 'itemStyle.normal.label.textStyle.color'), opacity: this.deepQuery(queryTarget, 'itemStyle.normal.label.show') ? 1 : 0, textFont: this.getFont(this.deepQuery(queryTarget, 'itemStyle.normal.label.textStyle')) }, highlightStyle: { color: this.deepQuery(queryTarget, 'itemStyle.emphasis.label.textStyle.color'), opacity: this.deepQuery(queryTarget, 'itemStyle.emphasis.label.show') ? 
1 : 0, textFont: this.getFont(this.deepQuery(queryTarget, 'itemStyle.emphasis.label.textStyle')) } }); this._globeSurface.addElement(textShape); } function createGeometry(geometry, bundleShape) { if (geometry.type == 'Polygon') { createPolygon(geometry.coordinates, bundleShape); } else if (geometry.type == 'MultiPolygon') { for (var i = 0; i < geometry.coordinates.length; i++) { createPolygon(geometry.coordinates[i], bundleShape); } } } function createPolygon(coordinates, bundleShape) { for (var k = 0; k < coordinates.length; k++) { var polygon = new PolygonShape({ style: { pointList: [] } }); for (var i = 0; i < coordinates[k].length; i++) { var point = formatGeoPoint(coordinates[k][i]); var x = (point[0] + 180) * scaleX; var y = (90 - point[1]) * scaleY; polygon.style.pointList.push([ x, y ]); } bundleShape.style.shapeList.push(polygon); } } }, _getTextPosition: function (polygonShape) { var textPosition; var name = polygonShape.name; var textFixed = textFixedMap[name] || [ 0, 0 ]; var size = this._baseTextureSize; if (geoCoordMap[name]) { textPosition = [ (geoCoordMap[name][0] + 180) / 360 * size, (90 - geoCoordMap[name][1]) / 180 * size ]; } else if (polygonShape.cp) { textPosition = [ (polygonShape.cp[0] + textFixed[0] + 180) / 360 * size, (90 - (polygonShape.cp[1] + textFixed[1])) / 180 * size ]; } else { var bbox = polygonShape.getRect(polygonShape.style); textPosition = [ bbox.x + bbox.width / 2 + textFixed[0], bbox.y + bbox.height / 2 + textFixed[1] ]; } return textPosition; }, _initGlobeHandlers: function () { var globeMesh = this._globeNode.queryNode('earth'); var mouseEventHandler = function (e) { var shape = this._globeSurface.hover(e); if (shape) { this.zr.handler.dispatch(e.type, { target: shape, event: e.event, type: e.type }); } }; var eventList = [ 'CLICK', 'DBLCLICK', 'MOUSEOVER', 'MOUSEOUT', 'MOUSEMOVE', 'DRAGSTART', 'DRAGEND', 'DRAGENTER', 'DRAGOVER', 'DRAGLEAVE', 'DROP' ]; eventList.forEach(function (eveName) { globeMesh.on(zrConfig.EVENT[eveName], mouseEventHandler, this); }, this); }, _eulerToSphere: function (x, y, z) { var theta = Math.asin(y); var phi = Math.atan2(z, -x); if (phi < 0) { phi = Math.PI * 2 + phi; } var log = theta * 180 / Math.PI + 90; var lat = phi * 180 / Math.PI; }, _isValueNone: function (value) { return value == null || value === '' || typeof value == 'string' && value.toLowerCase() == 'none'; }, _isValueImage: function (value) { return value instanceof HTMLCanvasElement || value instanceof HTMLImageElement || value instanceof Image; }, _getMapLabelText: function (name, value, queryTarget, status) { var formatter = this.deepQuery(queryTarget, 'itemStyle.' + status + '.label.formatter'); if (formatter) { if (typeof formatter == 'function') { return formatter.call(this.myChart, name, value); } else if (typeof formatter == 'string') { formatter = formatter.replace('{a}', '{a0}').replace('{b}', '{b0}'); formatter = formatter.replace('{a0}', name).replace('{b0}', value); return formatter; } } else { return name; } }, getMarkCoord: function (seriesIdx, data, point) { var geoCoord = data.geoCoord || geoCoordMap[data.name]; var coords = []; var serie = this.series[seriesIdx]; var distance = this.deepQuery([ data, serie.markPoint || serie.markLine || serie.markBar ], 'distance'); coords[0] = geoCoord.x == null ? geoCoord[0] : geoCoord.x; coords[1] = geoCoord.y == null ? 
geoCoord[1] : geoCoord.y; coords = formatGeoPoint(coords); var lon = coords[0]; var lat = coords[1]; lon = Math.PI * lon / 180; lat = Math.PI * lat / 180; var r = this._earthRadius + distance; var r0 = Math.cos(lat) * r; point._array[1] = Math.sin(lat) * r; point._array[0] = -r0 * Math.cos(lon + Math.PI); point._array[2] = r0 * Math.sin(lon + Math.PI); }, getMarkPointTransform: function () { var xAxis = new Vector3(); var yAxis = new Vector3(); var zAxis = new Vector3(); var position = new Vector3(); return function (seriesIdx, data, matrix) { var series = this.series[seriesIdx]; var queryTarget = [ data, series.markPoint ]; var symbolSize = this.deepQuery(queryTarget, 'symbolSize'); var orientation = this.deepQuery(queryTarget, 'orientation'); var orientationAngle = this.deepQuery(queryTarget, 'orientationAngle'); this.getMarkCoord(seriesIdx, data, position); Vector3.normalize(zAxis, position); Vector3.cross(xAxis, Vector3.UP, zAxis); Vector3.normalize(xAxis, xAxis); Vector3.cross(yAxis, zAxis, xAxis); if (!isNaN(symbolSize)) { symbolSize = [ symbolSize, symbolSize ]; } if (orientation === 'tangent') { var tmp = zAxis; zAxis = yAxis; yAxis = tmp; Vector3.negate(zAxis, zAxis); Vector3.scaleAndAdd(position, position, yAxis, symbolSize[1]); } matrix.x = xAxis; matrix.y = yAxis; matrix.z = zAxis; Matrix4.rotateX(matrix, matrix, -orientationAngle / 180 * Math.PI); Matrix4.scale(matrix, matrix, new Vector3(symbolSize[0], symbolSize[1], 1)); var arr = matrix._array; arr[12] = position.x; arr[13] = position.y; arr[14] = position.z; }; }(), getMarkBarPoints: function () { var normal = new Vector3(); return function (seriesIdx, data, start, end) { var barHeight = data.barHeight != null ? data.barHeight : 1; if (typeof barHeight == 'function') { barHeight = barHeight(data); } this.getMarkCoord(seriesIdx, data, start); Vector3.copy(normal, start); Vector3.normalize(normal, normal); Vector3.scaleAndAdd(end, start, normal, barHeight); }; }(), getMarkLinePoints: function () { var normal = new Vector3(); var tangent = new Vector3(); var bitangent = new Vector3(); var halfVector = new Vector3(); return function (seriesIdx, data, p0, p1, p2, p3) { var isCurve = !!p2; if (!isCurve) { p3 = p1; } this.getMarkCoord(seriesIdx, data[0], p0); this.getMarkCoord(seriesIdx, data[1], p3); var normalize = Vector3.normalize; var cross = Vector3.cross; var sub = Vector3.sub; var add = Vector3.add; if (isCurve) { normalize(normal, p0); sub(tangent, p3, p0); normalize(tangent, tangent); cross(bitangent, tangent, normal); normalize(bitangent, bitangent); cross(tangent, normal, bitangent); add(p1, normal, tangent); normalize(p1, p1); normalize(normal, p3); sub(tangent, p0, p3); normalize(tangent, tangent); cross(bitangent, tangent, normal); normalize(bitangent, bitangent); cross(tangent, normal, bitangent); add(p2, normal, tangent); normalize(p2, p2); add(halfVector, p0, p3); normalize(halfVector, halfVector); var projDist = Vector3.dot(p0, halfVector); var cosTheta = Vector3.dot(halfVector, p1); var len = (this._earthRadius - projDist) / cosTheta * 2; Vector3.scaleAndAdd(p1, p0, p1, len); Vector3.scaleAndAdd(p2, p3, p2, len); } }; }(), onframe: function (deltaTime) { ChartBase3D.prototype.onframe.call(this, deltaTime); this._orbitControl.update(deltaTime); for (var i = 0; i < this._vfParticleSurfaceList.length; i++) { this._vfParticleSurfaceList[i].update(Math.min(deltaTime / 1000, 0.5)); this.zr.refreshNextFrame(); } }, refresh: function (newOption) { if (!this.baseLayer.renderer) { return; } if (newOption) { this.option 
= newOption; this.series = newOption.series; } this._init(); }, ondataRange: function (param, status) { if (this.component.dataRange) { this.refresh(); this.zr.refreshNextFrame(); } }, dispose: function () { ChartBase3D.prototype.dispose.call(this); this.baseLayer.dispose(); if (this._orbitControl) { this._orbitControl.dispose(); } this._globeNode = null; this._orbitControl = null; this._disposed = true; for (var i = 0; i < this._vfParticleSurfaceList.length; i++) { this._vfParticleSurfaceList[i].dispose(); } } }; zrUtil.inherits(Map3D, ChartBase3D); require('echarts/chart').define(ecConfig.CHART_TYPE_MAP3D, Map3D); return Map3D; });define('qtek/Node', [ 'require', './core/Base', './math/Vector3', './math/Quaternion', './math/Matrix4', './dep/glmatrix' ], function (require) { 'use strict'; var Base = require('./core/Base'); var Vector3 = require('./math/Vector3'); var Quaternion = require('./math/Quaternion'); var Matrix4 = require('./math/Matrix4'); var glMatrix = require('./dep/glmatrix'); var mat4 = glMatrix.mat4; var nameId = 0; var Node = Base.derive({ name: '', position: null, rotation: null, scale: null, worldTransform: null, localTransform: null, autoUpdateLocalTransform: true, _parent: null, _scene: null, _needsUpdateWorldTransform: true, _inIterating: false, __depth: 0 }, function () { if (!this.name) { this.name = 'NODE_' + nameId++; } if (!this.position) { this.position = new Vector3(); } if (!this.rotation) { this.rotation = new Quaternion(); } if (!this.scale) { this.scale = new Vector3(1, 1, 1); } this.worldTransform = new Matrix4(); this.localTransform = new Matrix4(); this._children = []; }, { visible: true, isRenderable: function () { return false; }, setName: function (name) { if (this._scene) { delete this._scene._nodeRepository[this.name]; this._scene._nodeRepository[name] = this; } this.name = name; }, add: function (node) { if (this._inIterating) { console.warn('Add operation can cause unpredictable error when in iterating'); } if (node._parent === this) { return; } if (node._parent) { node._parent.remove(node); } node._parent = this; this._children.push(node); if (this._scene && this._scene !== node.scene) { node.traverse(this._addSelfToScene, this); } }, remove: function (node) { if (this._inIterating) { console.warn('Remove operation can cause unpredictable error when in iterating'); } var idx = this._children.indexOf(node); if (idx < 0) { return; } this._children.splice(idx, 1); node._parent = null; if (this._scene) { node.traverse(this._removeSelfFromScene, this); } }, getScene: function () { return this._scene; }, getParent: function () { return this._parent; }, _removeSelfFromScene: function (descendant) { descendant._scene.removeFromScene(descendant); descendant._scene = null; }, _addSelfToScene: function (descendant) { this._scene.addToScene(descendant); descendant._scene = this._scene; }, isAncestor: function (node) { var parent = node._parent; while (parent) { if (parent === this) { return true; } parent = parent._parent; } return false; }, children: function () { return this._children.slice(); }, childAt: function (idx) { return this._children[idx]; }, getChildByName: function (name) { for (var i = 0; i < this._children.length; i++) { if (this._children[i].name === name) { return this._children[i]; } } }, getDescendantByName: function (name) { for (var i = 0; i < this._children.length; i++) { var child = this._children[i]; if (child.name === name) { return child; } else { var res = child.getDescendantByName(name); if (res) { return res; } } } }, 
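// Looks up a descendant by a slash-separated path of node names, walking one
// child level per segment; empty segments are skipped, and undefined is
// returned as soon as a segment matches no child. Illustrative use
// (hypothetical node names): root.queryNode('body/arm/hand');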
queryNode: function (path) { if (!path) { return; } var pathArr = path.split('/'); var current = this; for (var i = 0; i < pathArr.length; i++) { var name = pathArr[i]; if (!name) { continue; } var found = false; for (var j = 0; j < current._children.length; j++) { var child = current._children[j]; if (child.name === name) { current = child; found = true; break; } } if (!found) { return; } } return current; }, getPath: function (rootNode) { if (!this._parent) { return '/'; } var current = this._parent; var path = this.name; while (current._parent) { path = current.name + '/' + path; if (current._parent == rootNode) { break; } current = current._parent; } if (!current._parent && rootNode) { return null; } return path; }, traverse: function (callback, context, ctor) { this._inIterating = true; if (ctor === undefined || this.constructor === ctor) { callback.call(context, this); } var _children = this._children; for (var i = 0, len = _children.length; i < len; i++) { _children[i].traverse(callback, context, ctor); } this._inIterating = false; }, setLocalTransform: function (matrix) { mat4.copy(this.localTransform._array, matrix._array); this.decomposeLocalTransform(); }, decomposeLocalTransform: function (keepScale) { var scale = !keepScale ? this.scale : null; this.localTransform.decomposeMatrix(scale, this.rotation, this.position); }, setWorldTransform: function (matrix) { mat4.copy(this.worldTransform._array, matrix._array); this.decomposeWorldTransform(); }, decomposeWorldTransform: function () { var tmp = mat4.create(); return function (keepScale) { if (this._parent) { mat4.invert(tmp, this._parent.worldTransform._array); mat4.multiply(this.localTransform._array, tmp, this.worldTransform._array); } else { mat4.copy(this.localTransform._array, this.worldTransform._array); } var scale = !keepScale ? 
this.scale : null; this.localTransform.decomposeMatrix(scale, this.rotation, this.position); }; }(), updateLocalTransform: function () { var position = this.position; var rotation = this.rotation; var scale = this.scale; if (position._dirty || scale._dirty || rotation._dirty) { var m = this.localTransform._array; mat4.fromRotationTranslation(m, rotation._array, position._array); mat4.scale(m, m, scale._array); rotation._dirty = false; scale._dirty = false; position._dirty = false; this._needsUpdateWorldTransform = true; } }, updateWorldTransform: function () { if (this._parent) { mat4.multiply(this.worldTransform._array, this._parent.worldTransform._array, this.localTransform._array); } else { mat4.copy(this.worldTransform._array, this.localTransform._array); } }, update: function (forceUpdateWorld) { if (this.autoUpdateLocalTransform) { this.updateLocalTransform(); } else { forceUpdateWorld = true; } if (forceUpdateWorld || this._needsUpdateWorldTransform) { this.updateWorldTransform(); forceUpdateWorld = true; this._needsUpdateWorldTransform = false; } for (var i = 0, len = this._children.length; i < len; i++) { this._children[i].update(forceUpdateWorld); } }, getWorldPosition: function (out) { var m = this.worldTransform._array; if (out) { out._array[0] = m[12]; out._array[1] = m[13]; out._array[2] = m[14]; return out; } else { return new Vector3(m[12], m[13], m[14]); } }, clone: function () { var node = new this.constructor(); node.setName(this.name); node.position.copy(this.position); node.rotation.copy(this.rotation); node.scale.copy(this.scale); for (var i = 0; i < this._children.length; i++) { node.add(this._children[i].clone()); } return node; }, rotateAround: function () { var v = new Vector3(); var RTMatrix = new Matrix4(); return function (point, axis, angle) { v.copy(this.position).subtract(point); this.localTransform.identity(); this.localTransform.translate(point); this.localTransform.rotate(angle, axis); RTMatrix.fromRotationTranslation(this.rotation, v); this.localTransform.multiply(RTMatrix); this.localTransform.scale(this.scale); this.decomposeLocalTransform(); this._needsUpdateWorldTransform = true; }; }(), lookAt: function () { var m = new Matrix4(); return function (target, up) { m.lookAt(this.position, target, up || this.localTransform.y).invert(); m.decomposeMatrix(null, this.rotation, this.position); }; }() }); return Node; });define('qtek/Mesh', [ 'require', './Renderable', './core/glenum' ], function (require) { 'use strict'; var Renderable = require('./Renderable'); var glenum = require('./core/glenum'); var Mesh = Renderable.derive({ skeleton: null, joints: null }, function () { if (!this.joints) { this.joints = []; } }, { render: function (_gl, globalMaterial) { var material = globalMaterial || this.material; if (this.skeleton) { var skinMatricesArray = this.skeleton.getSubSkinMatrices(this.__GUID__, this.joints); material.shader.setUniformBySemantic(_gl, 'SKIN_MATRIX', skinMatricesArray); } return Renderable.prototype.render.call(this, _gl, globalMaterial); } }); Mesh.POINTS = glenum.POINTS; Mesh.LINES = glenum.LINES; Mesh.LINE_LOOP = glenum.LINE_LOOP; Mesh.LINE_STRIP = glenum.LINE_STRIP; Mesh.TRIANGLES = glenum.TRIANGLES; Mesh.TRIANGLE_STRIP = glenum.TRIANGLE_STRIP; Mesh.TRIANGLE_FAN = glenum.TRIANGLE_FAN; Mesh.BACK = glenum.BACK; Mesh.FRONT = glenum.FRONT; Mesh.FRONT_AND_BACK = glenum.FRONT_AND_BACK; Mesh.CW = glenum.CW; Mesh.CCW = glenum.CCW; return Mesh; });define('qtek/geometry/Sphere', [ 'require', '../DynamicGeometry', '../dep/glmatrix', 
'../math/BoundingBox' ], function (require) { 'use strict'; var DynamicGeometry = require('../DynamicGeometry'); var glMatrix = require('../dep/glmatrix'); var vec3 = glMatrix.vec3; var vec2 = glMatrix.vec2; var BoundingBox = require('../math/BoundingBox'); var Sphere = DynamicGeometry.derive({ widthSegments: 20, heightSegments: 20, phiStart: 0, phiLength: Math.PI * 2, thetaStart: 0, thetaLength: Math.PI, radius: 1 }, function () { this.build(); }, { build: function () { var positions = this.attributes.position.value; var texcoords = this.attributes.texcoord0.value; var normals = this.attributes.normal.value; positions.length = 0; texcoords.length = 0; normals.length = 0; this.faces.length = 0; var x, y, z, u, v, i, j; var normal; var heightSegments = this.heightSegments; var widthSegments = this.widthSegments; var radius = this.radius; var phiStart = this.phiStart; var phiLength = this.phiLength; var thetaStart = this.thetaStart; var thetaLength = this.thetaLength; var radius = this.radius; for (j = 0; j <= heightSegments; j++) { for (i = 0; i <= widthSegments; i++) { u = i / widthSegments; v = j / heightSegments; x = -radius * Math.cos(phiStart + u * phiLength) * Math.sin(thetaStart + v * thetaLength); y = radius * Math.cos(thetaStart + v * thetaLength); z = radius * Math.sin(phiStart + u * phiLength) * Math.sin(thetaStart + v * thetaLength); positions.push(vec3.fromValues(x, y, z)); texcoords.push(vec2.fromValues(u, v)); normal = vec3.fromValues(x, y, z); vec3.normalize(normal, normal); normals.push(normal); } } var i1, i2, i3, i4; var faces = this.faces; var len = widthSegments + 1; for (j = 0; j < heightSegments; j++) { for (i = 0; i < widthSegments; i++) { i2 = j * len + i; i1 = j * len + i + 1; i4 = (j + 1) * len + i + 1; i3 = (j + 1) * len + i; faces.push(vec3.fromValues(i1, i2, i4)); faces.push(vec3.fromValues(i2, i3, i4)); } } this.boundingBox = new BoundingBox(); this.boundingBox.max.set(radius, radius, radius); this.boundingBox.min.set(-radius, -radius, -radius); } }); return Sphere; });define('qtek/Material', [ 'require', './core/Base', './Texture' ], function (require) { 'use strict'; var Base = require('./core/Base'); var Texture = require('./Texture'); var Material = Base.derive({ name: '', uniforms: null, shader: null, depthTest: true, depthMask: true, transparent: false, blend: null, _enabledUniforms: null }, function () { if (!this.name) { this.name = 'MATERIAL_' + this.__GUID__; } if (this.shader) { this.attachShader(this.shader); } }, { bind: function (_gl, prevMaterial) { var slot = 0; var sameShader = prevMaterial && prevMaterial.shader === this.shader; for (var u = 0; u < this._enabledUniforms.length; u++) { var symbol = this._enabledUniforms[u]; var uniform = this.uniforms[symbol]; if (sameShader) { if (prevMaterial.uniforms[symbol].value === uniform.value) { continue; } } if (uniform.value === undefined) { console.warn('Uniform value "' + symbol + '" is undefined'); continue; } else if (uniform.value === null) { continue; } else if (uniform.value instanceof Array && !uniform.value.length) { continue; } else if (uniform.value instanceof Texture) { var res = this.shader.setUniform(_gl, '1i', symbol, slot); if (!res) { continue; } var texture = uniform.value; _gl.activeTexture(_gl.TEXTURE0 + slot); if (texture.isRenderable()) { texture.bind(_gl); } else { texture.unbind(_gl); } slot++; } else if (uniform.value instanceof Array) { if (uniform.value.length === 0) { continue; } var exampleValue = uniform.value[0]; if (exampleValue instanceof Texture) { if 
(!this.shader.hasUniform(symbol)) { continue; } var arr = []; for (var i = 0; i < uniform.value.length; i++) { var texture = uniform.value[i]; _gl.activeTexture(_gl.TEXTURE0 + slot); if (texture.isRenderable()) { texture.bind(_gl); } else { texture.unbind(_gl); } arr.push(slot++); } this.shader.setUniform(_gl, '1iv', symbol, arr); } else { this.shader.setUniform(_gl, uniform.type, symbol, uniform.value); } } else { this.shader.setUniform(_gl, uniform.type, symbol, uniform.value); } } }, setUniform: function (symbol, value) { var uniform = this.uniforms[symbol]; if (uniform) { uniform.value = value; } }, setUniforms: function (obj) { for (var key in obj) { var val = obj[key]; this.setUniform(key, val); } }, enableUniform: function (symbol) { if (this.uniforms[symbol] && !this.isUniformEnabled(symbol)) { this._enabledUniforms.push(symbol); } }, disableUniform: function (symbol) { var idx = this._enabledUniforms.indexOf(symbol); if (idx >= 0) { this._enabledUniforms.splice(idx, 1); } }, isUniformEnabled: function (symbol) { return this._enabledUniforms.indexOf(symbol) >= 0; }, set: function (symbol, value) { if (typeof symbol === 'object') { for (var key in symbol) { var val = symbol[key]; this.set(key, val); } } else { var uniform = this.uniforms[symbol]; if (uniform) { uniform.value = value; } } }, get: function (symbol) { var uniform = this.uniforms[symbol]; if (uniform) { return uniform.value; } }, attachShader: function (shader, keepUniform) { if (this.shader) { this.shader.detached(); } var originalUniforms = this.uniforms; this.uniforms = shader.createUniforms(); this.shader = shader; this._enabledUniforms = Object.keys(this.uniforms); if (keepUniform) { for (var symbol in originalUniforms) { if (this.uniforms[symbol]) { this.uniforms[symbol].value = originalUniforms[symbol].value; } } } shader.attached(); }, detachShader: function () { this.shader.detached(); this.shader = null; this.uniforms = {}; }, clone: function () { var material = new Material({ name: this.name, shader: this.shader }); for (var symbol in this.uniforms) { material.uniforms[symbol].value = this.uniforms[symbol].value; } material.depthTest = this.depthTest; material.depthMask = this.depthMask; material.transparent = this.transparent; material.blend = this.blend; return material; }, dispose: function (_gl, disposeTexture) { if (disposeTexture) { for (var name in this.uniforms) { var val = this.uniforms[name].value; if (!val) { continue; } if (val instanceof Texture) { val.dispose(_gl); } else if (val instanceof Array) { for (var i = 0; i < val.length; i++) { if (val[i] instanceof Texture) { val[i].dispose(_gl); } } } } } var shader = this.shader; if (shader) { this.detachShader(); if (!shader.isAttachedToAny()) { shader.dispose(_gl); } } } }); return Material; });define('qtek/Texture2D', [ 'require', './Texture', './core/glinfo', './core/glenum' ], function (require) { var Texture = require('./Texture'); var glinfo = require('./core/glinfo'); var glenum = require('./core/glenum'); var Texture2D = Texture.derive(function () { return { image: null, pixels: null, mipmaps: [] }; }, { update: function (_gl) { _gl.bindTexture(_gl.TEXTURE_2D, this._cache.get('webgl_texture')); this.beforeUpdate(_gl); var glFormat = this.format; var glType = this.type; _gl.texParameteri(_gl.TEXTURE_2D, _gl.TEXTURE_WRAP_S, this.wrapS); _gl.texParameteri(_gl.TEXTURE_2D, _gl.TEXTURE_WRAP_T, this.wrapT); _gl.texParameteri(_gl.TEXTURE_2D, _gl.TEXTURE_MAG_FILTER, this.magFilter); _gl.texParameteri(_gl.TEXTURE_2D, _gl.TEXTURE_MIN_FILTER, 
this.minFilter); var anisotropicExt = glinfo.getExtension(_gl, 'EXT_texture_filter_anisotropic'); if (anisotropicExt && this.anisotropic > 1) { _gl.texParameterf(_gl.TEXTURE_2D, anisotropicExt.TEXTURE_MAX_ANISOTROPY_EXT, this.anisotropic); } if (glType === 36193) { var halfFloatExt = glinfo.getExtension(_gl, 'OES_texture_half_float'); if (!halfFloatExt) { glType = glenum.FLOAT; } } if (this.image) { _gl.texImage2D(_gl.TEXTURE_2D, 0, glFormat, glFormat, glType, this.image); } else { if (glFormat <= Texture.COMPRESSED_RGBA_S3TC_DXT5_EXT && glFormat >= Texture.COMPRESSED_RGB_S3TC_DXT1_EXT) { _gl.compressedTexImage2D(_gl.TEXTURE_2D, 0, glFormat, this.width, this.height, 0, this.pixels); } else { _gl.texImage2D(_gl.TEXTURE_2D, 0, glFormat, this.width, this.height, 0, glFormat, glType, this.pixels); } } if (this.useMipmap) { if (this.mipmaps.length) { if (this.image) { for (var i = 0; i < this.mipmaps.length; i++) { if (this.mipmaps[i]) { _gl.texImage2D(_gl.TEXTURE_2D, i, glFormat, glFormat, glType, this.mipmaps[i]); } } } else if (this.pixels) { var width = this.width; var height = this.height; for (var i = 0; i < this.mipmaps.length; i++) { if (this.mipmaps[i]) { if (glFormat <= Texture.COMPRESSED_RGBA_S3TC_DXT5_EXT && glFormat >= Texture.COMPRESSED_RGB_S3TC_DXT1_EXT) { _gl.compressedTexImage2D(_gl.TEXTURE_2D, i, glFormat, width, height, 0, this.mipmaps[i]); } else { _gl.texImage2D(_gl.TEXTURE_2D, i, glFormat, width, height, 0, glFormat, glType, this.mipmaps[i]); } } width /= 2; height /= 2; } } } else if (!this.NPOT && !this.mipmaps.length) { _gl.generateMipmap(_gl.TEXTURE_2D); } } _gl.bindTexture(_gl.TEXTURE_2D, null); }, generateMipmap: function (_gl) { _gl.bindTexture(_gl.TEXTURE_2D, this._cache.get('webgl_texture')); _gl.generateMipmap(_gl.TEXTURE_2D); }, isPowerOfTwo: function () { var width; var height; if (this.image) { width = this.image.width; height = this.image.height; } else { width = this.width; height = this.height; } return (width & width - 1) === 0 && (height & height - 1) === 0; }, isRenderable: function () { if (this.image) { return this.image.nodeName === 'CANVAS' || this.image.complete; } else { return this.width && this.height; } }, bind: function (_gl) { _gl.bindTexture(_gl.TEXTURE_2D, this.getWebGLTexture(_gl)); }, unbind: function (_gl) { _gl.bindTexture(_gl.TEXTURE_2D, null); }, load: function (src) { var image = new Image(); var self = this; image.onload = function () { self.dirty(); self.trigger('success', self); image.onload = null; }; image.onerror = function () { self.trigger('error', self); image.onerror = null; }; image.src = src; this.image = image; return this; } }); return Texture2D; });define('qtek/math/Vector3', [ 'require', '../dep/glmatrix' ], function (require) { 'use strict'; var glMatrix = require('../dep/glmatrix'); var vec3 = glMatrix.vec3; var Vector3 = function (x, y, z) { x = x || 0; y = y || 0; z = z || 0; this._array = vec3.fromValues(x, y, z); this._dirty = true; }; Vector3.prototype = { constructor: Vector3, add: function (b) { vec3.add(this._array, this._array, b._array); this._dirty = true; return this; }, set: function (x, y, z) { this._array[0] = x; this._array[1] = y; this._array[2] = z; this._dirty = true; return this; }, setArray: function (arr) { this._array[0] = arr[0]; this._array[1] = arr[1]; this._array[2] = arr[2]; this._dirty = true; return this; }, clone: function () { return new Vector3(this.x, this.y, this.z); }, copy: function (b) { vec3.copy(this._array, b._array); this._dirty = true; return this; }, cross: function (out,
b) { vec3.cross(out._array, this._array, b._array); out._dirty = true; return this; }, dist: function (b) { return vec3.dist(this._array, b._array); }, distance: function (b) { return vec3.distance(this._array, b._array); }, div: function (b) { vec3.div(this._array, this._array, b._array); this._dirty = true; return this; }, divide: function (b) { vec3.divide(this._array, this._array, b._array); this._dirty = true; return this; }, dot: function (b) { return vec3.dot(this._array, b._array); }, len: function () { return vec3.len(this._array); }, length: function () { return vec3.length(this._array); }, lerp: function (a, b, t) { vec3.lerp(this._array, a._array, b._array, t); this._dirty = true; return this; }, min: function (b) { vec3.min(this._array, this._array, b._array); this._dirty = true; return this; }, max: function (b) { vec3.max(this._array, this._array, b._array); this._dirty = true; return this; }, mul: function (b) { vec3.mul(this._array, this._array, b._array); this._dirty = true; return this; }, multiply: function (b) { vec3.multiply(this._array, this._array, b._array); this._dirty = true; return this; }, negate: function () { vec3.negate(this._array, this._array); this._dirty = true; return this; }, normalize: function () { vec3.normalize(this._array, this._array); this._dirty = true; return this; }, random: function (scale) { vec3.random(this._array, scale); this._dirty = true; return this; }, scale: function (s) { vec3.scale(this._array, this._array, s); this._dirty = true; return this; }, scaleAndAdd: function (b, s) { vec3.scaleAndAdd(this._array, this._array, b._array, s); this._dirty = true; return this; }, sqrDist: function (b) { return vec3.sqrDist(this._array, b._array); }, squaredDistance: function (b) { return vec3.squaredDistance(this._array, b._array); }, sqrLen: function () { return vec3.sqrLen(this._array); }, squaredLength: function () { return vec3.squaredLength(this._array); }, sub: function (b) { vec3.sub(this._array, this._array, b._array); this._dirty = true; return this; }, subtract: function (b) { vec3.subtract(this._array, this._array, b._array); this._dirty = true; return this; }, transformMat3: function (m) { vec3.transformMat3(this._array, this._array, m._array); this._dirty = true; return this; }, transformMat4: function (m) { vec3.transformMat4(this._array, this._array, m._array); this._dirty = true; return this; }, transformQuat: function (q) { vec3.transformQuat(this._array, this._array, q._array); this._dirty = true; return this; }, applyProjection: function (m) { var v = this._array; m = m._array; if (m[15] === 0) { var w = -1 / v[2]; v[0] = m[0] * v[0] * w; v[1] = m[5] * v[1] * w; v[2] = (m[10] * v[2] + m[14]) * w; } else { v[0] = m[0] * v[0] + m[12]; v[1] = m[5] * v[1] + m[13]; v[2] = m[10] * v[2] + m[14]; } this._dirty = true; return this; }, setEulerFromQuaternion: function (q) { }, toString: function () { return '[' + Array.prototype.join.call(this._array, ',') + ']'; } }; if (Object.defineProperty) { var proto = Vector3.prototype; Object.defineProperty(proto, 'x', { get: function () { return this._array[0]; }, set: function (value) { this._array[0] = value; this._dirty = true; } }); Object.defineProperty(proto, 'y', { get: function () { return this._array[1]; }, set: function (value) { this._array[1] = value; this._dirty = true; } }); Object.defineProperty(proto, 'z', { get: function () { return this._array[2]; }, set: function (value) { this._array[2] = value; this._dirty = true; } }); } Vector3.add = function (out, a, b) { 
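// Writes a + b into `out` and flags it dirty; all the static helpers that
// follow use this glMatrix-style out-parameter convention so callers can
// reuse scratch vectors instead of allocating per call.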
vec3.add(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.set = function (out, x, y, z) { vec3.set(out._array, x, y, z); out._dirty = true; }; Vector3.copy = function (out, b) { vec3.copy(out._array, b._array); out._dirty = true; return out; }; Vector3.cross = function (out, a, b) { vec3.cross(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.dist = function (a, b) { return vec3.distance(a._array, b._array); }; Vector3.distance = Vector3.dist; Vector3.div = function (out, a, b) { vec3.divide(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.divide = Vector3.div; Vector3.dot = function (a, b) { return vec3.dot(a._array, b._array); }; Vector3.len = function (b) { return vec3.length(b._array); }; Vector3.lerp = function (out, a, b, t) { vec3.lerp(out._array, a._array, b._array, t); out._dirty = true; return out; }; Vector3.min = function (out, a, b) { vec3.min(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.max = function (out, a, b) { vec3.max(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.mul = function (out, a, b) { vec3.multiply(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.multiply = Vector3.mul; Vector3.negate = function (out, a) { vec3.negate(out._array, a._array); out._dirty = true; return out; }; Vector3.normalize = function (out, a) { vec3.normalize(out._array, a._array); out._dirty = true; return out; }; Vector3.random = function (out, scale) { vec3.random(out._array, scale); out._dirty = true; return out; }; Vector3.scale = function (out, a, scale) { vec3.scale(out._array, a._array, scale); out._dirty = true; return out; }; Vector3.scaleAndAdd = function (out, a, b, scale) { vec3.scaleAndAdd(out._array, a._array, b._array, scale); out._dirty = true; return out; }; Vector3.sqrDist = function (a, b) { return vec3.sqrDist(a._array, b._array); }; Vector3.squaredDistance = Vector3.sqrDist; Vector3.sqrLen = function (a) { return vec3.sqrLen(a._array); }; Vector3.squaredLength = Vector3.sqrLen; Vector3.sub = function (out, a, b) { vec3.subtract(out._array, a._array, b._array); out._dirty = true; return out; }; Vector3.subtract = Vector3.sub; Vector3.transformMat3 = function (out, a, m) { vec3.transformMat3(out._array, a._array, m._array); out._dirty = true; return out; }; Vector3.transformMat4 = function (out, a, m) { vec3.transformMat4(out._array, a._array, m._array); out._dirty = true; return out; }; Vector3.transformQuat = function (out, a, q) { vec3.transformQuat(out._array, a._array, q._array); out._dirty = true; return out; }; Vector3.POSITIVE_X = new Vector3(1, 0, 0); Vector3.NEGATIVE_X = new Vector3(-1, 0, 0); Vector3.POSITIVE_Y = new Vector3(0, 1, 0); Vector3.NEGATIVE_Y = new Vector3(0, -1, 0); Vector3.POSITIVE_Z = new Vector3(0, 0, 1); Vector3.NEGATIVE_Z = new Vector3(0, 0, -1); Vector3.UP = new Vector3(0, 1, 0); Vector3.ZERO = new Vector3(0, 0, 0); return Vector3; });define('qtek/math/Matrix4', [ 'require', '../dep/glmatrix', './Vector3' ], function (require) { 'use strict'; var glMatrix = require('../dep/glmatrix'); var Vector3 = require('./Vector3'); var mat4 = glMatrix.mat4; var vec3 = glMatrix.vec3; var mat3 = glMatrix.mat3; var quat = glMatrix.quat; function makeProperty(n) { return { set: function (value) { this._array[n] = value; this._dirty = true; }, get: function () { return this._array[n]; } }; } var Matrix4 = function () { this._axisX = new Vector3(); this._axisY = new Vector3(); this._axisZ = new Vector3(); 
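// The _axisX/_axisY/_axisZ scratch vectors above are reused by the x/y/z
// column accessors defined further down; the matrix itself is the flat
// column-major glMatrix array created next, with _dirty signalling to
// consumers that the underlying storage has changed.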
this._array = mat4.create(); this._dirty = true; }; Matrix4.prototype = { constructor: Matrix4, adjoint: function () { mat4.adjoint(this._array, this._array); this._dirty = true; return this; }, clone: function () { return new Matrix4().copy(this); }, copy: function (a) { mat4.copy(this._array, a._array); this._dirty = true; return this; }, determinant: function () { return mat4.determinant(this._array); }, fromQuat: function (q) { mat4.fromQuat(this._array, q._array); this._dirty = true; return this; }, fromRotationTranslation: function (q, v) { mat4.fromRotationTranslation(this._array, q._array, v._array); this._dirty = true; return this; }, fromMat2d: function (m2d) { Matrix4.fromMat2d(this, m2d); return this; }, frustum: function (left, right, bottom, top, near, far) { mat4.frustum(this._array, left, right, bottom, top, near, far); this._dirty = true; return this; }, identity: function () { mat4.identity(this._array); this._dirty = true; return this; }, invert: function () { mat4.invert(this._array, this._array); this._dirty = true; return this; }, lookAt: function (eye, center, up) { mat4.lookAt(this._array, eye._array, center._array, up._array); this._dirty = true; return this; }, mul: function (b) { mat4.mul(this._array, this._array, b._array); this._dirty = true; return this; }, mulLeft: function (a) { mat4.mul(this._array, a._array, this._array); this._dirty = true; return this; }, multiply: function (b) { mat4.multiply(this._array, this._array, b._array); this._dirty = true; return this; }, multiplyLeft: function (a) { mat4.multiply(this._array, a._array, this._array); this._dirty = true; return this; }, ortho: function (left, right, bottom, top, near, far) { mat4.ortho(this._array, left, right, bottom, top, near, far); this._dirty = true; return this; }, perspective: function (fovy, aspect, near, far) { mat4.perspective(this._array, fovy, aspect, near, far); this._dirty = true; return this; }, rotate: function (rad, axis) { mat4.rotate(this._array, this._array, rad, axis._array); this._dirty = true; return this; }, rotateX: function (rad) { mat4.rotateX(this._array, this._array, rad); this._dirty = true; return this; }, rotateY: function (rad) { mat4.rotateY(this._array, this._array, rad); this._dirty = true; return this; }, rotateZ: function (rad) { mat4.rotateZ(this._array, this._array, rad); this._dirty = true; return this; }, scale: function (v) { mat4.scale(this._array, this._array, v._array); this._dirty = true; return this; }, translate: function (v) { mat4.translate(this._array, this._array, v._array); this._dirty = true; return this; }, transpose: function () { mat4.transpose(this._array, this._array); this._dirty = true; return this; }, decomposeMatrix: function () { var x = vec3.create(); var y = vec3.create(); var z = vec3.create(); var m3 = mat3.create(); return function (scale, rotation, position) { var el = this._array; vec3.set(x, el[0], el[1], el[2]); vec3.set(y, el[4], el[5], el[6]); vec3.set(z, el[8], el[9], el[10]); var sx = vec3.length(x); var sy = vec3.length(y); var sz = vec3.length(z); if (scale) { scale.x = sx; scale.y = sy; scale.z = sz; scale._dirty = true; } position.set(el[12], el[13], el[14]); mat3.fromMat4(m3, el); mat3.transpose(m3, m3); m3[0] /= sx; m3[1] /= sx; m3[2] /= sx; m3[3] /= sy; m3[4] /= sy; m3[5] /= sy; m3[6] /= sz; m3[7] /= sz; m3[8] /= sz; quat.fromMat3(rotation._array, m3); quat.normalize(rotation._array, rotation._array); rotation._dirty = true; position._dirty = true; }; }(), toString: function () { return '[' + 
Array.prototype.join.call(this._array, ',') + ']'; } }; if (Object.defineProperty) { var proto = Matrix4.prototype; Object.defineProperty(proto, 'z', { get: function () { var el = this._array; this._axisZ.set(el[8], el[9], el[10]); return this._axisZ; }, set: function (v) { var el = this._array; v = v._array; el[8] = v[0]; el[9] = v[1]; el[10] = v[2]; this._dirty = true; } }); Object.defineProperty(proto, 'y', { get: function () { var el = this._array; this._axisY.set(el[4], el[5], el[6]); return this._axisY; }, set: function (v) { var el = this._array; v = v._array; el[4] = v[0]; el[5] = v[1]; el[6] = v[2]; this._dirty = true; } }); Object.defineProperty(proto, 'x', { get: function () { var el = this._array; this._axisX.set(el[0], el[1], el[2]); return this._axisX; }, set: function (v) { var el = this._array; v = v._array; el[0] = v[0]; el[1] = v[1]; el[2] = v[2]; this._dirty = true; } }); } Matrix4.adjoint = function (out, a) { mat4.adjoint(out._array, a._array); out._dirty = true; return out; }; Matrix4.copy = function (out, a) { mat4.copy(out._array, a._array); out._dirty = true; return out; }; Matrix4.determinant = function (a) { return mat4.determinant(a._array); }; Matrix4.identity = function (out) { mat4.identity(out._array); out._dirty = true; return out; }; Matrix4.ortho = function (out, left, right, bottom, top, near, far) { mat4.ortho(out._array, left, right, bottom, top, near, far); out._dirty = true; return out; }; Matrix4.perspective = function (out, fovy, aspect, near, far) { mat4.perspective(out._array, fovy, aspect, near, far); out._dirty = true; return out; }; Matrix4.lookAt = function (out, eye, center, up) { mat4.lookAt(out._array, eye._array, center._array, up._array); out._dirty = true; return out; }; Matrix4.invert = function (out, a) { mat4.invert(out._array, a._array); out._dirty = true; return out; }; Matrix4.mul = function (out, a, b) { mat4.mul(out._array, a._array, b._array); out._dirty = true; return out; }; Matrix4.multiply = Matrix4.mul; Matrix4.fromQuat = function (out, q) { mat4.fromQuat(out._array, q._array); out._dirty = true; return out; }; Matrix4.fromRotationTranslation = function (out, q, v) { mat4.fromRotationTranslation(out._array, q._array, v._array); out._dirty = true; return out; }; Matrix4.fromMat2d = function (m4, m2d) { m4._dirty = true; var m2d = m2d._array; var m4 = m4._array; m4[0] = m2d[0]; m4[4] = m2d[2]; m4[12] = m2d[4]; m4[1] = m2d[1]; m4[5] = m2d[3]; m4[13] = m2d[5]; return m4; }; Matrix4.rotate = function (out, a, rad, axis) { mat4.rotate(out._array, a._array, rad, axis._array); out._dirty = true; return out; }; Matrix4.rotateX = function (out, a, rad) { mat4.rotateX(out._array, a._array, rad); out._dirty = true; return out; }; Matrix4.rotateY = function (out, a, rad) { mat4.rotateY(out._array, a._array, rad); out._dirty = true; return out; }; Matrix4.rotateZ = function (out, a, rad) { mat4.rotateZ(out._array, a._array, rad); out._dirty = true; return out; }; Matrix4.scale = function (out, a, v) { mat4.scale(out._array, a._array, v._array); out._dirty = true; return out; }; Matrix4.transpose = function (out, a) { mat4.transpose(out._array, a._array); out._dirty = true; return out; }; Matrix4.translate = function (out, a, v) { mat4.translate(out._array, a._array, v._array); out._dirty = true; return out; }; return Matrix4; });define('qtek/core/glenum', [], function () { return { DEPTH_BUFFER_BIT: 256, STENCIL_BUFFER_BIT: 1024, COLOR_BUFFER_BIT: 16384, POINTS: 0, LINES: 1, LINE_LOOP: 2, LINE_STRIP: 3, TRIANGLES: 4, TRIANGLE_STRIP: 5, 
TRIANGLE_FAN: 6, ZERO: 0, ONE: 1, SRC_COLOR: 768, ONE_MINUS_SRC_COLOR: 769, SRC_ALPHA: 770, ONE_MINUS_SRC_ALPHA: 771, DST_ALPHA: 772, ONE_MINUS_DST_ALPHA: 773, DST_COLOR: 774, ONE_MINUS_DST_COLOR: 775, SRC_ALPHA_SATURATE: 776, FUNC_ADD: 32774, BLEND_EQUATION: 32777, BLEND_EQUATION_RGB: 32777, BLEND_EQUATION_ALPHA: 34877, FUNC_SUBTRACT: 32778, FUNC_REVERSE_SUBTRACT: 32779, BLEND_DST_RGB: 32968, BLEND_SRC_RGB: 32969, BLEND_DST_ALPHA: 32970, BLEND_SRC_ALPHA: 32971, CONSTANT_COLOR: 32769, ONE_MINUS_CONSTANT_COLOR: 32770, CONSTANT_ALPHA: 32771, ONE_MINUS_CONSTANT_ALPHA: 32772, BLEND_COLOR: 32773, ARRAY_BUFFER: 34962, ELEMENT_ARRAY_BUFFER: 34963, ARRAY_BUFFER_BINDING: 34964, ELEMENT_ARRAY_BUFFER_BINDING: 34965, STREAM_DRAW: 35040, STATIC_DRAW: 35044, DYNAMIC_DRAW: 35048, BUFFER_SIZE: 34660, BUFFER_USAGE: 34661, CURRENT_VERTEX_ATTRIB: 34342, FRONT: 1028, BACK: 1029, FRONT_AND_BACK: 1032, CULL_FACE: 2884, BLEND: 3042, DITHER: 3024, STENCIL_TEST: 2960, DEPTH_TEST: 2929, SCISSOR_TEST: 3089, POLYGON_OFFSET_FILL: 32823, SAMPLE_ALPHA_TO_COVERAGE: 32926, SAMPLE_COVERAGE: 32928, NO_ERROR: 0, INVALID_ENUM: 1280, INVALID_VALUE: 1281, INVALID_OPERATION: 1282, OUT_OF_MEMORY: 1285, CW: 2304, CCW: 2305, LINE_WIDTH: 2849, ALIASED_POINT_SIZE_RANGE: 33901, ALIASED_LINE_WIDTH_RANGE: 33902, CULL_FACE_MODE: 2885, FRONT_FACE: 2886, DEPTH_RANGE: 2928, DEPTH_WRITEMASK: 2930, DEPTH_CLEAR_VALUE: 2931, DEPTH_FUNC: 2932, STENCIL_CLEAR_VALUE: 2961, STENCIL_FUNC: 2962, STENCIL_FAIL: 2964, STENCIL_PASS_DEPTH_FAIL: 2965, STENCIL_PASS_DEPTH_PASS: 2966, STENCIL_REF: 2967, STENCIL_VALUE_MASK: 2963, STENCIL_WRITEMASK: 2968, STENCIL_BACK_FUNC: 34816, STENCIL_BACK_FAIL: 34817, STENCIL_BACK_PASS_DEPTH_FAIL: 34818, STENCIL_BACK_PASS_DEPTH_PASS: 34819, STENCIL_BACK_REF: 36003, STENCIL_BACK_VALUE_MASK: 36004, STENCIL_BACK_WRITEMASK: 36005, VIEWPORT: 2978, SCISSOR_BOX: 3088, COLOR_CLEAR_VALUE: 3106, COLOR_WRITEMASK: 3107, UNPACK_ALIGNMENT: 3317, PACK_ALIGNMENT: 3333, MAX_TEXTURE_SIZE: 3379, MAX_VIEWPORT_DIMS: 3386, SUBPIXEL_BITS: 3408, RED_BITS: 3410, GREEN_BITS: 3411, BLUE_BITS: 3412, ALPHA_BITS: 3413, DEPTH_BITS: 3414, STENCIL_BITS: 3415, POLYGON_OFFSET_UNITS: 10752, POLYGON_OFFSET_FACTOR: 32824, TEXTURE_BINDING_2D: 32873, SAMPLE_BUFFERS: 32936, SAMPLES: 32937, SAMPLE_COVERAGE_VALUE: 32938, SAMPLE_COVERAGE_INVERT: 32939, COMPRESSED_TEXTURE_FORMATS: 34467, DONT_CARE: 4352, FASTEST: 4353, NICEST: 4354, GENERATE_MIPMAP_HINT: 33170, BYTE: 5120, UNSIGNED_BYTE: 5121, SHORT: 5122, UNSIGNED_SHORT: 5123, INT: 5124, UNSIGNED_INT: 5125, FLOAT: 5126, DEPTH_COMPONENT: 6402, ALPHA: 6406, RGB: 6407, RGBA: 6408, LUMINANCE: 6409, LUMINANCE_ALPHA: 6410, UNSIGNED_SHORT_4_4_4_4: 32819, UNSIGNED_SHORT_5_5_5_1: 32820, UNSIGNED_SHORT_5_6_5: 33635, FRAGMENT_SHADER: 35632, VERTEX_SHADER: 35633, MAX_VERTEX_ATTRIBS: 34921, MAX_VERTEX_UNIFORM_VECTORS: 36347, MAX_VARYING_VECTORS: 36348, MAX_COMBINED_TEXTURE_IMAGE_UNITS: 35661, MAX_VERTEX_TEXTURE_IMAGE_UNITS: 35660, MAX_TEXTURE_IMAGE_UNITS: 34930, MAX_FRAGMENT_UNIFORM_VECTORS: 36349, SHADER_TYPE: 35663, DELETE_STATUS: 35712, LINK_STATUS: 35714, VALIDATE_STATUS: 35715, ATTACHED_SHADERS: 35717, ACTIVE_UNIFORMS: 35718, ACTIVE_ATTRIBUTES: 35721, SHADING_LANGUAGE_VERSION: 35724, CURRENT_PROGRAM: 35725, NEVER: 512, LESS: 513, EQUAL: 514, LEQUAL: 515, GREATER: 516, NOTEQUAL: 517, GEQUAL: 518, ALWAYS: 519, KEEP: 7680, REPLACE: 7681, INCR: 7682, DECR: 7683, INVERT: 5386, INCR_WRAP: 34055, DECR_WRAP: 34056, VENDOR: 7936, RENDERER: 7937, VERSION: 7938, NEAREST: 9728, LINEAR: 9729, NEAREST_MIPMAP_NEAREST: 9984, 
LINEAR_MIPMAP_NEAREST: 9985, NEAREST_MIPMAP_LINEAR: 9986, LINEAR_MIPMAP_LINEAR: 9987, TEXTURE_MAG_FILTER: 10240, TEXTURE_MIN_FILTER: 10241, TEXTURE_WRAP_S: 10242, TEXTURE_WRAP_T: 10243, TEXTURE_2D: 3553, TEXTURE: 5890, TEXTURE_CUBE_MAP: 34067, TEXTURE_BINDING_CUBE_MAP: 34068, TEXTURE_CUBE_MAP_POSITIVE_X: 34069, TEXTURE_CUBE_MAP_NEGATIVE_X: 34070, TEXTURE_CUBE_MAP_POSITIVE_Y: 34071, TEXTURE_CUBE_MAP_NEGATIVE_Y: 34072, TEXTURE_CUBE_MAP_POSITIVE_Z: 34073, TEXTURE_CUBE_MAP_NEGATIVE_Z: 34074, MAX_CUBE_MAP_TEXTURE_SIZE: 34076, TEXTURE0: 33984, TEXTURE1: 33985, TEXTURE2: 33986, TEXTURE3: 33987, TEXTURE4: 33988, TEXTURE5: 33989, TEXTURE6: 33990, TEXTURE7: 33991, TEXTURE8: 33992, TEXTURE9: 33993, TEXTURE10: 33994, TEXTURE11: 33995, TEXTURE12: 33996, TEXTURE13: 33997, TEXTURE14: 33998, TEXTURE15: 33999, TEXTURE16: 34000, TEXTURE17: 34001, TEXTURE18: 34002, TEXTURE19: 34003, TEXTURE20: 34004, TEXTURE21: 34005, TEXTURE22: 34006, TEXTURE23: 34007, TEXTURE24: 34008, TEXTURE25: 34009, TEXTURE26: 34010, TEXTURE27: 34011, TEXTURE28: 34012, TEXTURE29: 34013, TEXTURE30: 34014, TEXTURE31: 34015, ACTIVE_TEXTURE: 34016, REPEAT: 10497, CLAMP_TO_EDGE: 33071, MIRRORED_REPEAT: 33648, FLOAT_VEC2: 35664, FLOAT_VEC3: 35665, FLOAT_VEC4: 35666, INT_VEC2: 35667, INT_VEC3: 35668, INT_VEC4: 35669, BOOL: 35670, BOOL_VEC2: 35671, BOOL_VEC3: 35672, BOOL_VEC4: 35673, FLOAT_MAT2: 35674, FLOAT_MAT3: 35675, FLOAT_MAT4: 35676, SAMPLER_2D: 35678, SAMPLER_CUBE: 35680, VERTEX_ATTRIB_ARRAY_ENABLED: 34338, VERTEX_ATTRIB_ARRAY_SIZE: 34339, VERTEX_ATTRIB_ARRAY_STRIDE: 34340, VERTEX_ATTRIB_ARRAY_TYPE: 34341, VERTEX_ATTRIB_ARRAY_NORMALIZED: 34922, VERTEX_ATTRIB_ARRAY_POINTER: 34373, VERTEX_ATTRIB_ARRAY_BUFFER_BINDING: 34975, COMPILE_STATUS: 35713, LOW_FLOAT: 36336, MEDIUM_FLOAT: 36337, HIGH_FLOAT: 36338, LOW_INT: 36339, MEDIUM_INT: 36340, HIGH_INT: 36341, FRAMEBUFFER: 36160, RENDERBUFFER: 36161, RGBA4: 32854, RGB5_A1: 32855, RGB565: 36194, DEPTH_COMPONENT16: 33189, STENCIL_INDEX: 6401, STENCIL_INDEX8: 36168, DEPTH_STENCIL: 34041, RENDERBUFFER_WIDTH: 36162, RENDERBUFFER_HEIGHT: 36163, RENDERBUFFER_INTERNAL_FORMAT: 36164, RENDERBUFFER_RED_SIZE: 36176, RENDERBUFFER_GREEN_SIZE: 36177, RENDERBUFFER_BLUE_SIZE: 36178, RENDERBUFFER_ALPHA_SIZE: 36179, RENDERBUFFER_DEPTH_SIZE: 36180, RENDERBUFFER_STENCIL_SIZE: 36181, FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE: 36048, FRAMEBUFFER_ATTACHMENT_OBJECT_NAME: 36049, FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL: 36050, FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE: 36051, COLOR_ATTACHMENT0: 36064, DEPTH_ATTACHMENT: 36096, STENCIL_ATTACHMENT: 36128, DEPTH_STENCIL_ATTACHMENT: 33306, NONE: 0, FRAMEBUFFER_COMPLETE: 36053, FRAMEBUFFER_INCOMPLETE_ATTACHMENT: 36054, FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: 36055, FRAMEBUFFER_INCOMPLETE_DIMENSIONS: 36057, FRAMEBUFFER_UNSUPPORTED: 36061, FRAMEBUFFER_BINDING: 36006, RENDERBUFFER_BINDING: 36007, MAX_RENDERBUFFER_SIZE: 34024, INVALID_FRAMEBUFFER_OPERATION: 1286, UNPACK_FLIP_Y_WEBGL: 37440, UNPACK_PREMULTIPLY_ALPHA_WEBGL: 37441, CONTEXT_LOST_WEBGL: 37442, UNPACK_COLORSPACE_CONVERSION_WEBGL: 37443, BROWSER_DEFAULT_WEBGL: 37444 }; });define('echarts-x/chart/base3d', [ 'require', 'echarts/config', 'zrender/tool/util', '../component/base3d', '../util/color', 'qtek/core/LRU', 'qtek/math/Vector3', 'qtek/math/Matrix4', '../entity/marker/MarkLine', '../entity/marker/MarkBar', '../entity/marker/MarkPoint', '../entity/marker/LargeMarkPoint' ], function (require) { 'use strict'; var ecConfig = require('echarts/config'); var zrUtil = require('zrender/tool/util'); var ComponentBase3D = 
require('../component/base3d'); var colorUtil = require('../util/color'); var LRUCache = require('qtek/core/LRU'); var Vector3 = require('qtek/math/Vector3'); var Matrix4 = require('qtek/math/Matrix4'); var MarkerCtorMap = { markLine: require('../entity/marker/MarkLine'), markBar: require('../entity/marker/MarkBar'), markPoint: require('../entity/marker/MarkPoint'), largeMarkPoint: require('../entity/marker/LargeMarkPoint') }; function Base3D(ecTheme, messageCenter, zr, option, myChart) { ComponentBase3D.call(this, ecTheme, messageCenter, zr, option, myChart); this._markLineList = []; this._markLineCount = 0; this._markPointList = []; this._markPointCount = 0; this._markBarList = []; this._markBarCount = 0; this._largeMarkPointList = []; this._largeMarkPointCount = 0; this._markList = []; } ; Base3D.prototype = { constructor: Base3D, beforeBuildMark: function () { for (var i = 0; i < this._markList.length; i++) { this._markList[i].clear(); } this._markList.length = 0; this._markBarCount = 0; this._markPointCount = 0; this._markLineCount = 0; this._largeMarkPointCount = 0; }, buildMark: function (seriesIndex, parentNode) { var serie = this.series[seriesIndex]; if (serie.markPoint) { zrUtil.merge(zrUtil.merge(serie.markPoint, this.ecTheme.markPoint || {}), ecConfig.markPoint); if (serie.markPoint.large) { this._buildSingleTypeMarker('largeMarkPoint', seriesIndex, parentNode); } else { this._buildSingleTypeMarker('markPoint', seriesIndex, parentNode); } } if (serie.markLine) { zrUtil.merge(zrUtil.merge(serie.markLine, this.ecTheme.markLine || {}), ecConfig.markLine); this._buildSingleTypeMarker('markLine', seriesIndex, parentNode); } if (serie.markBar) { zrUtil.merge(zrUtil.merge(serie.markBar, this.ecTheme.markBar || {}), ecConfig.markBar); this._buildSingleTypeMarker('markBar', seriesIndex, parentNode); } }, afterBuildMark: function () { for (var i = this._markPointCount; i < this._markPointList.length; i++) { this._disposeSingleSerieMark(this._markPointList[i]); } this._markPointList.length = this._markPointCount; for (var i = this._largeMarkPointCount; i < this._largeMarkPointList.length; i++) { this._disposeSingleSerieMark(this._largeMarkPointList[i]); } this._largeMarkPointList.length = this._largeMarkPointCount; for (var i = this._markLineCount; i < this._markLineList.length; i++) { this._disposeSingleSerieMark(this._markLineList[i]); } this._markLineList.length = this._markLineCount; for (var i = this._markBarCount; i < this._markBarList.length; i++) { this._disposeSingleSerieMark(this._markBarList[i]); } this._markBarList.length = this._markBarCount; }, _disposeSingleSerieMark: function (marker) { var sceneNode = marker.getSceneNode(); if (sceneNode.getParent()) { sceneNode.getParent().remove(sceneNode); } marker.dispose(); }, _buildSingleTypeMarker: function (markerType, seriesIndex, parentNode) { var serie = this.series[seriesIndex]; var list = this['_' + markerType + 'List']; var count = this['_' + markerType + 'Count']; var MarkerCtor = MarkerCtorMap[markerType]; if (!list || !MarkerCtor) { return; } if (!list[count]) { list[count] = new MarkerCtor(this); } var marker = list[count]; marker.setSeries(serie, seriesIndex); var sceneNode = marker.getSceneNode(); if (sceneNode.getParent() !== parentNode) { parentNode.add(sceneNode); } this['_' + markerType + 'Count']++; this._markList.push(marker); }, parseColor: function (colorStr) { if (!colorStr) { return null; } if (colorStr instanceof Array) { return colorStr; } if (!this._colorCache) { this._colorCache = new LRUCache(10); } var 
colorArr = this._colorCache.get(colorStr); if (!colorArr) { colorArr = colorUtil.parse(colorStr); this._colorCache.put(colorStr, colorArr); colorArr[0] /= 255; colorArr[1] /= 255; colorArr[2] /= 255; } return colorArr; }, getMarkCoord: function (seriesIndex, data, point) { point._array[0] = data.x; point._array[1] = data.y; point._array[2] = data.z; }, getMarkPointTransform: function (seriesIndex, data, matrix) { Matrix4.identity(matrix); var position = new Vector3(); this.getMarkCoord(seriesIndex, data, position); var arr = matrix._array; arr[12] = position.x; arr[13] = position.y; arr[14] = position.z; }, getMarkBarPoints: function (seriesIndex, data, start, end) { var barHeight = data.barHeight != null ? data.barHeight : 1; if (typeof barHeight == 'function') { barHeight = barHeight(data); } this.getMarkCoord(seriesIndex, data, start); Vector3.scaleAndAdd(end, end, start, 1); }, getMarkLinePoints: function (seriesIndex, data, p0, p1, p2, p3) { var isCurve = !!p2; if (!isCurve) { p3 = p1; } this.getMarkCoord(seriesIndex, data[0], p0); this.getMarkCoord(seriesIndex, data[1], p3); if (isCurve) { Vector3.copy(p1, p0); Vector3.copy(p2, p3); } }, getSerieLabelText: function (serie, data, name, status) { var formatter = this.deepQuery([ data, serie ], 'itemStyle.' + status + '.label.formatter'); if (!formatter && status === 'emphasis') { formatter = this.deepQuery([ data, serie ], 'itemStyle.normal.label.formatter'); } var value = this.getDataFromOption(data, '-'); if (formatter) { if (typeof formatter === 'function') { return formatter.call(this.myChart, { seriesName: serie.name, series: serie, name: name, value: value, data: data, status: status }); } else if (typeof formatter === 'string') { formatter = formatter.replace('{a}', '{a0}').replace('{b}', '{b0}').replace('{c}', '{c0}').replace('{a0}', serie.name).replace('{b0}', name).replace('{c0}', this.numAddCommas(value)); return formatter; } } else { if (value instanceof Array) { return value[2] != null ? 
this.numAddCommas(value[2]) : value[0] + ' , ' + value[1]; } else { return this.numAddCommas(value); } } }, onlegendSelected: function (param, status) { var legendSelected = param.selected; for (var itemName in this.selectedMap) { if (this.selectedMap[itemName] != legendSelected[itemName]) { status.needRefresh = true; } this.selectedMap[itemName] = legendSelected[itemName]; } return; }, dispose: function () { ComponentBase3D.prototype.dispose.call(this); for (var i = 0; i < this._markList.length; i++) { this._disposeSingleSerieMark(this._markList[i]); } }, onframe: function (deltaTime) { for (var i = 0; i < this._markList.length; i++) { this._markList[i].onframe(deltaTime); } } }; zrUtil.inherits(Base3D, ComponentBase3D); return Base3D; });define('echarts-x/util/OrbitControl', [ 'require', 'zrender/config', 'qtek/math/Vector2' ], function (require) { 'use strict'; var zrConfig = require('zrender/config'); var Vector2 = require('qtek/math/Vector2'); var EVENT = zrConfig.EVENT; var OrbitControl = function (target, zr, layer) { this.zr = zr; this.layer = layer; this.target = target; this.autoRotate = false; this.minZoom = 0.5; this.maxZoom = 1.5; this._zoom = 1; this._rotateY = 0; this._rotateX = 0; this._mouseX = 0; this._mouseY = 0; this._rotateVelocity = new Vector2(); this._zoomSpeed = 0; }; OrbitControl.prototype = { constructor: OrbitControl, init: function () { this.layer.bind(EVENT.MOUSEDOWN, this._mouseDownHandler, this); this.layer.bind(EVENT.MOUSEWHEEL, this._mouseWheelHandler, this); }, dispose: function () { this.layer.unbind(EVENT.MOUSEDOWN, this._mouseDownHandler); this.layer.unbind(EVENT.MOUSEMOVE, this._mouseMoveHandler); this.layer.unbind(EVENT.MOUSEUP, this._mouseUpHandler); this.layer.unbind(EVENT.MOUSEWHEEL, this._mouseWheelHandler); }, update: function (deltaTime) { this._rotateY = (this._rotateVelocity.y + this._rotateY) % (Math.PI * 2); this._rotateX = (this._rotateVelocity.x + this._rotateX) % (Math.PI * 2); this._rotateX = Math.max(Math.min(this._rotateX, Math.PI / 2), -Math.PI / 2); this._zoom += this._zoomSpeed; this._zoom = Math.max(Math.min(this._zoom, this.maxZoom), this.minZoom); this.target.rotation.identity().rotateX(this._rotateX).rotateY(this._rotateY); var zoom = this._zoom; this.target.scale.set(zoom, zoom, zoom); if (this.autoRotate) { this._rotateY -= deltaTime * 0.0001; this.zr.refreshNextFrame(); } else if (this._rotateVelocity.len() > 0 || this._zoomSpeed !== 0) { this.zr.refreshNextFrame(); } var speed = this._rotateVelocity.len(); speed = speed * 0.8; if (speed < 0.0001) { speed = 0; } this._rotateVelocity.normalize().scale(speed); this._zoomSpeed *= 0.8; if (Math.abs(this._zoomSpeed) < 0.001) { this._zoomSpeed = 0; } }, _mouseDownHandler: function (e) { this.layer.bind(EVENT.MOUSEMOVE, this._mouseMoveHandler, this); this.layer.bind(EVENT.MOUSEUP, this._mouseUpHandler, this); e = e.event; this._rotateVelocity.set(0, 0); this._mouseX = e.pageX; this._mouseY = e.pageY; if (this.autoRotate) { this.autoRotate = false; } }, _mouseMoveHandler: function (e) { e = e.event; this._rotateVelocity.y = (e.pageX - this._mouseX) / 500; this._rotateVelocity.x = (e.pageY - this._mouseY) / 500; this._mouseX = e.pageX; this._mouseY = e.pageY; }, _mouseWheelHandler: function (e) { e = e.event; var delta = e.wheelDelta || -e.detail; this._zoomSpeed = delta > 0 ? 
0.05 : -0.05; }, _mouseUpHandler: function () { this.layer.unbind(EVENT.MOUSEMOVE, this._mouseMoveHandler, this); this.layer.unbind(EVENT.MOUSEUP, this._mouseUpHandler, this); } }; return OrbitControl; });define('echarts-x/surface/ZRenderSurface', [ 'require', 'zrender/Storage', 'qtek/Texture2D', 'qtek/math/Vector3', 'qtek/math/Vector2' ], function (require) { var Storage = require('zrender/Storage'); var Texture = require('qtek/Texture2D'); var Vector3 = require('qtek/math/Vector3'); var Vector2 = require('qtek/math/Vector2'); var ZRenderSurface = function (width, height) { this.onrefresh = function () { }; this._storage = new Storage(); this._canvas = document.createElement('canvas'); this._width = width || 512; this._height = height || 512; this._canvas.width = this._width; this._canvas.height = this._height; this._ctx = this._canvas.getContext('2d'); this._texture = new Texture({ image: this._canvas, anisotropic: 32, flipY: false }); this.refreshNextTick = this.refreshNextTick.bind(this); }; ZRenderSurface.prototype = { constructor: ZRenderSurface, backgroundColor: '', backgroundImage: null, addElement: function (el) { this._storage.addRoot(el); }, delElement: function (el) { this._storage.delRoot(el); }, clearElements: function () { this._storage.delRoot(); }, getTexture: function () { return this._texture; }, resize: function (width, height) { if (this._width === width && this._height === height) { return; } this._width = width; this._height = height; this._canvas.width = width; this._canvas.height = height; this.refresh(); }, getWidth: function () { return this._width; }, getHeight: function () { return this._height; }, refresh: function () { var ctx = this._ctx; ctx.clearRect(0, 0, this._width, this._height); if (this.backgroundColor) { ctx.fillStyle = this.backgroundColor; ctx.fillRect(0, 0, this._width, this._height); } var bg = this.backgroundImage; if (bg && bg.width && bg.height) { ctx.drawImage(this.backgroundImage, 0, 0, this._width, this._height); } var list = this._storage.getShapeList(true); for (var i = 0; i < list.length; i++) { var shape = list[i]; if (!shape.invisible) { shape.brush(ctx, shape.isHighlight, this.refreshNextTick); } } this._texture.dirty(); this.onrefresh && this.onrefresh(); }, refreshNextTick: function () { var timeout; return function () { var self = this; if (timeout) { clearTimeout(timeout); } timeout = setTimeout(function () { self.refresh(); }, 16); }; }(), hover: function (e) { var list = this._storage.getShapeList(); var shape = this.pick(e.target, e.face, e.point, list); var needsRefresh = false; for (var i = 0; i < list.length; i++) { list[i].isHighlight = false; list[i].zlevel = 0; if (list[i] == shape && !list[i].isHighlight || list[i] != shape && list[i].isHighlight) { needsRefresh = true; } } if (shape) { shape.isHighlight = true; shape.zlevel = 10; } if (needsRefresh) { this.refresh(); } return shape; }, pick: function () { var p0 = new Vector3(); var p1 = new Vector3(); var p2 = new Vector3(); var uv0 = new Vector2(); var uv1 = new Vector2(); var uv2 = new Vector2(); var uv = new Vector2(); var vCross = new Vector3(); return function (attachedMesh, triangle, points, list) { var geo = attachedMesh.geometry; var position = geo.attributes.position; var texcoord = geo.attributes.texcoord0; position.get(triangle[0], p0); position.get(triangle[1], p1); position.get(triangle[2], p2); texcoord.get(triangle[0], uv0); texcoord.get(triangle[1], uv1); texcoord.get(triangle[2], uv2); Vector3.cross(vCross, p1, p2); var det = Vector3.dot(p0, vCross); 
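// Cramer's rule via scalar triple products: det = p0 . (p1 x p2), and the
// t/u/v weights below express the picked world-space point in the
// (p0, p1, p2) basis. For a point on the triangle these act as barycentric
// weights, which then interpolate the vertex UVs to map the hit back to
// 2D canvas coordinates.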
var t = Vector3.dot(points, vCross) / det; Vector3.cross(vCross, p2, p0); var u = Vector3.dot(points, vCross) / det; Vector3.cross(vCross, p0, p1); var v = Vector3.dot(points, vCross) / det; Vector2.scale(uv, uv0, t); Vector2.scaleAndAdd(uv, uv, uv1, u); Vector2.scaleAndAdd(uv, uv, uv2, v); var x = uv.x * this._width; var y = uv.y * this._height; var list = list || this._storage.getShapeList(); for (var i = list.length - 1; i >= 0; i--) { var shape = list[i]; if (!shape.isSilent() && shape.isCover(x, y)) { return shape; } } }; }() }; return ZRenderSurface; });define('echarts-x/surface/VectorFieldParticleSurface', [ 'require', 'qtek/compositor/Pass', 'qtek/StaticGeometry', 'qtek/Mesh', 'qtek/Material', 'qtek/Shader', 'qtek/Texture2D', 'qtek/core/glenum', 'qtek/camera/Orthographic', 'qtek/Scene', 'qtek/FrameBuffer', '../util/sprite' ], function (require) { var Pass = require('qtek/compositor/Pass'); var StaticGeometry = require('qtek/StaticGeometry'); var Mesh = require('qtek/Mesh'); var Material = require('qtek/Material'); var Shader = require('qtek/Shader'); var Texture2D = require('qtek/Texture2D'); var glenum = require('qtek/core/glenum'); var OrthoCamera = require('qtek/camera/Orthographic'); var Scene = require('qtek/Scene'); var FrameBuffer = require('qtek/FrameBuffer'); var spriteUtil = require('../util/sprite'); var VectorFieldParticleSurface = function (renderer) { this.renderer = renderer; this.motionBlurFactor = 0.99; this.vectorFieldTexture = null; this.particleLife = [ 10, 20 ]; this.particleSizeScaling = 1; this.particleColor = [ 1, 1, 1, 1 ]; this.particleSpeedScaling = 1; this.surfaceTexture = null; this.surfaceMesh = null; this._particlePass = null; this._spawnTexture = null; this._particleTexture0 = null; this._particleTexture1 = null; this._particleMesh = null; this._frameBuffer = null; this._elapsedTime = 0; this._scene = null; this._camera = null; this._motionBlurPass = null; this._thisFrameTexture = null; this._lastFrameTexture = null; }; VectorFieldParticleSurface.prototype = { constructor: VectorFieldParticleSurface, init: function (width, height) { var geometry = new StaticGeometry({ mainAttribute: 'texcoord0' }); var nVertex = width * height; var attributes = geometry.attributes; attributes.texcoord0.init(nVertex); var spawnTextureData = new Float32Array(nVertex * 4); var off = 0; var lifeRange = this.particleLife; for (var i = 0; i < width; i++) { for (var j = 0; j < height; j++, off++) { attributes.texcoord0.value[off * 2] = i / width; attributes.texcoord0.value[off * 2 + 1] = j / height; spawnTextureData[off * 4] = Math.random(); spawnTextureData[off * 4 + 1] = Math.random(); spawnTextureData[off * 4 + 2] = Math.random(); var life = (lifeRange[1] - lifeRange[0]) * Math.random() + lifeRange[0]; spawnTextureData[off * 4 + 3] = life; } } var parameters = { width: width, height: height, type: glenum.FLOAT, minFilter: glenum.NEAREST, magFilter: glenum.NEAREST, wrapS: glenum.REPEAT, wrapT: glenum.REPEAT, useMipmap: false }; this._spawnTexture = new Texture2D(parameters); this._spawnTexture.pixels = spawnTextureData; this._particleTexture0 = new Texture2D(parameters); this._particleTexture1 = new Texture2D(parameters); this._frameBuffer = new FrameBuffer(); this._particlePass = new Pass({ fragment: Shader.source('ecx.vfParticle.particle.fragment') }); this._particlePass.setUniform('velocityTexture', this.vectorFieldTexture); this._particlePass.setUniform('spawnTexture', this._spawnTexture); this._particlePass.setUniform('speedScaling', this.particleSpeedScaling); 
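// GPU particle scheme: each frame the particle pass reads state from one
// float texture and writes the advanced state into the other (swapped in
// _swapTexture below), the points are rendered into _thisFrameTexture, and
// the motion-blur pass blends them over the previous frame (weighted by the
// `percent` uniform, i.e. motionBlurFactor) to leave fading trails along
// the field lines.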
this._motionBlurPass = new Pass({ fragment: Shader.source('ecx.motionBlur.fragment') }); this._motionBlurPass.setUniform('percent', this.motionBlurFactor); var particleMesh = new Mesh({ material: new Material({ shader: new Shader({ vertex: Shader.source('ecx.vfParticle.renderPoints.vertex'), fragment: Shader.source('ecx.vfParticle.renderPoints.fragment') }) }), mode: glenum.POINTS, geometry: geometry }); particleMesh.material.set('spriteTexture', new Texture2D({ image: spriteUtil.makeSimpleSprite(128) })); particleMesh.material.set('sizeScaling', this.particleSizeScaling * this.renderer.getDevicePixelRatio()); particleMesh.material.set('color', this.particleColor); this._particleMesh = particleMesh; this._scene = new Scene(); this._scene.add(this._particleMesh); this._camera = new OrthoCamera(); if (!this.surfaceTexture) { this.surfaceTexture = new Texture2D({ width: 1024, height: 1024 }); } var surfaceWidth = this.surfaceTexture.width; var surfaceHeight = this.surfaceTexture.height; this._lastFrameTexture = new Texture2D({ width: surfaceWidth, height: surfaceHeight }); this._thisFrameTexture = new Texture2D({ width: surfaceWidth, height: surfaceHeight }); }, update: function (deltaTime) { var frameBuffer = this._frameBuffer; var particlePass = this._particlePass; var motionBlurPass = this._motionBlurPass; particlePass.attachOutput(this._particleTexture1); particlePass.setUniform('particleTexture', this._particleTexture0); particlePass.setUniform('deltaTime', deltaTime); particlePass.setUniform('elapsedTime', this._elapsedTime); particlePass.render(this.renderer, frameBuffer); this._particleMesh.material.set('particleTexture', this._particleTexture1); frameBuffer.attach(this.renderer.gl, this._thisFrameTexture); frameBuffer.bind(this.renderer); this.renderer.render(this._scene, this._camera); frameBuffer.unbind(this.renderer); motionBlurPass.attachOutput(this.surfaceTexture); motionBlurPass.setUniform('lastFrame', this._lastFrameTexture); motionBlurPass.setUniform('thisFrame', this._thisFrameTexture); motionBlurPass.render(this.renderer, frameBuffer); this._swapTexture(); if (this.surfaceMesh) { this.surfaceMesh.material.set('diffuseMap', this.surfaceTexture); } this._elapsedTime += deltaTime; }, _swapTexture: function () { var tmp = this._particleTexture0; this._particleTexture0 = this._particleTexture1; this._particleTexture1 = tmp; var tmp = this.surfaceTexture; this.surfaceTexture = this._lastFrameTexture; this._lastFrameTexture = tmp; }, dispose: function () { var renderer = this.renderer; renderer.disposeFrameBuffer(this._frameBuffer); renderer.disposeTexture(this.vectorFieldTexture); renderer.disposeTexture(this._spawnTexture); renderer.disposeTexture(this._particleTexture0); renderer.disposeTexture(this._particleTexture1); renderer.disposeTexture(this._thisFrameTexture); renderer.disposeTexture(this._lastFrameTexture); renderer.disposeScene(this._scene); } }; return VectorFieldParticleSurface; });define('qtek/core/LRU', [ 'require', './LinkedList' ], function (require) { 'use strict'; var LinkedList = require('./LinkedList'); var LRU = function (maxSize) { this._list = new LinkedList(); this._map = {}; this._maxSize = maxSize || 10; }; LRU.prototype.setMaxSize = function (size) { this._maxSize = size; }; LRU.prototype.put = function (key, value) { if (typeof this._map[key] == 'undefined') { var len = this._list.length(); if (len >= this._maxSize && len > 0) { var leastUsedEntry = this._list.head; this._list.remove(leastUsedEntry); delete this._map[leastUsedEntry.key]; } var entry = 
this._list.insert(value); entry.key = key; this._map[key] = entry; } }; LRU.prototype.get = function (key) { var entry = this._map[key]; if (typeof entry != 'undefined') { if (entry !== this._list.tail) { this._list.remove(entry); this._list.insertEntry(entry); } return entry.value; } }; LRU.prototype.remove = function (key) { var entry = this._map[key]; if (typeof entry != 'undefined') { delete this._map[key]; this._list.remove(entry); } }; LRU.prototype.clear = function () { this._list.clear(); this._map = {}; }; return LRU; });define('qtek/math/Quaternion', [ 'require', '../dep/glmatrix' ], function (require) { 'use strict'; var glMatrix = require('../dep/glmatrix'); var quat = glMatrix.quat; var Quaternion = function (x, y, z, w) { x = x || 0; y = y || 0; z = z || 0; w = w === undefined ? 1 : w; this._array = quat.fromValues(x, y, z, w); this._dirty = true; }; Quaternion.prototype = { constructor: Quaternion, add: function (b) { quat.add(this._array, this._array, b._array); this._dirty = true; return this; }, calculateW: function () { quat.calculateW(this._array, this._array); this._dirty = true; return this; }, set: function (x, y, z, w) { this._array[0] = x; this._array[1] = y; this._array[2] = z; this._array[3] = w; this._dirty = true; return this; }, setArray: function (arr) { this._array[0] = arr[0]; this._array[1] = arr[1]; this._array[2] = arr[2]; this._array[3] = arr[3]; this._dirty = true; return this; }, clone: function () { return new Quaternion(this.x, this.y, this.z, this.w); }, conjugate: function () { quat.conjugate(this._array, this._array); this._dirty = true; return this; }, copy: function (b) { quat.copy(this._array, b._array); this._dirty = true; return this; }, dot: function (b) { return quat.dot(this._array, b._array); }, fromMat3: function (m) { quat.fromMat3(this._array, m._array); this._dirty = true; return this; }, fromMat4: function () { var mat3 = glMatrix.mat3; var m3 = mat3.create(); return function (m) { mat3.fromMat4(m3, m._array); mat3.transpose(m3, m3); quat.fromMat3(this._array, m3); this._dirty = true; return this; }; }(), identity: function () { quat.identity(this._array); this._dirty = true; return this; }, invert: function () { quat.invert(this._array, this._array); this._dirty = true; return this; }, len: function () { return quat.len(this._array); }, length: function () { return quat.length(this._array); }, lerp: function (a, b, t) { quat.lerp(this._array, a._array, b._array, t); this._dirty = true; return this; }, mul: function (b) { quat.mul(this._array, this._array, b._array); this._dirty = true; return this; }, mulLeft: function (a) { quat.multiply(this._array, a._array, this._array); this._dirty = true; return this; }, multiply: function (b) { quat.multiply(this._array, this._array, b._array); this._dirty = true; return this; }, multiplyLeft: function (a) { quat.multiply(this._array, a._array, this._array); this._dirty = true; return this; }, normalize: function () { quat.normalize(this._array, this._array); this._dirty = true; return this; }, rotateX: function (rad) { quat.rotateX(this._array, this._array, rad); this._dirty = true; return this; }, rotateY: function (rad) { quat.rotateY(this._array, this._array, rad); this._dirty = true; return this; }, rotateZ: function (rad) { quat.rotateZ(this._array, this._array, rad); this._dirty = true; return this; }, rotationTo: function (a, b) { quat.rotationTo(this._array, a._array, b._array); this._dirty = true; return this; }, setAxes: function (view, right, up) { quat.setAxes(this._array, 
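// Builds the orientation from three axes (view, right, up), which glmatrix's
// quat.setAxes assumes to be normalized and mutually orthogonal.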
view._array, right._array, up._array); this._dirty = true; return this; }, setAxisAngle: function (axis, rad) { quat.setAxisAngle(this._array, axis._array, rad); this._dirty = true; return this; }, slerp: function (a, b, t) { quat.slerp(this._array, a._array, b._array, t); this._dirty = true; return this; }, sqrLen: function () { return quat.sqrLen(this._array); }, squaredLength: function () { return quat.squaredLength(this._array); }, setFromEuler: function (v) { }, toString: function () { return '[' + Array.prototype.join.call(this._array, ',') + ']'; } }; if (Object.defineProperty) { var proto = Quaternion.prototype; Object.defineProperty(proto, 'x', { get: function () { return this._array[0]; }, set: function (value) { this._array[0] = value; this._dirty = true; } }); Object.defineProperty(proto, 'y', { get: function () { return this._array[1]; }, set: function (value) { this._array[1] = value; this._dirty = true; } }); Object.defineProperty(proto, 'z', { get: function () { return this._array[2]; }, set: function (value) { this._array[2] = value; this._dirty = true; } }); Object.defineProperty(proto, 'w', { get: function () { return this._array[3]; }, set: function (value) { this._array[3] = value; this._dirty = true; } }); } Quaternion.add = function (out, a, b) { quat.add(out._array, a._array, b._array); out._dirty = true; return out; }; Quaternion.set = function (out, x, y, z, w) { quat.set(out._array, x, y, z, w); out._dirty = true; }; Quaternion.copy = function (out, b) { quat.copy(out._array, b._array); out._dirty = true; return out; }; Quaternion.calculateW = function (out, a) { quat.calculateW(out._array, a._array); out._dirty = true; return out; }; Quaternion.conjugate = function (out, a) { quat.conjugate(out._array, a._array); out._dirty = true; return out; }; Quaternion.identity = function (out) { quat.identity(out._array); out._dirty = true; return out; }; Quaternion.invert = function (out, a) { quat.invert(out._array, a._array); out._dirty = true; return out; }; Quaternion.dot = function (a, b) { return quat.dot(a._array, b._array); }; Quaternion.len = function (a) { return quat.length(a._array); }; Quaternion.lerp = function (out, a, b, t) { quat.lerp(out._array, a._array, b._array, t); out._dirty = true; return out; }; Quaternion.slerp = function (out, a, b, t) { quat.slerp(out._array, a._array, b._array, t); out._dirty = true; return out; }; Quaternion.mul = function (out, a, b) { quat.multiply(out._array, a._array, b._array); out._dirty = true; return out; }; Quaternion.multiply = Quaternion.mul; Quaternion.rotateX = function (out, a, rad) { quat.rotateX(out._array, a._array, rad); out._dirty = true; return out; }; Quaternion.rotateY = function (out, a, rad) { quat.rotateY(out._array, a._array, rad); out._dirty = true; return out; }; Quaternion.rotateZ = function (out, a, rad) { quat.rotateZ(out._array, a._array, rad); out._dirty = true; return out; }; Quaternion.setAxisAngle = function (out, axis, rad) { quat.setAxisAngle(out._array, axis._array, rad); out._dirty = true; return out; }; Quaternion.normalize = function (out, a) { quat.normalize(out._array, a._array); out._dirty = true; return out; }; Quaternion.sqrLen = function (a) { return quat.sqrLen(a._array); }; Quaternion.squaredLength = Quaternion.sqrLen; Quaternion.fromMat3 = function (out, m) { quat.fromMat3(out._array, m._array); out._dirty = true; return out; }; Quaternion.setAxes = function (out, view, right, up) { quat.setAxes(out._array, view._array, right._array, up._array); out._dirty = true; return out; 
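// These static variants mirror the instance API in glmatrix style: each one
// writes into an explicit `out` quaternion and flags it `_dirty` so state
// derived from it (e.g. a cached rotation matrix) can be refreshed lazily.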
}; Quaternion.rotationTo = function (out, a, b) { quat.rotationTo(out._array, a._array, b._array); out._dirty = true; return out; }; return Quaternion; });define('qtek/Renderable', [ 'require', './Node', './core/glenum', './core/glinfo', './DynamicGeometry' ], function (require) { 'use strict'; var Node = require('./Node'); var glenum = require('./core/glenum'); var glinfo = require('./core/glinfo'); var DynamicGeometry = require('./DynamicGeometry'); var prevDrawID = 0; var prevDrawIndicesBuffer = null; var prevDrawIsUseFace = true; var currentDrawID; var RenderInfo = function () { this.faceNumber = 0; this.vertexNumber = 0; this.drawCallNumber = 0; }; function VertexArrayObject(availableAttributes, availableAttributeSymbols, indicesBuffer) { this.availableAttributes = availableAttributes; this.availableAttributeSymbols = availableAttributeSymbols; this.indicesBuffer = indicesBuffer; this.vao = null; } var Renderable = Node.derive({ material: null, geometry: null, mode: glenum.TRIANGLES, _drawCache: null, _renderInfo: null }, function () { this._drawCache = {}; this._renderInfo = new RenderInfo(); }, { lineWidth: 1, culling: true, cullFace: glenum.BACK, frontFace: glenum.CCW, frustumCulling: true, receiveShadow: true, castShadow: true, ignorePicking: false, isRenderable: function () { return this.geometry && this.material && this.material.shader && this.visible; }, render: function (_gl, globalMaterial) { var material = globalMaterial || this.material; var shader = material.shader; var geometry = this.geometry; var glDrawMode = this.mode; var nVertex = geometry.getVertexNumber(); var isUseFace = geometry.isUseFace(); var uintExt = glinfo.getExtension(_gl, 'OES_element_index_uint'); var useUintExt = uintExt && nVertex > 65535; var indicesType = useUintExt ? 
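// WebGL 1 only guarantees 16-bit element indices; OES_element_index_uint
// unlocks 32-bit indices, used here once a geometry exceeds 65535 vertices
// (without the extension such geometries must be split into chunks).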
_gl.UNSIGNED_INT : _gl.UNSIGNED_SHORT; var vaoExt = glinfo.getExtension(_gl, 'OES_vertex_array_object'); var isStatic = !geometry.dynamic; var renderInfo = this._renderInfo; renderInfo.vertexNumber = nVertex; renderInfo.faceNumber = 0; renderInfo.drawCallNumber = 0; var drawHashChanged = false; currentDrawID = _gl.__GLID__ + '-' + geometry.__GUID__ + '-' + shader.__GUID__; if (currentDrawID !== prevDrawID) { drawHashChanged = true; } else { if (geometry instanceof DynamicGeometry && (nVertex > 65535 && !uintExt) && isUseFace || vaoExt && isStatic || geometry._cache.isDirty()) { drawHashChanged = true; } } prevDrawID = currentDrawID; if (!drawHashChanged) { if (prevDrawIsUseFace) { _gl.drawElements(glDrawMode, prevDrawIndicesBuffer.count, indicesType, 0); renderInfo.faceNumber = prevDrawIndicesBuffer.count / 3; } else { _gl.drawArrays(glDrawMode, 0, nVertex); } renderInfo.drawCallNumber = 1; } else { var vaoList = this._drawCache[currentDrawID]; if (!vaoList) { var chunks = geometry.getBufferChunks(_gl); if (!chunks) { return; } vaoList = []; for (var c = 0; c < chunks.length; c++) { var chunk = chunks[c]; var attributeBuffers = chunk.attributeBuffers; var indicesBuffer = chunk.indicesBuffer; var availableAttributes = []; var availableAttributeSymbols = []; for (var a = 0; a < attributeBuffers.length; a++) { var attributeBufferInfo = attributeBuffers[a]; var name = attributeBufferInfo.name; var semantic = attributeBufferInfo.semantic; var symbol; if (semantic) { var semanticInfo = shader.attribSemantics[semantic]; symbol = semanticInfo && semanticInfo.symbol; } else { symbol = name; } if (symbol && shader.attributeTemplates[symbol]) { availableAttributes.push(attributeBufferInfo); availableAttributeSymbols.push(symbol); } } var vao = new VertexArrayObject(availableAttributes, availableAttributeSymbols, indicesBuffer); vaoList.push(vao); } if (isStatic) { this._drawCache[currentDrawID] = vaoList; } } for (var i = 0; i < vaoList.length; i++) { var vao = vaoList[i]; var needsBindAttributes = true; if (vaoExt && isStatic) { if (vao.vao == null) { vao.vao = vaoExt.createVertexArrayOES(); } else { needsBindAttributes = false; } vaoExt.bindVertexArrayOES(vao.vao); } var availableAttributes = vao.availableAttributes; var indicesBuffer = vao.indicesBuffer; if (needsBindAttributes) { var locationList = shader.enableAttributes(_gl, vao.availableAttributeSymbols, vaoExt && isStatic && vao.vao); for (var a = 0; a < availableAttributes.length; a++) { var location = locationList[a]; if (location === -1) { continue; } var attributeBufferInfo = availableAttributes[a]; var buffer = attributeBufferInfo.buffer; var size = attributeBufferInfo.size; var glType; switch (attributeBufferInfo.type) { case 'float': glType = _gl.FLOAT; break; case 'byte': glType = _gl.BYTE; break; case 'ubyte': glType = _gl.UNSIGNED_BYTE; break; case 'short': glType = _gl.SHORT; break; case 'ushort': glType = _gl.UNSIGNED_SHORT; break; default: glType = _gl.FLOAT; break; } _gl.bindBuffer(_gl.ARRAY_BUFFER, buffer); _gl.vertexAttribPointer(location, size, glType, false, 0, 0); } } if (glDrawMode == glenum.LINES || glDrawMode == glenum.LINE_STRIP || glDrawMode == glenum.LINE_LOOP) { _gl.lineWidth(this.lineWidth); } prevDrawIndicesBuffer = indicesBuffer; prevDrawIsUseFace = geometry.isUseFace(); if (prevDrawIsUseFace) { if (needsBindAttributes) { _gl.bindBuffer(_gl.ELEMENT_ARRAY_BUFFER, indicesBuffer.buffer); } _gl.drawElements(glDrawMode, indicesBuffer.count, indicesType, 0); renderInfo.faceNumber += indicesBuffer.count / 3; } else { 
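// Non-indexed geometries fall back to drawArrays over the full vertex range.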
_gl.drawArrays(glDrawMode, 0, nVertex); } if (vaoExt && isStatic) { vaoExt.bindVertexArrayOES(null); } renderInfo.drawCallNumber++; } } return renderInfo; }, clone: function () { var properties = [ 'castShadow', 'receiveShadow', 'mode', 'culling', 'cullFace', 'frontFace', 'frustumCulling' ]; return function () { var renderable = Node.prototype.clone.call(this); renderable.geometry = this.geometry; renderable.material = this.material; for (var i = 0; i < properties.length; i++) { var name = properties[i]; if (renderable[name] !== this[name]) { renderable[name] = this[name]; } } return renderable; }; }() }); Renderable.beforeFrame = function () { prevDrawID = 0; }; Renderable.POINTS = glenum.POINTS; Renderable.LINES = glenum.LINES; Renderable.LINE_LOOP = glenum.LINE_LOOP; Renderable.LINE_STRIP = glenum.LINE_STRIP; Renderable.TRIANGLES = glenum.TRIANGLES; Renderable.TRIANGLE_STRIP = glenum.TRIANGLE_STRIP; Renderable.TRIANGLE_FAN = glenum.TRIANGLE_FAN; Renderable.BACK = glenum.BACK; Renderable.FRONT = glenum.FRONT; Renderable.FRONT_AND_BACK = glenum.FRONT_AND_BACK; Renderable.CW = glenum.CW; Renderable.CCW = glenum.CCW; Renderable.RenderInfo = RenderInfo; return Renderable; });define('qtek/core/glinfo', [], function () { 'use strict'; var EXTENSION_LIST = [ 'OES_texture_float', 'OES_texture_half_float', 'OES_texture_float_linear', 'OES_texture_half_float_linear', 'OES_standard_derivatives', 'OES_vertex_array_object', 'OES_element_index_uint', 'WEBGL_compressed_texture_s3tc', 'WEBGL_depth_texture', 'EXT_texture_filter_anisotropic', 'WEBGL_draw_buffers' ]; var PARAMETER_NAMES = [ 'MAX_TEXTURE_SIZE', 'MAX_CUBE_MAP_TEXTURE_SIZE' ]; var extensions = {}; var parameters = {}; var glinfo = { initialize: function (_gl) { var glid = _gl.__GLID__; if (extensions[glid]) { return; } extensions[glid] = {}; parameters[glid] = {}; for (var i = 0; i < EXTENSION_LIST.length; i++) { var extName = EXTENSION_LIST[i]; this._createExtension(_gl, extName); } for (var i = 0; i < PARAMETER_NAMES.length; i++) { var name = PARAMETER_NAMES[i]; parameters[glid][name] = _gl.getParameter(_gl[name]); } }, getExtension: function (_gl, name) { var glid = _gl.__GLID__; if (extensions[glid]) { if (typeof extensions[glid][name] == 'undefined') { this._createExtension(_gl, name); } return extensions[glid][name]; } }, getParameter: function (_gl, name) { var glid = _gl.__GLID__; if (parameters[glid]) { return parameters[glid][name]; } }, dispose: function (_gl) { delete extensions[_gl.__GLID__]; delete parameters[_gl.__GLID__]; }, _createExtension: function (_gl, name) { var ext = _gl.getExtension(name); if (!ext) { ext = _gl.getExtension('MOZ_' + name); } if (!ext) { ext = _gl.getExtension('WEBKIT_' + name); } extensions[_gl.__GLID__][name] = ext; } }; return glinfo; });define('qtek/DynamicGeometry', [ 'require', './Geometry', './math/BoundingBox', './core/glenum', './core/glinfo', './dep/glmatrix' ], function (require) { 'use strict'; var Geometry = require('./Geometry'); var BoundingBox = require('./math/BoundingBox'); var glenum = require('./core/glenum'); var glinfo = require('./core/glinfo'); var glMatrix = require('./dep/glmatrix'); var vec3 = glMatrix.vec3; var mat4 = glMatrix.mat4; var arrSlice = Array.prototype.slice; var DynamicGeometry = Geometry.derive(function () { return { attributes: { position: new Geometry.Attribute('position', 'float', 3, 'POSITION', true), texcoord0: new Geometry.Attribute('texcoord0', 'float', 2, 'TEXCOORD_0', true), texcoord1: new Geometry.Attribute('texcoord1', 'float', 2, 'TEXCOORD_1', true), 
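// Dynamic attributes (trailing `true`) keep one small JS array per vertex so
// they can grow freely; they are flattened into typed arrays at buffer-build
// time. The upper-case names are semantics used to bind shader attributes.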
normal: new Geometry.Attribute('normal', 'float', 3, 'NORMAL', true), tangent: new Geometry.Attribute('tangent', 'float', 4, 'TANGENT', true), color: new Geometry.Attribute('color', 'float', 4, 'COLOR', true), weight: new Geometry.Attribute('weight', 'float', 3, 'WEIGHT', true), joint: new Geometry.Attribute('joint', 'float', 4, 'JOINT', true), barycentric: new Geometry.Attribute('barycentric', 'float', 3, null, true) }, dynamic: true, hint: glenum.DYNAMIC_DRAW, faces: [], _enabledAttributes: null, _arrayChunks: [] }; }, { updateBoundingBox: function () { if (!this.boundingBox) { this.boundingBox = new BoundingBox(); } this.boundingBox.updateFromVertices(this.attributes.position.value); }, dirty: function (field) { if (!field) { this.dirty('indices'); for (var name in this.attributes) { this.dirty(name); } return; } this._cache.dirtyAll(field); this._cache.dirtyAll(); this._enabledAttributes = null; }, getVertexNumber: function () { var mainAttribute = this.attributes[this.mainAttribute]; if (!mainAttribute || !mainAttribute.value) { return 0; } return mainAttribute.value.length; }, getFaceNumber: function () { return this.faces.length; }, getFace: function (idx, out) { if (idx < this.getFaceNumber() && idx >= 0) { if (!out) { out = vec3.create(); } vec3.copy(out, this.faces[idx]); return out; } }, isUseFace: function () { return this.useFace && this.faces.length > 0; }, isSplitted: function () { return this.getVertexNumber() > 65535; }, createAttribute: function (name, type, size, semantic) { var attrib = new Geometry.Attribute(name, type, size, semantic, true); this.attributes[name] = attrib; this._attributeList.push(name); return attrib; }, removeAttribute: function (name) { var idx = this._attributeList.indexOf(name); if (idx >= 0) { this._attributeList.splice(idx, 1); delete this.attributes[name]; return true; } return false; }, getEnabledAttributes: function () { if (this._enabledAttributes) { return this._enabledAttributes; } var result = {}; var nVertex = this.getVertexNumber(); for (var i = 0; i < this._attributeList.length; i++) { var name = this._attributeList[i]; var attrib = this.attributes[name]; if (attrib.value.length) { if (attrib.value.length === nVertex) { result[name] = attrib; } } } this._enabledAttributes = result; return result; }, _getDirtyAttributes: function () { var attributes = this.getEnabledAttributes(); if (this._cache.miss('chunks')) { return attributes; } else { var result = {}; var noDirtyAttributes = true; for (var name in attributes) { if (this._cache.isDirty(name)) { result[name] = attributes[name]; noDirtyAttributes = false; } } if (!noDirtyAttributes) { return result; } } }, getChunkNumber: function () { return this._arrayChunks.length; }, getBufferChunks: function (_gl) { this._cache.use(_gl.__GLID__); if (this._cache.isDirty()) { var dirtyAttributes = this._getDirtyAttributes(); var isFacesDirty = this._cache.isDirty('indices'); isFacesDirty = isFacesDirty && this.isUseFace(); if (dirtyAttributes) { this._updateAttributesAndIndicesArrays(dirtyAttributes, isFacesDirty, glinfo.getExtension(_gl, 'OES_element_index_uint') != null); this._updateBuffer(_gl, dirtyAttributes, isFacesDirty); for (var name in dirtyAttributes) { this._cache.fresh(name); } this._cache.fresh('indices'); this._cache.fresh(); } } return this._cache.get('chunks'); }, _updateAttributesAndIndicesArrays: function (attributes, isFacesDirty, useUintExtension) { var self = this; var nVertex = this.getVertexNumber(); var verticesReorganizedMap = []; var reorganizedFaces = []; var 
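// Maps each attribute's declared type to the typed-array constructor used
// when flattening per-vertex JS arrays into GPU-uploadable buffers.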
ArrayConstructors = {}; for (var name in attributes) { switch (attributes[name].type) { case 'byte': ArrayConstructors[name] = Int8Array; break; case 'ubyte': ArrayConstructors[name] = Uint8Array; break; case 'short': ArrayConstructors[name] = Int16Array; break; case 'ushort': ArrayConstructors[name] = Uint16Array; break; default: ArrayConstructors[name] = Float32Array; break; } } var newChunk = function (chunkIdx) { if (self._arrayChunks[chunkIdx]) { return self._arrayChunks[chunkIdx]; } var chunk = { attributeArrays: {}, indicesArray: null }; for (var name in attributes) { chunk.attributeArrays[name] = null; } for (var i = 0; i < nVertex; i++) { verticesReorganizedMap[i] = -1; } self._arrayChunks.push(chunk); return chunk; }; var attribNameList = Object.keys(attributes); if (nVertex > 65535 && this.isUseFace() && !useUintExtension) { var chunkIdx = 0; var currentChunk; var chunkFaceStart = [0]; var vertexUseCount = []; for (i = 0; i < nVertex; i++) { vertexUseCount[i] = -1; verticesReorganizedMap[i] = -1; } if (isFacesDirty) { for (i = 0; i < this.faces.length; i++) { reorganizedFaces[i] = [ 0, 0, 0 ]; } } currentChunk = newChunk(chunkIdx); var vertexCount = 0; for (var i = 0; i < this.faces.length; i++) { var face = this.faces[i]; var reorganizedFace = reorganizedFaces[i]; if (vertexCount + 3 > 65535) { chunkIdx++; chunkFaceStart[chunkIdx] = i; vertexCount = 0; currentChunk = newChunk(chunkIdx); } for (var f = 0; f < 3; f++) { var ii = face[f]; var isNew = verticesReorganizedMap[ii] === -1; for (var k = 0; k < attribNameList.length; k++) { var name = attribNameList[k]; var attribArray = currentChunk.attributeArrays[name]; var values = attributes[name].value; var size = attributes[name].size; if (!attribArray) { attribArray = currentChunk.attributeArrays[name] = []; } if (isNew) { if (size === 1) { attribArray[vertexCount] = values[ii]; } else { for (var j = 0; j < size; j++) { attribArray[vertexCount * size + j] = values[ii][j]; } } } } if (isNew) { verticesReorganizedMap[ii] = vertexCount; reorganizedFace[f] = vertexCount; vertexCount++; } else { reorganizedFace[f] = verticesReorganizedMap[ii]; } } } for (var c = 0; c < this._arrayChunks.length; c++) { var chunk = this._arrayChunks[c]; for (var name in chunk.attributeArrays) { var array = chunk.attributeArrays[name]; if (array instanceof Array) { chunk.attributeArrays[name] = new ArrayConstructors[name](array); } } } if (isFacesDirty) { var chunkStart, chunkEnd, cursor, chunk; for (var c = 0; c < this._arrayChunks.length; c++) { chunkStart = chunkFaceStart[c]; chunkEnd = chunkFaceStart[c + 1] || this.faces.length; cursor = 0; chunk = this._arrayChunks[c]; var indicesArray = chunk.indicesArray; if (!indicesArray) { indicesArray = chunk.indicesArray = new Uint16Array((chunkEnd - chunkStart) * 3); } for (var i = chunkStart; i < chunkEnd; i++) { indicesArray[cursor++] = reorganizedFaces[i][0]; indicesArray[cursor++] = reorganizedFaces[i][1]; indicesArray[cursor++] = reorganizedFaces[i][2]; } } } } else { var chunk = newChunk(0); if (isFacesDirty) { var indicesArray = chunk.indicesArray; var nFace = this.faces.length; if (!indicesArray || nFace * 3 !== indicesArray.length) { var ArrayCtor = nVertex > 65535 ? 
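// Single-chunk path: 32-bit indices are safe here because the caller only
// passes useUintExtension when OES_element_index_uint is available.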
Uint32Array : Uint16Array; indicesArray = chunk.indicesArray = new ArrayCtor(this.faces.length * 3); } var cursor = 0; for (var i = 0; i < nFace; i++) { indicesArray[cursor++] = this.faces[i][0]; indicesArray[cursor++] = this.faces[i][1]; indicesArray[cursor++] = this.faces[i][2]; } } for (var name in attributes) { var values = attributes[name].value; var type = attributes[name].type; var size = attributes[name].size; var attribArray = chunk.attributeArrays[name]; var arrSize = nVertex * size; if (!attribArray || attribArray.length !== arrSize) { attribArray = new ArrayConstructors[name](arrSize); chunk.attributeArrays[name] = attribArray; } if (size === 1) { for (var i = 0; i < values.length; i++) { attribArray[i] = values[i]; } } else { var cursor = 0; for (var i = 0; i < values.length; i++) { for (var j = 0; j < size; j++) { attribArray[cursor++] = values[i][j]; } } } } } }, _updateBuffer: function (_gl, dirtyAttributes, isFacesDirty) { var chunks = this._cache.get('chunks'); var firstUpdate = false; if (!chunks) { chunks = []; for (var i = 0; i < this._arrayChunks.length; i++) { chunks[i] = { attributeBuffers: [], indicesBuffer: null }; } this._cache.put('chunks', chunks); firstUpdate = true; } for (var cc = 0; cc < this._arrayChunks.length; cc++) { var chunk = chunks[cc]; if (!chunk) { chunk = chunks[cc] = { attributeBuffers: [], indicesBuffer: null }; } var attributeBuffers = chunk.attributeBuffers; var indicesBuffer = chunk.indicesBuffer; var arrayChunk = this._arrayChunks[cc]; var attributeArrays = arrayChunk.attributeArrays; var indicesArray = arrayChunk.indicesArray; var count = 0; var prevSearchIdx = 0; for (var name in dirtyAttributes) { var attribute = dirtyAttributes[name]; var type = attribute.type; var semantic = attribute.semantic; var size = attribute.size; var bufferInfo; if (!firstUpdate) { for (var i = prevSearchIdx; i < attributeBuffers.length; i++) { if (attributeBuffers[i].name === name) { bufferInfo = attributeBuffers[i]; prevSearchIdx = i + 1; break; } } if (!bufferInfo) { for (var i = prevSearchIdx - 1; i >= 0; i--) { if (attributeBuffers[i].name === name) { bufferInfo = attributeBuffers[i]; prevSearchIdx = i; break; } } } } var buffer; if (bufferInfo) { buffer = bufferInfo.buffer; } else { buffer = _gl.createBuffer(); } _gl.bindBuffer(_gl.ARRAY_BUFFER, buffer); _gl.bufferData(_gl.ARRAY_BUFFER, attributeArrays[name], this.hint); attributeBuffers[count++] = new Geometry.AttributeBuffer(name, type, buffer, size, semantic); } attributeBuffers.length = count; if (isFacesDirty) { if (!indicesBuffer) { indicesBuffer = new Geometry.IndicesBuffer(_gl.createBuffer()); chunk.indicesBuffer = indicesBuffer; } indicesBuffer.count = indicesArray.length; _gl.bindBuffer(_gl.ELEMENT_ARRAY_BUFFER, indicesBuffer.buffer); _gl.bufferData(_gl.ELEMENT_ARRAY_BUFFER, indicesArray, this.hint); } } }, generateVertexNormals: function () { var faces = this.faces; var len = faces.length; var positions = this.attributes.position.value; var normals = this.attributes.normal.value; var normal = vec3.create(); var v21 = vec3.create(), v32 = vec3.create(); for (var i = 0; i < normals.length; i++) { vec3.set(normals[i], 0, 0, 0); } for (var i = normals.length; i < positions.length; i++) { normals[i] = [ 0, 0, 0 ]; } for (var f = 0; f < len; f++) { var face = faces[f]; var i1 = face[0]; var i2 = face[1]; var i3 = face[2]; var p1 = positions[i1]; var p2 = positions[i2]; var p3 = positions[i3]; vec3.sub(v21, p1, p2); vec3.sub(v32, p2, p3); vec3.cross(normal, v21, v32); vec3.add(normals[i1], 
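// Accumulate the un-normalized face normal into each of the triangle's three
// vertices; its length is proportional to the face area, so larger faces
// weigh more. The sums are normalized in the loop that follows.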
normals[i1], normal); vec3.add(normals[i2], normals[i2], normal); vec3.add(normals[i3], normals[i3], normal); } for (var i = 0; i < normals.length; i++) { vec3.normalize(normals[i], normals[i]); } }, generateFaceNormals: function () { if (!this.isUniqueVertex()) { this.generateUniqueVertex(); } var faces = this.faces; var len = faces.length; var positions = this.attributes.position.value; var normals = this.attributes.normal.value; var normal = vec3.create(); var v21 = vec3.create(), v32 = vec3.create(); var isCopy = normals.length === positions.length; for (var i = 0; i < len; i++) { var face = faces[i]; var i1 = face[0]; var i2 = face[1]; var i3 = face[2]; var p1 = positions[i1]; var p2 = positions[i2]; var p3 = positions[i3]; vec3.sub(v21, p1, p2); vec3.sub(v32, p2, p3); vec3.cross(normal, v21, v32); if (isCopy) { vec3.copy(normals[i1], normal); vec3.copy(normals[i2], normal); vec3.copy(normals[i3], normal); } else { normals[i1] = normals[i2] = normals[i3] = arrSlice.call(normal); } } }, generateTangents: function () { var texcoords = this.attributes.texcoord0.value; var positions = this.attributes.position.value; var tangents = this.attributes.tangent.value; var normals = this.attributes.normal.value; var tan1 = []; var tan2 = []; var nVertex = this.getVertexNumber(); for (var i = 0; i < nVertex; i++) { tan1[i] = [ 0, 0, 0 ]; tan2[i] = [ 0, 0, 0 ]; } var sdir = [ 0, 0, 0 ]; var tdir = [ 0, 0, 0 ]; for (var i = 0; i < this.faces.length; i++) { var face = this.faces[i], i1 = face[0], i2 = face[1], i3 = face[2], st1 = texcoords[i1], st2 = texcoords[i2], st3 = texcoords[i3], p1 = positions[i1], p2 = positions[i2], p3 = positions[i3]; var x1 = p2[0] - p1[0], x2 = p3[0] - p1[0], y1 = p2[1] - p1[1], y2 = p3[1] - p1[1], z1 = p2[2] - p1[2], z2 = p3[2] - p1[2]; var s1 = st2[0] - st1[0], s2 = st3[0] - st1[0], t1 = st2[1] - st1[1], t2 = st3[1] - st1[1]; var r = 1 / (s1 * t2 - t1 * s2); sdir[0] = (t2 * x1 - t1 * x2) * r; sdir[1] = (t2 * y1 - t1 * y2) * r; sdir[2] = (t2 * z1 - t1 * z2) * r; tdir[0] = (s1 * x2 - s2 * x1) * r; tdir[1] = (s1 * y2 - s2 * y1) * r; tdir[2] = (s1 * z2 - s2 * z1) * r; vec3.add(tan1[i1], tan1[i1], sdir); vec3.add(tan1[i2], tan1[i2], sdir); vec3.add(tan1[i3], tan1[i3], sdir); vec3.add(tan2[i1], tan2[i1], tdir); vec3.add(tan2[i2], tan2[i2], tdir); vec3.add(tan2[i3], tan2[i3], tdir); } var tmp = [ 0, 0, 0, 0 ]; var nCrossT = [ 0, 0, 0 ]; for (var i = 0; i < nVertex; i++) { var n = normals[i]; var t = tan1[i]; vec3.scale(tmp, n, vec3.dot(n, t)); vec3.sub(tmp, t, tmp); vec3.normalize(tmp, tmp); vec3.cross(nCrossT, n, t); tmp[3] = vec3.dot(nCrossT, tan2[i]) < 0 ? 
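// Gram-Schmidt: the tangent is made orthogonal to the normal, and w stores
// the handedness (+1 or -1) so a shader can rebuild the bitangent as
// cross(normal, tangent) * w.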
-1 : 1; tangents[i] = tmp.slice(); } }, isUniqueVertex: function () { if (this.isUseFace()) { return this.getVertexNumber() === this.faces.length * 3; } else { return true; } }, generateUniqueVertex: function () { var vertexUseCount = []; for (var i = 0; i < this.getVertexNumber(); i++) { vertexUseCount[i] = 0; } var cursor = this.getVertexNumber(); var attributes = this.getEnabledAttributes(); var faces = this.faces; var attributeNameList = Object.keys(attributes); for (var i = 0; i < faces.length; i++) { var face = faces[i]; for (var j = 0; j < 3; j++) { var ii = face[j]; if (vertexUseCount[ii] > 0) { for (var a = 0; a < attributeNameList.length; a++) { var name = attributeNameList[a]; var array = attributes[name].value; var size = attributes[name].size; if (size === 1) { array.push(array[ii]); } else { array.push(arrSlice.call(array[ii])); } } face[j] = cursor; cursor++; } vertexUseCount[ii]++; } } this.dirty(); }, generateBarycentric: function () { var a = [ 1, 0, 0 ]; var b = [ 0, 0, 1 ]; var c = [ 0, 1, 0 ]; return function () { if (!this.isUniqueVertex()) { this.generateUniqueVertex(); } var array = this.attributes.barycentric.value; if (array.length == this.faces.length * 3) { return; } var i1, i2, i3, face; for (var i = 0; i < this.faces.length; i++) { face = this.faces[i]; i1 = face[0]; i2 = face[1]; i3 = face[2]; array[i1] = a; array[i2] = b; array[i3] = c; } }; }(), convertToStatic: function (geometry, useUintExtension) { this._updateAttributesAndIndicesArrays(this.getEnabledAttributes(), true, useUintExtension); if (this._arrayChunks.length > 1) { console.warn('Large geometry will discard chunks when convert to StaticGeometry'); } else if (this._arrayChunks.length === 0) { return geometry; } var chunk = this._arrayChunks[0]; var attributes = this.getEnabledAttributes(); for (var name in attributes) { var attrib = attributes[name]; var geoAttrib = geometry.attributes[name]; if (!geoAttrib) { geoAttrib = geometry.attributes[name] = { type: attrib.type, size: attrib.size, value: null }; if (attrib.semantic) { geoAttrib.semantic = attrib.semantic; } } geoAttrib.value = chunk.attributeArrays[name]; } geometry.faces = chunk.indicesArray; if (this.boundingBox) { geometry.boundingBox = new BoundingBox(); geometry.boundingBox.min.copy(this.boundingBox.min); geometry.boundingBox.max.copy(this.boundingBox.max); } return geometry; }, applyTransform: function (matrix) { if (this.boundingBox) { this.boundingBox.applyTransform(matrix); } var positions = this.attributes.position.value; var normals = this.attributes.normal.value; var tangents = this.attributes.tangent.value; matrix = matrix._array; for (var i = 0; i < positions.length; i++) { vec3.transformMat4(positions[i], positions[i], matrix); } var inverseTransposeMatrix = mat4.create(); mat4.invert(inverseTransposeMatrix, matrix); mat4.transpose(inverseTransposeMatrix, inverseTransposeMatrix); for (var i = 0; i < normals.length; i++) { vec3.transformMat4(normals[i], normals[i], inverseTransposeMatrix); } for (var i = 0; i < tangents.length; i++) { vec3.transformMat4(tangents[i], tangents[i], inverseTransposeMatrix); } }, dispose: function (_gl) { this._cache.use(_gl.__GLID__); var chunks = this._cache.get('chunks'); if (chunks) { for (var c = 0; c < chunks.length; c++) { var chunk = chunks[c]; for (var k = 0; k < chunk.attributeBuffers.length; k++) { var attribs = chunk.attributeBuffers[k]; _gl.deleteBuffer(attribs.buffer); } } } this._cache.deleteContext(_gl.__GLID__); } }); return DynamicGeometry; });define('qtek/Geometry', [ 
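// Base Geometry module: defines the Attribute / AttributeBuffer /
// IndicesBuffer helpers and the abstract interface that DynamicGeometry and
// StaticGeometry implement.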
'require', './core/Base', './core/glenum', './core/Cache', './dep/glmatrix' ], function (require) { 'use strict'; var Base = require('./core/Base'); var glenum = require('./core/glenum'); var Cache = require('./core/Cache'); var glmatrix = require('./dep/glmatrix'); var vec2 = glmatrix.vec2; var vec3 = glmatrix.vec3; var vec4 = glmatrix.vec4; function Attribute(name, type, size, semantic, isDynamic) { this.name = name; this.type = type; this.size = size; if (semantic) { this.semantic = semantic; } if (isDynamic) { this._isDynamic = true; this.value = []; } else { this._isDynamic = false; this.value = null; } switch (size) { case 1: this.get = function (idx) { return this.value[idx]; }; this.set = function (idx, value) { this.value[idx] = value; }; break; case 2: if (isDynamic) { this.get = function (idx, out) { out = out._array || out; var item = this.value[idx]; if (item) { vec2.copy(out, item); } return out; }; this.set = function (idx, val) { val = val._array || val; var item = this.value[idx]; if (!item) { item = this.value[idx] = vec2.create(); } vec2.copy(item, val); }; } else { this.get = function (idx, out) { out = out._array || out; out[0] = this.value[idx * 2]; out[1] = this.value[idx * 2 + 1]; return out; }; this.set = function (idx, val) { val = val._array || val; this.value[idx * 2] = val[0]; this.value[idx * 2 + 1] = val[1]; }; } break; case 3: if (isDynamic) { this.get = function (idx, out) { out = out._array || out; var item = this.value[idx]; if (item) { vec3.copy(out, item); } return out; }; this.set = function (idx, val) { val = val._array || val; var item = this.value[idx]; if (!item) { item = this.value[idx] = vec3.create(); } vec3.copy(item, val); }; } else { this.get = function (idx, out) { out = out._array || out; out[0] = this.value[idx * 3]; out[1] = this.value[idx * 3 + 1]; out[2] = this.value[idx * 3 + 2]; return out; }; this.set = function (idx, val) { val = val._array || val; this.value[idx * 3] = val[0]; this.value[idx * 3 + 1] = val[1]; this.value[idx * 3 + 2] = val[2]; }; } break; case 4: if (isDynamic) { this.get = function (idx, out) { out = out._array || out; var item = this.value[idx]; if (item) { vec4.copy(out, item); } return out; }; this.set = function (idx, val) { val = val._array || val; var item = this.value[idx]; if (!item) { item = this.value[idx] = vec4.create(); } vec4.copy(item, val); }; } else { this.get = function (idx, out) { out = out._array || out; out[0] = this.value[idx * 4]; out[1] = this.value[idx * 4 + 1]; out[2] = this.value[idx * 4 + 2]; out[3] = this.value[idx * 4 + 3]; return out; }; this.set = function (idx, val) { val = val._array || val; this.value[idx * 4] = val[0]; this.value[idx * 4 + 1] = val[1]; this.value[idx * 4 + 2] = val[2]; this.value[idx * 4 + 3] = val[3]; }; } break; } } Attribute.prototype.init = function (nVertex) { if (!this._isDynamic) { if (!this.value || this.value.length != nVertex * this.size) { var ArrayConstructor; switch (this.type) { case 'byte': ArrayConstructor = Int8Array; break; case 'ubyte': ArrayConstructor = Uint8Array; break; case 'short': ArrayConstructor = Int16Array; break; case 'ushort': ArrayConstructor = Uint16Array; break; default: ArrayConstructor = Float32Array; break; } this.value = new ArrayConstructor(nVertex * this.size); } } else { console.warn('Dynamic geometry not support init method'); } }; Attribute.prototype.clone = function (copyValue) { var ret = new Attribute(this.name, this.type, this.size, this.semantic, this._isDynamic); if (copyValue) { console.warn('todo'); } return 
ret; }; function AttributeBuffer(name, type, buffer, size, semantic) { this.name = name; this.type = type; this.buffer = buffer; this.size = size; this.semantic = semantic; this.symbol = ''; } function IndicesBuffer(buffer) { this.buffer = buffer; this.count = 0; } function notImplementedWarn() { console.warn('Geometry doesn\'t implement this method, use DynamicGeometry or StaticGeometry instead'); } var Geometry = Base.derive({ boundingBox: null, attributes: {}, faces: null, dynamic: false, useFace: true }, function () { this._cache = new Cache(); this._attributeList = Object.keys(this.attributes); }, { mainAttribute: 'position', dirty: notImplementedWarn, createAttribute: notImplementedWarn, removeAttribute: notImplementedWarn, getVertexNumber: notImplementedWarn, getFaceNumber: notImplementedWarn, getFace: notImplementedWarn, isUseFace: notImplementedWarn, getEnabledAttributes: notImplementedWarn, getBufferChunks: notImplementedWarn, generateVertexNormals: notImplementedWarn, generateFaceNormals: notImplementedWarn, isUniqueVertex: notImplementedWarn, generateUniqueVertex: notImplementedWarn, generateTangents: notImplementedWarn, generateBarycentric: notImplementedWarn, applyTransform: notImplementedWarn, dispose: notImplementedWarn }); Geometry.STATIC_DRAW = glenum.STATIC_DRAW; Geometry.DYNAMIC_DRAW = glenum.DYNAMIC_DRAW; Geometry.STREAM_DRAW = glenum.STREAM_DRAW; Geometry.AttributeBuffer = AttributeBuffer; Geometry.IndicesBuffer = IndicesBuffer; Geometry.Attribute = Attribute; return Geometry; });define('qtek/math/BoundingBox', [ 'require', './Vector3', '../dep/glmatrix' ], function (require) { 'use strict'; var Vector3 = require('./Vector3'); var glMatrix = require('../dep/glmatrix'); var vec3 = glMatrix.vec3; var vec3TransformMat4 = vec3.transformMat4; var vec3Copy = vec3.copy; var vec3Set = vec3.set; var BoundingBox = function (min, max) { this.min = min || new Vector3(Infinity, Infinity, Infinity); this.max = max || new Vector3(-Infinity, -Infinity, -Infinity); var vertices = []; for (var i = 0; i < 8; i++) { vertices[i] = vec3.fromValues(0, 0, 0); } this.vertices = vertices; }; BoundingBox.prototype = { constructor: BoundingBox, updateFromVertices: function (vertices) { if (vertices.length > 0) { var _min = this.min._array; var _max = this.max._array; vec3Copy(_min, vertices[0]); vec3Copy(_max, vertices[0]); for (var i = 1; i < vertices.length; i++) { var vertex = vertices[i]; if (vertex[0] < _min[0]) { _min[0] = vertex[0]; } if (vertex[1] < _min[1]) { _min[1] = vertex[1]; } if (vertex[2] < _min[2]) { _min[2] = vertex[2]; } if (vertex[0] > _max[0]) { _max[0] = vertex[0]; } if (vertex[1] > _max[1]) { _max[1] = vertex[1]; } if (vertex[2] > _max[2]) { _max[2] = vertex[2]; } } this.min._dirty = true; this.max._dirty = true; } }, union: function (bbox) { vec3.min(this.min._array, this.min._array, bbox.min._array); vec3.max(this.max._array, this.max._array, bbox.max._array); this.min._dirty = true; this.max._dirty = true; }, intersectBoundingBox: function (bbox) { var _min = this.min._array; var _max = this.max._array; var _min2 = bbox.min._array; var _max2 = bbox.max._array; return !(_min[0] > _max2[0] || _min[1] > _max2[1] || _min[2] > _max2[1] || _max[0] < _min2[0] || _max[1] < _min2[1] || _max[2] < _min2[2]); }, applyTransform: function (matrix) { if (this.min._dirty || this.max._dirty) { this.updateVertices(); this.min._dirty = false; this.max._dirty = false; } var m4 = matrix._array; var _min = this.min._array; var _max = this.max._array; var vertices = this.vertices; var v = 
vertices[0]; vec3TransformMat4(v, v, m4); vec3Copy(_min, v); vec3Copy(_max, v); for (var i = 1; i < 8; i++) { v = vertices[i]; vec3TransformMat4(v, v, m4); if (v[0] < _min[0]) { _min[0] = v[0]; } if (v[1] < _min[1]) { _min[1] = v[1]; } if (v[2] < _min[2]) { _min[2] = v[2]; } if (v[0] > _max[0]) { _max[0] = v[0]; } if (v[1] > _max[1]) { _max[1] = v[1]; } if (v[2] > _max[2]) { _max[2] = v[2]; } } this.min._dirty = true; this.max._dirty = true; }, applyProjection: function (matrix) { if (this.min._dirty || this.max._dirty) { this.updateVertices(); this.min._dirty = false; this.max._dirty = false; } var m = matrix._array; var v1 = this.vertices[0]; var v2 = this.vertices[3]; var v3 = this.vertices[7]; var _min = this.min._array; var _max = this.max._array; if (m[15] === 1) { _min[0] = m[0] * v1[0] + m[12]; _min[1] = m[5] * v1[1] + m[13]; _max[2] = m[10] * v1[2] + m[14]; _max[0] = m[0] * v3[0] + m[12]; _max[1] = m[5] * v3[1] + m[13]; _min[2] = m[10] * v3[2] + m[14]; } else { var w = -1 / v1[2]; _min[0] = m[0] * v1[0] * w; _min[1] = m[5] * v1[1] * w; _max[2] = (m[10] * v1[2] + m[14]) * w; w = -1 / v2[2]; _max[0] = m[0] * v2[0] * w; _max[1] = m[5] * v2[1] * w; w = -1 / v3[2]; _min[2] = (m[10] * v3[2] + m[14]) * w; } this.min._dirty = true; this.max._dirty = true; }, updateVertices: function () { var min = this.min._array; var max = this.max._array; var vertices = this.vertices; vec3Set(vertices[0], min[0], min[1], min[2]); vec3Set(vertices[1], min[0], max[1], min[2]); vec3Set(vertices[2], max[0], min[1], min[2]); vec3Set(vertices[3], max[0], max[1], min[2]); vec3Set(vertices[4], min[0], min[1], max[2]); vec3Set(vertices[5], min[0], max[1], max[2]); vec3Set(vertices[6], max[0], min[1], max[2]); vec3Set(vertices[7], max[0], max[1], max[2]); }, copy: function (bbox) { vec3Copy(this.min._array, bbox.min._array); vec3Copy(this.max._array, bbox.max._array); this.min._dirty = true; this.max._dirty = true; }, clone: function () { var boundingBox = new BoundingBox(); boundingBox.copy(this); return boundingBox; } }; return BoundingBox; });define('qtek/Texture', [ 'require', './core/Base', './core/glenum', './core/Cache' ], function (require) { 'use strict'; var Base = require('./core/Base'); var glenum = require('./core/glenum'); var Cache = require('./core/Cache'); var Texture = Base.derive({ width: 512, height: 512, type: glenum.UNSIGNED_BYTE, format: glenum.RGBA, wrapS: glenum.CLAMP_TO_EDGE, wrapT: glenum.CLAMP_TO_EDGE, minFilter: glenum.LINEAR_MIPMAP_LINEAR, magFilter: glenum.LINEAR, useMipmap: true, anisotropic: 1, flipY: true, unpackAlignment: 4, premultiplyAlpha: false, dynamic: false, NPOT: false }, function () { this._cache = new Cache(); }, { getWebGLTexture: function (_gl) { var cache = this._cache; cache.use(_gl.__GLID__); if (cache.miss('webgl_texture')) { cache.put('webgl_texture', _gl.createTexture()); } if (this.dynamic) { this.update(_gl); } else if (cache.isDirty()) { this.update(_gl); cache.fresh(); } return cache.get('webgl_texture'); }, bind: function () { }, unbind: function () { }, dirty: function () { this._cache.dirtyAll(); }, update: function (_gl) { }, beforeUpdate: function (_gl) { _gl.pixelStorei(_gl.UNPACK_FLIP_Y_WEBGL, this.flipY); _gl.pixelStorei(_gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, this.premultiplyAlpha); _gl.pixelStorei(_gl.UNPACK_ALIGNMENT, this.unpackAlignment); this.fallBack(); }, fallBack: function () { var isPowerOfTwo = this.isPowerOfTwo(); if (this.format === glenum.DEPTH_COMPONENT) { this.useMipmap = false; } if (!isPowerOfTwo || !this.useMipmap) { this.NPOT = true; 
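// WebGL 1 restricts non-power-of-two textures: no mipmapping, and wrap must
// be CLAMP_TO_EDGE. The original filter/wrap settings are stashed below so
// they can be restored if the texture later becomes power-of-two.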
this._minFilterOriginal = this.minFilter; this._magFilterOriginal = this.magFilter; this._wrapSOriginal = this.wrapS; this._wrapTOriginal = this.wrapT; if (this.minFilter == glenum.NEAREST_MIPMAP_NEAREST || this.minFilter == glenum.NEAREST_MIPMAP_LINEAR) { this.minFilter = glenum.NEAREST; } else if (this.minFilter == glenum.LINEAR_MIPMAP_LINEAR || this.minFilter == glenum.LINEAR_MIPMAP_NEAREST) { this.minFilter = glenum.LINEAR; } this.wrapS = glenum.CLAMP_TO_EDGE; this.wrapT = glenum.CLAMP_TO_EDGE; } else { this.NPOT = false; if (this._minFilterOriginal) { this.minFilter = this._minFilterOriginal; } if (this._magFilterOriginal) { this.magFilter = this._magFilterOriginal; } if (this._wrapSOriginal) { this.wrapS = this._wrapSOriginal; } if (this._wrapTOriginal) { this.wrapT = this._wrapTOriginal; } } }, nextHighestPowerOfTwo: function (x) { --x; for (var i = 1; i < 32; i <<= 1) { x = x | x >> i; } return x + 1; }, dispose: function (_gl) { var cache = this._cache; cache.use(_gl.__GLID__); var webglTexture = cache.get('webgl_texture'); if (webglTexture) { _gl.deleteTexture(webglTexture); } cache.deleteContext(_gl.__GLID__); }, isRenderable: function () { }, isPowerOfTwo: function () { } }); Texture.BYTE = glenum.BYTE; Texture.UNSIGNED_BYTE = glenum.UNSIGNED_BYTE; Texture.SHORT = glenum.SHORT; Texture.UNSIGNED_SHORT = glenum.UNSIGNED_SHORT; Texture.INT = glenum.INT; Texture.UNSIGNED_INT = glenum.UNSIGNED_INT; Texture.FLOAT = glenum.FLOAT; Texture.HALF_FLOAT = 36193; Texture.DEPTH_COMPONENT = glenum.DEPTH_COMPONENT; Texture.ALPHA = glenum.ALPHA; Texture.RGB = glenum.RGB; Texture.RGBA = glenum.RGBA; Texture.LUMINANCE = glenum.LUMINANCE; Texture.LUMINANCE_ALPHA = glenum.LUMINANCE_ALPHA; Texture.COMPRESSED_RGB_S3TC_DXT1_EXT = 33776; Texture.COMPRESSED_RGBA_S3TC_DXT1_EXT = 33777; Texture.COMPRESSED_RGBA_S3TC_DXT3_EXT = 33778; Texture.COMPRESSED_RGBA_S3TC_DXT5_EXT = 33779; Texture.NEAREST = glenum.NEAREST; Texture.LINEAR = glenum.LINEAR; Texture.NEAREST_MIPMAP_NEAREST = glenum.NEAREST_MIPMAP_NEAREST; Texture.LINEAR_MIPMAP_NEAREST = glenum.LINEAR_MIPMAP_NEAREST; Texture.NEAREST_MIPMAP_LINEAR = glenum.NEAREST_MIPMAP_LINEAR; Texture.LINEAR_MIPMAP_LINEAR = glenum.LINEAR_MIPMAP_LINEAR; Texture.TEXTURE_MAG_FILTER = glenum.TEXTURE_MAG_FILTER; Texture.TEXTURE_MIN_FILTER = glenum.TEXTURE_MIN_FILTER; Texture.REPEAT = glenum.REPEAT; Texture.CLAMP_TO_EDGE = glenum.CLAMP_TO_EDGE; Texture.MIRRORED_REPEAT = glenum.MIRRORED_REPEAT; return Texture; });define('echarts-x/component/base3d', [ 'require', 'echarts/component/base', '../core/Layer3D', 'zrender/tool/util' ], function (require) { 'use strict'; var ComponentBase = require('echarts/component/base'); var Layer3D = require('../core/Layer3D'); var zrUtil = require('zrender/tool/util'); var Base3D = function (ecTheme, messageCenter, zr, option, myChart) { ComponentBase.call(this, ecTheme, messageCenter, zr, option, myChart); var zlevel = this.getZlevelBase(); this.baseLayer = new Layer3D(zlevel, this.zr.painter); this.zr.painter.insertLayer(zlevel, this.baseLayer); this.zr.animation.bind('frame', this.onframe, this); }; Base3D.prototype = { constructor: Base3D, onframe: function () { }, dispose: function () { this.zr.animation.unbind('frame', this.onframe); } }; zrUtil.inherits(Base3D, ComponentBase); return Base3D; });define('echarts-x/util/color', [], function () { var kCSSColorTable = { 'transparent': [ 0, 0, 0, 0 ], 'aliceblue': [ 240, 248, 255, 1 ], 'antiquewhite': [ 250, 235, 215, 1 ], 'aqua': [ 0, 255, 255, 1 ], 'aquamarine': [ 127, 255, 212, 1 ], 'azure': 
[ 240, 255, 255, 1 ], 'beige': [ 245, 245, 220, 1 ], 'bisque': [ 255, 228, 196, 1 ], 'black': [ 0, 0, 0, 1 ], 'blanchedalmond': [ 255, 235, 205, 1 ], 'blue': [ 0, 0, 255, 1 ], 'blueviolet': [ 138, 43, 226, 1 ], 'brown': [ 165, 42, 42, 1 ], 'burlywood': [ 222, 184, 135, 1 ], 'cadetblue': [ 95, 158, 160, 1 ], 'chartreuse': [ 127, 255, 0, 1 ], 'chocolate': [ 210, 105, 30, 1 ], 'coral': [ 255, 127, 80, 1 ], 'cornflowerblue': [ 100, 149, 237, 1 ], 'cornsilk': [ 255, 248, 220, 1 ], 'crimson': [ 220, 20, 60, 1 ], 'cyan': [ 0, 255, 255, 1 ], 'darkblue': [ 0, 0, 139, 1 ], 'darkcyan': [ 0, 139, 139, 1 ], 'darkgoldenrod': [ 184, 134, 11, 1 ], 'darkgray': [ 169, 169, 169, 1 ], 'darkgreen': [ 0, 100, 0, 1 ], 'darkgrey': [ 169, 169, 169, 1 ], 'darkkhaki': [ 189, 183, 107, 1 ], 'darkmagenta': [ 139, 0, 139, 1 ], 'darkolivegreen': [ 85, 107, 47, 1 ], 'darkorange': [ 255, 140, 0, 1 ], 'darkorchid': [ 153, 50, 204, 1 ], 'darkred': [ 139, 0, 0, 1 ], 'darksalmon': [ 233, 150, 122, 1 ], 'darkseagreen': [ 143, 188, 143, 1 ], 'darkslateblue': [ 72, 61, 139, 1 ], 'darkslategray': [ 47, 79, 79, 1 ], 'darkslategrey': [ 47, 79, 79, 1 ], 'darkturquoise': [ 0, 206, 209, 1 ], 'darkviolet': [ 148, 0, 211, 1 ], 'deeppink': [ 255, 20, 147, 1 ], 'deepskyblue': [ 0, 191, 255, 1 ], 'dimgray': [ 105, 105, 105, 1 ], 'dimgrey': [ 105, 105, 105, 1 ], 'dodgerblue': [ 30, 144, 255, 1 ], 'firebrick': [ 178, 34, 34, 1 ], 'floralwhite': [ 255, 250, 240, 1 ], 'forestgreen': [ 34, 139, 34, 1 ], 'fuchsia': [ 255, 0, 255, 1 ], 'gainsboro': [ 220, 220, 220, 1 ], 'ghostwhite': [ 248, 248, 255, 1 ], 'gold': [ 255, 215, 0, 1 ], 'goldenrod': [ 218, 165, 32, 1 ], 'gray': [ 128, 128, 128, 1 ], 'green': [ 0, 128, 0, 1 ], 'greenyellow': [ 173, 255, 47, 1 ], 'grey': [ 128, 128, 128, 1 ], 'honeydew': [ 240, 255, 240, 1 ], 'hotpink': [ 255, 105, 180, 1 ], 'indianred': [ 205, 92, 92, 1 ], 'indigo': [ 75, 0, 130, 1 ], 'ivory': [ 255, 255, 240, 1 ], 'khaki': [ 240, 230, 140, 1 ], 'lavender': [ 230, 230, 250, 1 ], 'lavenderblush': [ 255, 240, 245, 1 ], 'lawngreen': [ 124, 252, 0, 1 ], 'lemonchiffon': [ 255, 250, 205, 1 ], 'lightblue': [ 173, 216, 230, 1 ], 'lightcoral': [ 240, 128, 128, 1 ], 'lightcyan': [ 224, 255, 255, 1 ], 'lightgoldenrodyellow': [ 250, 250, 210, 1 ], 'lightgray': [ 211, 211, 211, 1 ], 'lightgreen': [ 144, 238, 144, 1 ], 'lightgrey': [ 211, 211, 211, 1 ], 'lightpink': [ 255, 182, 193, 1 ], 'lightsalmon': [ 255, 160, 122, 1 ], 'lightseagreen': [ 32, 178, 170, 1 ], 'lightskyblue': [ 135, 206, 250, 1 ], 'lightslategray': [ 119, 136, 153, 1 ], 'lightslategrey': [ 119, 136, 153, 1 ], 'lightsteelblue': [ 176, 196, 222, 1 ], 'lightyellow': [ 255, 255, 224, 1 ], 'lime': [ 0, 255, 0, 1 ], 'limegreen': [ 50, 205, 50, 1 ], 'linen': [ 250, 240, 230, 1 ], 'magenta': [ 255, 0, 255, 1 ], 'maroon': [ 128, 0, 0, 1 ], 'mediumaquamarine': [ 102, 205, 170, 1 ], 'mediumblue': [ 0, 0, 205, 1 ], 'mediumorchid': [ 186, 85, 211, 1 ], 'mediumpurple': [ 147, 112, 219, 1 ], 'mediumseagreen': [ 60, 179, 113, 1 ], 'mediumslateblue': [ 123, 104, 238, 1 ], 'mediumspringgreen': [ 0, 250, 154, 1 ], 'mediumturquoise': [ 72, 209, 204, 1 ], 'mediumvioletred': [ 199, 21, 133, 1 ], 'midnightblue': [ 25, 25, 112, 1 ], 'mintcream': [ 245, 255, 250, 1 ], 'mistyrose': [ 255, 228, 225, 1 ], 'moccasin': [ 255, 228, 181, 1 ], 'navajowhite': [ 255, 222, 173, 1 ], 'navy': [ 0, 0, 128, 1 ], 'oldlace': [ 253, 245, 230, 1 ], 'olive': [ 128, 128, 0, 1 ], 'olivedrab': [ 107, 142, 35, 1 ], 'orange': [ 255, 165, 0, 1 ], 'orangered': [ 255, 69, 0, 1 ], 'orchid': [ 218, 112, 214, 1 ], 
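// (The CSS named-color table continues below.) parse() at the end of this
// module also accepts #rgb / #rrggbb hex strings and the rgb()/rgba()/
// hsl()/hsla() functional notations, returning [r, g, b, a] or null.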
'palegoldenrod': [ 238, 232, 170, 1 ], 'palegreen': [ 152, 251, 152, 1 ], 'paleturquoise': [ 175, 238, 238, 1 ], 'palevioletred': [ 219, 112, 147, 1 ], 'papayawhip': [ 255, 239, 213, 1 ], 'peachpuff': [ 255, 218, 185, 1 ], 'peru': [ 205, 133, 63, 1 ], 'pink': [ 255, 192, 203, 1 ], 'plum': [ 221, 160, 221, 1 ], 'powderblue': [ 176, 224, 230, 1 ], 'purple': [ 128, 0, 128, 1 ], 'red': [ 255, 0, 0, 1 ], 'rosybrown': [ 188, 143, 143, 1 ], 'royalblue': [ 65, 105, 225, 1 ], 'saddlebrown': [ 139, 69, 19, 1 ], 'salmon': [ 250, 128, 114, 1 ], 'sandybrown': [ 244, 164, 96, 1 ], 'seagreen': [ 46, 139, 87, 1 ], 'seashell': [ 255, 245, 238, 1 ], 'sienna': [ 160, 82, 45, 1 ], 'silver': [ 192, 192, 192, 1 ], 'skyblue': [ 135, 206, 235, 1 ], 'slateblue': [ 106, 90, 205, 1 ], 'slategray': [ 112, 128, 144, 1 ], 'slategrey': [ 112, 128, 144, 1 ], 'snow': [ 255, 250, 250, 1 ], 'springgreen': [ 0, 255, 127, 1 ], 'steelblue': [ 70, 130, 180, 1 ], 'tan': [ 210, 180, 140, 1 ], 'teal': [ 0, 128, 128, 1 ], 'thistle': [ 216, 191, 216, 1 ], 'tomato': [ 255, 99, 71, 1 ], 'turquoise': [ 64, 224, 208, 1 ], 'violet': [ 238, 130, 238, 1 ], 'wheat': [ 245, 222, 179, 1 ], 'white': [ 255, 255, 255, 1 ], 'whitesmoke': [ 245, 245, 245, 1 ], 'yellow': [ 255, 255, 0, 1 ], 'yellowgreen': [ 154, 205, 50, 1 ] }; function clamp_css_byte(i) { i = Math.round(i); return i < 0 ? 0 : i > 255 ? 255 : i; } function clamp_css_float(f) { return f < 0 ? 0 : f > 1 ? 1 : f; } function parse_css_int(str) { if (str[str.length - 1] === '%') return clamp_css_byte(parseFloat(str) / 100 * 255); return clamp_css_byte(parseInt(str)); } function parse_css_float(str) { if (str[str.length - 1] === '%') return clamp_css_float(parseFloat(str) / 100); return clamp_css_float(parseFloat(str)); } function css_hue_to_rgb(m1, m2, h) { if (h < 0) h += 1; else if (h > 1) h -= 1; if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; if (h * 2 < 1) return m2; if (h * 3 < 2) return m1 + (m2 - m1) * (2 / 3 - h) * 6; return m1; } function parse(css_str) { var str = css_str.replace(/ /g, '').toLowerCase(); if (str in kCSSColorTable) return kCSSColorTable[str].slice(); if (str[0] === '#') { if (str.length === 4) { var iv = parseInt(str.substr(1), 16); if (!(iv >= 0 && iv <= 4095)) return null; return [ (iv & 3840) >> 4 | (iv & 3840) >> 8, iv & 240 | (iv & 240) >> 4, iv & 15 | (iv & 15) << 4, 1 ]; } else if (str.length === 7) { var iv = parseInt(str.substr(1), 16); if (!(iv >= 0 && iv <= 16777215)) return null; return [ (iv & 16711680) >> 16, (iv & 65280) >> 8, iv & 255, 1 ]; } return null; } var op = str.indexOf('('), ep = str.indexOf(')'); if (op !== -1 && ep + 1 === str.length) { var fname = str.substr(0, op); var params = str.substr(op + 1, ep - (op + 1)).split(','); var alpha = 1; switch (fname) { case 'rgba': if (params.length !== 4) return null; alpha = parse_css_float(params.pop()); case 'rgb': if (params.length !== 3) return null; return [ parse_css_int(params[0]), parse_css_int(params[1]), parse_css_int(params[2]), alpha ]; case 'hsla': if (params.length !== 4) return null; alpha = parse_css_float(params.pop()); case 'hsl': if (params.length !== 3) return null; var h = (parseFloat(params[0]) % 360 + 360) % 360 / 360; var s = parse_css_float(params[1]); var l = parse_css_float(params[2]); var m2 = l <= 0.5 ? 
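// Standard CSS HSL to RGB: m2 = l * (s + 1) when l <= 0.5, else l + s - l*s,
// and m1 = 2 * l - m2; the channels sample the hue ramp at h + 1/3, h and
// h - 1/3.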
l * (s + 1) : l + s - l * s; var m1 = l * 2 - m2; return [ clamp_css_byte(css_hue_to_rgb(m1, m2, h + 1 / 3) * 255), clamp_css_byte(css_hue_to_rgb(m1, m2, h) * 255), clamp_css_byte(css_hue_to_rgb(m1, m2, h - 1 / 3) * 255), alpha ]; default: return null; } } return null; } return { parse: parse }; });define('echarts-x/entity/marker/MarkLine', [ 'require', 'zrender/tool/util', './Base', 'qtek/Renderable', 'qtek/Material', 'qtek/Shader', 'qtek/Node', '../../util/geometry/Lines', '../../util/geometry/CurveAnimatingPoints', 'qtek/Texture2D', 'qtek/math/Vector3' ], function (require) { var zrUtil = require('zrender/tool/util'); var MarkBase = require('./Base'); var Renderable = require('qtek/Renderable'); var Material = require('qtek/Material'); var Shader = require('qtek/Shader'); var Node = require('qtek/Node'); var LinesGeometry = require('../../util/geometry/Lines'); var CurveAnimatingPointsGeometry = require('../../util/geometry/CurveAnimatingPoints'); var Texture2D = require('qtek/Texture2D'); var Vector3 = require('qtek/math/Vector3'); var MarkLine = function (chart) { MarkBase.call(this, chart); this._sceneNode = new Node(); this._markLineRenderable = null; this._curveAnimatingPointsRenderable = null; this._elapsedTime = 0; }; MarkLine.prototype = { constructor: MarkLine, _createMarkLineRenderable: function () { var material = new Material({ shader: new Shader({ vertex: Shader.source('ecx.albedo.vertex'), fragment: Shader.source('ecx.albedo.fragment') }), transparent: true, depthMask: false }); material.shader.define('both', 'VERTEX_COLOR'); this._markLineRenderable = new Renderable({ geometry: new LinesGeometry(), material: material, mode: Renderable.LINES }); this._sceneNode.add(this._markLineRenderable); }, _createCurveAnimatingPointsRenderable: function () { var material = new Material({ shader: new Shader({ vertex: Shader.source('ecx.curveAnimatingPoints.vertex'), fragment: Shader.source('ecx.curveAnimatingPoints.fragment') }) }); this._curveAnimatingPointsRenderable = new Renderable({ material: material, mode: Renderable.POINTS, geometry: new CurveAnimatingPointsGeometry() }); this._sceneNode.add(this._curveAnimatingPointsRenderable); }, setSeries: function (serie, seriesIndex) { if (!serie.markLine || !serie.markLine.data) { return; } this.seriesIndex = seriesIndex; var chart = this.chart; var legend = chart.component.legend; var zr = chart.zr; var markLine = serie.markLine; var devicePixelRatio = window.devicePixelRatio || 1; if (!this._markLineRenderable) { this._createMarkLineRenderable(); } var width = chart.query(markLine, 'itemStyle.normal.lineStyle.width'); var opacity = chart.query(markLine, 'itemStyle.normal.lineStyle.opacity'); var lineRenderable = this._markLineRenderable; lineRenderable.lineWidth = width * devicePixelRatio; lineRenderable.material.set('alpha', opacity); var showMarkLineEffect = chart.query(serie.markLine, 'effect.show'); var pointsRenderable; if (showMarkLineEffect) { var scaleSize = chart.query(markLine, 'effect.scaleSize'); if (!this._curveAnimatingPointsRenderable) { this._createCurveAnimatingPointsRenderable(); } pointsRenderable = this._curveAnimatingPointsRenderable; pointsRenderable.material.set('pointSize', scaleSize * devicePixelRatio); pointsRenderable.geometry.dirty(); } var serieColor; if (legend) { serieColor = legend.getColor(serie.name); } serieColor = chart.query(markLine, 'itemStyle.normal.color'); var serieDefaultColor = chart.zr.getColor(seriesIndex); var dataList = markLine.data; for (var i = 0; i < dataList.length; i++) { var p0 = 
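// Each mark line is a cubic Bezier: the chart supplies the four control
// points p0..p3 via getMarkLinePoints, and the color resolves from the data
// item, then the series, then the series' default palette color.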
new Vector3(); var p1 = new Vector3(); var p2 = new Vector3(); var p3 = new Vector3(); var dataItem = dataList[i]; var itemColor = chart.query(dataItem, 'itemStyle.normal.color'); var color = itemColor || serieColor || serieDefaultColor; if (typeof color == 'function') { color = color(dataItem); } var colorArr = chart.parseColor(color) || new Float32Array(4); chart.getMarkLinePoints(seriesIndex, dataItem, p0, p1, p2, p3); lineRenderable.geometry.addCubicCurve(p0, p1, p2, p3, colorArr); if (showMarkLineEffect) { pointsRenderable.geometry.addPoint(p0, p1, p2, p3, colorArr); } } lineRenderable.geometry.dirty(); }, clear: function () { this._elapsedTime = 0; if (this._markLineRenderable) { this._markLineRenderable.geometry.clearLines(); } if (this._curveAnimatingPointsRenderable) { this._curveAnimatingPointsRenderable.geometry.clearPoints(); } }, getSceneNode: function () { return this._sceneNode; }, onframe: function (deltaTime) { var renderable = this._curveAnimatingPointsRenderable; if (renderable && renderable.geometry.getVertexNumber() > 0) { this._elapsedTime += deltaTime / 1000; var t = this._elapsedTime / 3; t %= 1; renderable.material.set('percent', t); this.chart.zr.refreshNextFrame(); } } }; zrUtil.inherits(MarkLine, MarkBase); return MarkLine; });define('echarts-x/entity/marker/MarkBar', [ 'require', 'zrender/tool/util', './Base', 'qtek/Renderable', 'qtek/Material', 'qtek/Shader', '../../util/geometry/Bars', 'qtek/math/Vector3' ], function (require) { var zrUtil = require('zrender/tool/util'); var MarkBase = require('./Base'); var Renderable = require('qtek/Renderable'); var Material = require('qtek/Material'); var Shader = require('qtek/Shader'); var BarsGeometry = require('../../util/geometry/Bars'); var Vector3 = require('qtek/math/Vector3'); var MarkBar = function (chart) { MarkBase.call(this, chart); this._markBarRenderable = null; }; MarkBar.prototype = { constructor: MarkBar, _createMarkBarRenderable: function () { var material = new Material({ shader: new Shader({ vertex: Shader.source('ecx.albedo.vertex'), fragment: Shader.source('ecx.albedo.fragment') }) }); material.shader.define('both', 'VERTEX_COLOR'); this._markBarRenderable = new Renderable({ geometry: new BarsGeometry(), material: material, ignorePicking: true }); }, setSeries: function (serie, seriesIndex) { if (!serie.markBar || !serie.markBar.data) { return; } var chart = this.chart; var component = chart.component; var legend = component.legend; var dataRange = component.dataRange; if (!this._markBarRenderable) { this._createMarkBarRenderable(); } var dataList = serie.markBar.data; var geometry = this._markBarRenderable.geometry; var serieColor; if (legend) { serieColor = legend.getColor(serie.name); } serieColor = chart.query(serie.markBar, 'itemStyle.normal.color') || serieColor; var serieDefaultColor = chart.zr.getColor(seriesIndex); var start = new Vector3(); var end = new Vector3(); var normal = new Vector3(); var globalBarSize = serie.markBar.barSize; for (var i = 0; i < dataList.length; i++) { var dataItem = dataList[i]; var value = chart.getDataFromOption(dataItem, null); var dataRangeColor = null; if (dataRange) { dataRangeColor = isNaN(value) ? 
null : dataRange.getColor(value); if (dataRangeColor == null) { continue; } } var itemColor = chart.query(dataItem, 'itemStyle.normal.color'); var color = itemColor || dataRangeColor || serieColor || serieDefaultColor; if (typeof color == 'function') { color = color(dataItem); } var colorArr = chart.parseColor(color) || new Float32Array(4); var barSize = dataItem.barSize != null ? dataItem.barSize : globalBarSize; if (typeof barSize == 'function') { barSize = barSize(dataItem); } chart.getMarkBarPoints(seriesIndex, dataItem, start, end); this._markBarRenderable.geometry.addBar(start, end, barSize, colorArr); } this._markBarRenderable.geometry.dirty(); }, getSceneNode: function () { return this._markBarRenderable; }, clear: function () { if (this._markBarRenderable) { this._markBarRenderable.geometry.clearBars(); } } }; zrUtil.inherits(MarkBar, MarkBase); return MarkBar; });define('echarts-x/entity/marker/MarkPoint', [ 'require', 'zrender/tool/util', './Base', 'qtek/Renderable', 'qtek/Material', 'qtek/Shader', 'qtek/Node', 'qtek/Texture2D', 'qtek/Texture', '../../surface/TextureAtlasSurface', '../../util/geometry/Sprites', '../../util/sprite', 'echarts/util/shape/Icon', 'zrender/shape/Image', 'qtek/math/Matrix4' ], function (require) { var zrUtil = require('zrender/tool/util'); var MarkBase = require('./Base'); var Renderable = require('qtek/Renderable'); var Material = require('qtek/Material'); var Shader = require('qtek/Shader'); var Node = require('qtek/Node'); var Texture2D = require('qtek/Texture2D'); var Texture = require('qtek/Texture'); var TextureAtlasSurface = require('../../surface/TextureAtlasSurface'); var SpritesGeometry = require('../../util/geometry/Sprites'); var spriteUtil = require('../../util/sprite'); var IconShape = require('echarts/util/shape/Icon'); var ImageShape = require('zrender/shape/Image'); var Matrix4 = require('qtek/math/Matrix4'); var MarkPoint = function (chart) { MarkBase.call(this, chart); this._sceneNode = new Node(); this._spritesRenderables = []; this._spritesShader = null; this._textureAtlasList = []; this._spriteSize = 128; }; MarkPoint.prototype = { constructor: MarkPoint, setSeries: function (serie, seriesIndex) { if (!serie.markPoint || !serie.markPoint.data || serie.markPoint.data.length === 0) { return; } this.seriesIndex = seriesIndex; var chart = this.chart; var component = chart.component; var legend = component.legend; var dataRange = component.dataRange; var markPoint = serie.markPoint; var zr = chart.zr; var spriteSize = this._spriteSize; var dataList = markPoint.data; var serieColor; if (legend) { serieColor = legend.getColor(serie.name); } serieColor = chart.query(serie.markPoint, 'itemStyle.normal.color') || serieColor; var serieDefaultColor = chart.zr.getColor(seriesIndex); var matrix = new Matrix4(); var atlasSize = Texture.prototype.nextHighestPowerOfTwo(Math.sqrt(dataList.length) * this._spriteSize); atlasSize = Math.min(2048, atlasSize); var textureAtlas = new TextureAtlasSurface(chart.zr, atlasSize, atlasSize); this._textureAtlasList.push(textureAtlas); var spriteRenderable = this._createSpritesRenderable(textureAtlas); for (var i = 0; i < dataList.length; i++) { var dataItem = dataList[i]; var value = chart.getDataFromOption(dataItem, null); var queryTarget = [ dataItem, markPoint ]; var dataRangeColor = null; if (dataRange) { dataRangeColor = isNaN(value) ? 
null : dataRange.getColor(value); if (dataRangeColor == null) { continue; } } var itemColor = chart.query(dataItem, 'itemStyle.normal.color'); var color = itemColor || dataRangeColor || serieColor || serieDefaultColor; if (typeof color == 'function') { color = color(dataItem); } var symbol = chart.deepQuery(queryTarget, 'symbol'); var symbolSize = chart.deepQuery(queryTarget, 'symbolSize'); var strokeColor = chart.deepQuery(queryTarget, 'itemStyle.normal.borderColor'); var lineWidth = chart.deepQuery(queryTarget, 'itemStyle.normal.borderWidth'); var shape; if (symbol.match(/^image:\/\//)) { shape = new ImageShape({ style: { image: symbol.replace(/^image:\/\//, '') } }); } else { shape = new IconShape({ style: { iconType: symbol, color: color, brushType: 'both', strokeColor: strokeColor, lineWidth: lineWidth / symbolSize * spriteSize } }); } var shapeStyle = shape.style; shapeStyle.x = shapeStyle.y = 0; shapeStyle.width = shapeStyle.height = spriteSize; if (chart.deepQuery(queryTarget, 'itemStyle.normal.label.show')) { shape.style.text = chart.getSerieLabelText(markPoint, dataItem, dataItem.name, 'normal'); shape.style.textPosition = 'inside'; shape.style.textColor = chart.deepQuery(queryTarget, 'itemStyle.normal.label.textStyle.color'); shape.style.textFont = chart.getFont(chart.deepQuery(queryTarget, 'itemStyle.normal.label.textStyle')); } var coords = textureAtlas.addShape(shape, spriteSize, spriteSize); if (!coords) { textureAtlas = new TextureAtlasSurface(chart.zr, atlasSize, atlasSize); this._textureAtlasList.push(textureAtlas); spriteRenderable = this._createSpritesRenderable(textureAtlas); coords = textureAtlas.addShape(shape, spriteSize, spriteSize); } chart.getMarkPointTransform(seriesIndex, dataItem, matrix); spriteRenderable.geometry.addSprite(matrix, coords); } for (var i = 0; i < this._textureAtlasList.length; i++) { this._textureAtlasList[i].refresh(); } }, _createSpritesRenderable: function (textureAtlas) { if (!this._spritesShader) { this._spritesShader = new Shader({ vertex: Shader.source('ecx.albedo.vertex'), fragment: Shader.source('ecx.albedo.fragment') }); this._spritesShader.enableTexture('diffuseMap'); } var renderable = new Renderable({ material: new Material({ shader: this._spritesShader, transparent: true, depthMask: false }), culling: false, geometry: new SpritesGeometry(), ignorePicking: true }); renderable.material.set('diffuseMap', textureAtlas.getTexture()); this._spritesRenderables.push(renderable); this._sceneNode.add(renderable); return renderable; }, clear: function () { var renderer = this.chart.baseLayer.renderer; renderer.disposeNode(this._sceneNode, true, true); this._sceneNode = new Node(); this._spritesRenderables = []; this._textureAtlasList = []; }, getSceneNode: function () { return this._sceneNode; } }; zrUtil.inherits(MarkPoint, MarkBase); return MarkPoint; });define('echarts-x/entity/marker/LargeMarkPoint', [ 'require', 'zrender/tool/util', './Base', 'qtek/Renderable', 'qtek/Material', 'qtek/Shader', 'qtek/Node', '../../util/geometry/Points', '../../util/geometry/AnimatingPoints', 'qtek/Texture2D', '../../util/sprite', 'qtek/math/Vector3', 'echarts/util/shape/Icon' ], function (require) { var zrUtil = require('zrender/tool/util'); var MarkBase = require('./Base'); var Renderable = require('qtek/Renderable'); var Material = require('qtek/Material'); var Shader = require('qtek/Shader'); var Node = require('qtek/Node'); var PointsGeometry = require('../../util/geometry/Points'); var AnimatingPointsGeometry = 
require('../../util/geometry/AnimatingPoints'); var Texture2D = require('qtek/Texture2D'); var spriteUtil = require('../../util/sprite'); var Vector3 = require('qtek/math/Vector3'); var IconShape = require('echarts/util/shape/Icon'); var LargeMarkPoint = function (chart) { MarkBase.call(this, chart); this._sceneNode = new Node(); this._markPointRenderable = null; this._animatingMarkPointRenderable = null; this._spriteTexture = null; this._elapsedTime = 0; }; LargeMarkPoint.prototype = { constructor: LargeMarkPoint, _createMarkPointRenderable: function () { var mat = new Material({ shader: new Shader({ vertex: Shader.source('ecx.points.vertex'), fragment: Shader.source('ecx.points.fragment') }), depthMask: false, transparent: true }); mat.shader.enableTexture('sprite'); this._markPointRenderable = new Renderable({ geometry: new PointsGeometry(), material: mat, mode: Renderable.POINTS }); if (this._spriteTexture) { mat.set('sprite', this._spriteTexture); } this._sceneNode.add(this._markPointRenderable); }, _createAnimatingMarkPointRenderable: function () { var mat = new Material({ shader: new Shader({ vertex: Shader.source('ecx.points.vertex'), fragment: Shader.source('ecx.points.fragment') }), depthMask: false, transparent: true }); mat.shader.enableTexture('sprite'); mat.shader.define('vertex', 'ANIMATING'); this._animatingMarkPointRenderable = new Renderable({ geometry: new AnimatingPointsGeometry(), material: mat, mode: Renderable.POINTS }); if (this._spriteTexture) { mat.set('sprite', this._spriteTexture); } this._sceneNode.add(this._animatingMarkPointRenderable); }, _updateSpriteTexture: function (size, shape) { if (!this._spriteTexture) { this._spriteTexture = new Texture2D({ flipY: false }); } var spriteTexture = this._spriteTexture; spriteTexture.image = spriteUtil.makeSpriteFromShape(size, shape, spriteTexture.image); spriteTexture.dirty(); }, clear: function () { if (this._markPointRenderable) { this._markPointRenderable.geometry.clearPoints(); } if (this._animatingMarkPointRenderable) { this._animatingMarkPointRenderable.geometry.clearPoints(); } this._elapsedTime = 0; }, setSeries: function (serie, seriesIndex) { if (!serie.markPoint || !serie.markPoint.data) { return; } this.seriesIndex = seriesIndex; var chart = this.chart; var component = chart.component; var legend = component.legend; var dataRange = component.dataRange; var markPoint = serie.markPoint; var zr = chart.zr; var symbol = chart.query(markPoint, 'symbol'); var showMarkPointEffect = chart.query(markPoint, 'effect.show'); var shadowBlur = chart.query(markPoint, 'effect.shadowBlur') || 0; var shape = new IconShape({ style: { x: 0, y: 0, width: 128, height: 128, iconType: symbol, color: 'white', shadowBlur: shadowBlur * 128, shadowColor: 'white' } }); this._updateSpriteTexture(128, shape); if (showMarkPointEffect) { if (!this._animatingMarkPointRenderable) { this._createAnimatingMarkPointRenderable(); } this._animatingMarkPointRenderable.geometry.dirty(); } else { if (!this._markPointRenderable) { this._createMarkPointRenderable(); } this._markPointRenderable.geometry.dirty(); } var dataList = markPoint.data; var serieColor; if (legend) { serieColor = legend.getColor(serie.name); } serieColor = chart.query(markPoint, 'itemStyle.normal.color') || serieColor; var serieDefaultColor = chart.zr.getColor(seriesIndex); var globalSize = chart.query(markPoint, 'symbolSize') || 2; for (var i = 0; i < dataList.length; i++) { var dataItem = dataList[i]; var value = chart.getDataFromOption(dataItem, null); var dataRangeColor = 
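// Per-point color precedence below: explicit item color, then dataRange mapping, then series color, then the palette default for this series index.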
null; if (dataRange) { dataRangeColor = isNaN(value) ? null : dataRange.getColor(value); if (dataRangeColor == null) { continue; } } var itemColor = chart.query(dataItem, 'itemStyle.normal.color'); var color = itemColor || dataRangeColor || serieColor || serieDefaultColor; if (typeof color == 'function') { color = color(dataItem); } var colorArr = chart.parseColor(color) || new Float32Array(4); var size = dataItem.symbolSize == null ? globalSize : dataItem.symbolSize; if (typeof size == 'function') { size = size(dataItem); } size *= window.devicePixelRatio || 1; var coord = new Vector3(); chart.getMarkCoord(seriesIndex, dataItem, coord); if (showMarkPointEffect) { this._animatingMarkPointRenderable.geometry.addPoint(coord, colorArr, size, Math.random() * 2); } else { this._markPointRenderable.geometry.addPoint(coord, colorArr, size); } } }, getSceneNode: function () { return this._sceneNode; }, onframe: function (deltaTime) { if (this._animatingMarkPointRenderable) { var renderable = this._animatingMarkPointRenderable; if (renderable.geometry.getVertexNumber() > 0) { this._elapsedTime += deltaTime / 1000; renderable.material.set('elapsedTime', this._elapsedTime); this.chart.zr.refreshNextFrame(); } } } }; zrUtil.inherits(LargeMarkPoint, MarkBase); return LargeMarkPoint; });define('echarts-x/core/Layer3D', [ 'require', 'qtek/Renderer', 'qtek/Scene', 'qtek/camera/Perspective', 'qtek/camera/Orthographic', 'qtek/picking/RayPicking', 'zrender/mixin/Eventful', 'zrender/tool/util' ], function (require) { var Renderer = require('qtek/Renderer'); var Scene = require('qtek/Scene'); var PerspectiveCamera = require('qtek/camera/Perspective'); var OrthoCamera = require('qtek/camera/Orthographic'); var RayPicking = require('qtek/picking/RayPicking'); var Eventful = require('zrender/mixin/Eventful'); var zrUtil = require('zrender/tool/util'); var Layer3D = function (id, painter) { Eventful.call(this); this.id = id; try { this.renderer = new Renderer(); this.renderer.resize(painter.getWidth(), painter.getHeight()); } catch (e) { this.renderer = null; this.dom = document.createElement('div'); this.dom.style.cssText = 'position:absolute; left: 0; top: 0; right: 0; bottom: 0;'; this.dom.className = 'ecx-nowebgl'; this.dom.innerHTML = 'Sorry, your browser does not support WebGL'; return; } this.dom = this.renderer.canvas; var style = this.dom.style; style.position = 'absolute'; style.left = '0'; style.top = '0'; this.camera = new PerspectiveCamera(); this.camera.aspect = painter.getWidth() / painter.getHeight(); this.scene = new Scene(); this._viewport = { x: 0, y: 0, width: 1, height: 1 }; this._initHandlers(); }; Layer3D.prototype._initHandlers = function () { this.bind('click', this._clickHandler, this); this.bind('mousedown', this._mouseDownHandler, this); this.bind('mouseup', this._mouseUpHandler, this); this.bind('mousemove', this._mouseMoveHandler, this); this._picking = new RayPicking({ scene: this.scene, camera: this.camera, renderer: this.renderer }); }; Layer3D.prototype.resize = function (width, height) { var renderer = this.renderer; renderer.resize(width, height); var viewport = this._viewport; this.setViewport(viewport.x * width, viewport.y * height, viewport.width * width, viewport.height * height); }; Layer3D.prototype.setViewport = function (x, y, width, height) { var renderer = this.renderer; var rendererWidth = renderer.getWidth(); var rendererHeight = renderer.getHeight(); var viewport = this._viewport; viewport.x = x / rendererWidth; viewport.y = y / rendererHeight; viewport.width = width / 
rendererWidth; viewport.height = height / rendererHeight; renderer.setViewport(x, y, width, height); var camera = this.camera; if (camera instanceof PerspectiveCamera) { camera.aspect = width / height; } }; Layer3D.prototype.refresh = function () { this.renderer.render(this.scene, this.camera); }; Layer3D.prototype.dispose = function () { this.renderer.disposeScene(this.scene); }; Layer3D.prototype.onmousedown = function (e) { e = e.event; var obj = this.pickObject(e.offsetX, e.offsetY); if (obj) { this._dispatchEvent('mousedown', e, obj); } }; Layer3D.prototype.onmousemove = function (e) { e = e.event; var obj = this.pickObject(e.offsetX, e.offsetY); if (obj) { this._dispatchEvent('mousemove', e, obj); } }; Layer3D.prototype.onmouseup = function (e) { e = e.event; var obj = this.pickObject(e.offsetX, e.offsetY); if (obj) { this._dispatchEvent('mouseup', e, obj); } }; Layer3D.prototype.onclick = function (e) { e = e.event; var obj = this.pickObject(e.offsetX, e.offsetY); if (obj) { this._dispatchEvent('click', e, obj); } }; Layer3D.prototype.pickObject = function (x, y) { return this._picking.pick(x, y); }; Layer3D.prototype._dispatchEvent = function (eveName, e, obj) { var current = obj.target; obj.cancelBubble = false; obj.event = e; obj.type = eveName; while (current) { current.trigger(eveName, obj); current = current.getParent(); if (obj.cancelBubble) { break; } } }; zrUtil.inherits(Layer3D, Eventful); return Layer3D; });define('qtek/Renderer', [ 'require', './core/Base', './Texture', './core/glinfo', './core/glenum', './math/BoundingBox', './math/Matrix4', './shader/library', './Material', './math/Vector2', './dep/glmatrix' ], function (require) { 'use strict'; var Base = require('./core/Base'); var Texture = require('./Texture'); var glinfo = require('./core/glinfo'); var glenum = require('./core/glenum'); var BoundingBox = require('./math/BoundingBox'); var Matrix4 = require('./math/Matrix4'); var shaderLibrary = require('./shader/library'); var Material = require('./Material'); var Vector2 = require('./math/Vector2'); var glMatrix = require('./dep/glmatrix'); var mat4 = glMatrix.mat4; var vec3 = glMatrix.vec3; var glid = 0; var errorShader = {}; var Renderer = Base.derive(function () { return { canvas: null, width: 100, height: 100, devicePixelRatio: window.devicePixelRatio || 1, color: [ 0, 0, 0, 0 ], clear: 17664, alpha: true, depth: true, stencil: false, antialias: true, premultipliedAlpha: true, preserveDrawingBuffer: false, throwError: true, gl: null, viewport: {}, _viewportSettings: [], _clearSettings: [], _sceneRendering: null }; }, function () { if (!this.canvas) { this.canvas = document.createElement('canvas'); this.canvas.width = this.width; this.canvas.height = this.height; } try { var opts = { alpha: this.alpha, depth: this.depth, stencil: this.stencil, antialias: this.antialias, premultipliedAlpha: this.premultipliedAlpha, preserveDrawingBuffer: this.preserveDrawingBuffer }; this.gl = this.canvas.getContext('webgl', opts) || this.canvas.getContext('experimental-webgl', opts); if (!this.gl) { throw new Error(); } this.gl.__GLID__ = glid++; this.width = this.canvas.width; this.height = this.canvas.height; this.resize(this.width, this.height); glinfo.initialize(this.gl); } catch (e) { throw 'Error creating WebGL Context ' + e; } }, { resize: function (width, height) { var canvas = this.canvas; if (typeof width !== 'undefined') { canvas.style.width = width + 'px'; canvas.style.height = height + 'px'; canvas.width = width * this.devicePixelRatio; canvas.height = height 
* this.devicePixelRatio; this.width = width; this.height = height; } else { this.width = canvas.width / this.devicePixelRatio; this.height = canvas.height / this.devicePixelRatio; } this.setViewport(0, 0, width, height); }, getWidth: function () { return this.width; }, getHeight: function () { return this.height; }, setDevicePixelRatio: function (devicePixelRatio) { this.devicePixelRatio = devicePixelRatio; this.resize(this.width, this.height); }, getDevicePixelRatio: function () { return this.devicePixelRatio; }, setViewport: function (x, y, width, height, dpr) { if (typeof x === 'object') { var obj = x; x = obj.x; y = obj.y; width = obj.width; height = obj.height; } dpr = dpr || this.devicePixelRatio; this.gl.viewport(x * dpr, y * dpr, width * dpr, height * dpr); this.viewport = { x: x, y: y, width: width, height: height }; }, saveViewport: function () { this._viewportSettings.push(this.viewport); }, restoreViewport: function () { if (this._viewportSettings.length > 0) { this.setViewport(this._viewportSettings.pop()); } }, saveClear: function () { this._clearSettings.push(this.clear); }, restoreClear: function () { if (this._clearSettings.length > 0) { this.clear = this._clearSettings.pop(); } }, render: function (scene, camera, notUpdateScene, preZ) { var _gl = this.gl; this._sceneRendering = scene; var color = this.color; if (this.clear) { _gl.clearColor(color[0], color[1], color[2], color[3]); _gl.clear(this.clear); } if (!notUpdateScene) { scene.update(false); } if (!camera.getScene()) { camera.update(true); } var opaqueQueue = scene.opaqueQueue; var transparentQueue = scene.transparentQueue; var sceneMaterial = scene.material; scene.trigger('beforerender', this, scene, camera); if (transparentQueue.length > 0) { var worldViewMat = mat4.create(); var posViewSpace = vec3.create(); for (var i = 0; i < transparentQueue.length; i++) { var node = transparentQueue[i]; mat4.multiply(worldViewMat, camera.viewMatrix._array, node.worldTransform._array); vec3.transformMat4(posViewSpace, node.position._array, worldViewMat); node.__depth = posViewSpace[2]; } } opaqueQueue.sort(Renderer.opaqueSortFunc); transparentQueue.sort(Renderer.transparentSortFunc); scene.trigger('beforerender:opaque', this, opaqueQueue); camera.sceneBoundingBoxLastFrame.min.set(Infinity, Infinity, Infinity); camera.sceneBoundingBoxLastFrame.max.set(-Infinity, -Infinity, -Infinity); _gl.disable(_gl.BLEND); _gl.enable(_gl.DEPTH_TEST); var opaqueRenderInfo = this.renderQueue(opaqueQueue, camera, sceneMaterial, preZ); scene.trigger('afterrender:opaque', this, opaqueQueue, opaqueRenderInfo); scene.trigger('beforerender:transparent', this, transparentQueue); _gl.enable(_gl.BLEND); var transparentRenderInfo = this.renderQueue(transparentQueue, camera, sceneMaterial); scene.trigger('afterrender:transparent', this, transparentQueue, transparentRenderInfo); var renderInfo = {}; for (var name in opaqueRenderInfo) { renderInfo[name] = opaqueRenderInfo[name] + transparentRenderInfo[name]; } scene.trigger('afterrender', this, scene, camera, renderInfo); return renderInfo; }, renderQueue: function (queue, camera, globalMaterial, preZ) { var renderInfo = { faceNumber: 0, vertexNumber: 0, drawCallNumber: 0, meshNumber: queue.length, renderedMeshNumber: 0 }; mat4.copy(matrices.VIEW, camera.viewMatrix._array); mat4.copy(matrices.PROJECTION, camera.projectionMatrix._array); mat4.multiply(matrices.VIEWPROJECTION, camera.projectionMatrix._array, matrices.VIEW); mat4.copy(matrices.VIEWINVERSE, camera.worldTransform._array); 
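// Camera-level matrices are computed once per queue; the per-renderable WORLD* variants are derived inside the loop below.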
mat4.invert(matrices.PROJECTIONINVERSE, matrices.PROJECTION); mat4.invert(matrices.VIEWPROJECTIONINVERSE, matrices.VIEWPROJECTION); var _gl = this.gl; var scene = this._sceneRendering; var prevMaterial; var prevShader; var depthTest, depthMask; var culling, cullFace, frontFace; var culledRenderQueue; if (preZ) { var preZPassMaterial = new Material({ shader: shaderLibrary.get('buildin.prez') }); var preZPassShader = preZPassMaterial.shader; culledRenderQueue = []; preZPassShader.bind(_gl); _gl.colorMask(false, false, false, false); _gl.depthMask(true); for (var i = 0; i < queue.length; i++) { var renderable = queue[i]; var worldM = renderable.worldTransform._array; var geometry = renderable.geometry; mat4.multiply(matrices.WORLDVIEW, matrices.VIEW, worldM); mat4.multiply(matrices.WORLDVIEWPROJECTION, matrices.VIEWPROJECTION, worldM); if (geometry.boundingBox) { if (!this._frustumCulling(renderable, camera)) { continue; } } if (renderable.skeleton) { continue; } if (renderable.cullFace !== cullFace) { cullFace = renderable.cullFace; _gl.cullFace(cullFace); } if (renderable.frontFace !== frontFace) { frontFace = renderable.frontFace; _gl.frontFace(frontFace); } if (renderable.culling !== culling) { culling = renderable.culling; culling ? _gl.enable(_gl.CULL_FACE) : _gl.disable(_gl.CULL_FACE); } var semanticInfo = preZPassShader.matrixSemantics.WORLDVIEWPROJECTION; preZPassShader.setUniform(_gl, semanticInfo.type, semanticInfo.symbol, matrices.WORLDVIEWPROJECTION); renderable.render(_gl, preZPassMaterial); culledRenderQueue.push(renderable); } _gl.depthFunc(_gl.LEQUAL); _gl.colorMask(true, true, true, true); _gl.depthMask(false); } else { culledRenderQueue = queue; } for (var i = 0; i < culledRenderQueue.length; i++) { var renderable = culledRenderQueue[i]; var material = globalMaterial || renderable.material; var shader = material.shader; var geometry = renderable.geometry; var worldM = renderable.worldTransform._array; mat4.copy(matrices.WORLD, worldM); mat4.multiply(matrices.WORLDVIEW, matrices.VIEW, worldM); mat4.multiply(matrices.WORLDVIEWPROJECTION, matrices.VIEWPROJECTION, worldM); if (shader.matrixSemantics.WORLDINVERSE || shader.matrixSemantics.WORLDINVERSETRANSPOSE) { mat4.invert(matrices.WORLDINVERSE, worldM); } if (shader.matrixSemantics.WORLDVIEWINVERSE || shader.matrixSemantics.WORLDVIEWINVERSETRANSPOSE) { mat4.invert(matrices.WORLDVIEWINVERSE, matrices.WORLDVIEW); } if (shader.matrixSemantics.WORLDVIEWPROJECTIONINVERSE || shader.matrixSemantics.WORLDVIEWPROJECTIONINVERSETRANSPOSE) { mat4.invert(matrices.WORLDVIEWPROJECTIONINVERSE, matrices.WORLDVIEWPROJECTION); } if (geometry.boundingBox && !preZ) { if (!this._frustumCulling(renderable, camera)) { continue; } } if (prevShader !== shader) { if (scene && scene.isShaderLightNumberChanged(shader)) { scene.setShaderLightNumber(shader); } var errMsg = shader.bind(_gl); if (errMsg) { if (errorShader[shader.__GUID__]) { continue; } errorShader[shader.__GUID__] = true; if (this.throwError) { throw new Error(errMsg); } else { this.trigger('error', errMsg); } } if (scene) { scene.setLightUniforms(shader, _gl); } prevShader = shader; } if (prevMaterial !== material) { if (!preZ) { if (material.depthTest !== depthTest) { material.depthTest ? 
_gl.enable(_gl.DEPTH_TEST) : _gl.disable(_gl.DEPTH_TEST); depthTest = material.depthTest; } if (material.depthMask !== depthMask) { _gl.depthMask(material.depthMask); depthMask = material.depthMask; } } material.bind(_gl, prevMaterial); prevMaterial = material; if (material.transparent) { if (material.blend) { material.blend(_gl); } else { _gl.blendEquationSeparate(_gl.FUNC_ADD, _gl.FUNC_ADD); _gl.blendFuncSeparate(_gl.SRC_ALPHA, _gl.ONE_MINUS_SRC_ALPHA, _gl.ONE, _gl.ONE_MINUS_SRC_ALPHA); } } } var matrixSemanticKeys = shader.matrixSemanticKeys; for (var k = 0; k < matrixSemanticKeys.length; k++) { var semantic = matrixSemanticKeys[k]; var semanticInfo = shader.matrixSemantics[semantic]; var matrix = matrices[semantic]; if (semanticInfo.isTranspose) { var matrixNoTranspose = matrices[semanticInfo.semanticNoTranspose]; mat4.transpose(matrix, matrixNoTranspose); } shader.setUniform(_gl, semanticInfo.type, semanticInfo.symbol, matrix); } if (renderable.cullFace !== cullFace) { cullFace = renderable.cullFace; _gl.cullFace(cullFace); } if (renderable.frontFace !== frontFace) { frontFace = renderable.frontFace; _gl.frontFace(frontFace); } if (renderable.culling !== culling) { culling = renderable.culling; culling ? _gl.enable(_gl.CULL_FACE) : _gl.disable(_gl.CULL_FACE); } var objectRenderInfo = renderable.render(_gl, globalMaterial); if (objectRenderInfo) { renderInfo.faceNumber += objectRenderInfo.faceNumber; renderInfo.vertexNumber += objectRenderInfo.vertexNumber; renderInfo.drawCallNumber += objectRenderInfo.drawCallNumber; renderInfo.renderedMeshNumber++; } } return renderInfo; }, _frustumCulling: function () { var cullingBoundingBox = new BoundingBox(); var cullingMatrix = new Matrix4(); return function (renderable, camera) { var geoBBox = renderable.geometry.boundingBox; cullingMatrix._array = matrices.WORLDVIEW; cullingBoundingBox.copy(geoBBox); cullingBoundingBox.applyTransform(cullingMatrix); if (renderable.castShadow) { camera.sceneBoundingBoxLastFrame.union(cullingBoundingBox); } if (renderable.frustumCulling) { if (!cullingBoundingBox.intersectBoundingBox(camera.frustum.boundingBox)) { return false; } cullingMatrix._array = matrices.PROJECTION; if (cullingBoundingBox.max._array[2] > 0 && cullingBoundingBox.min._array[2] < 0) { cullingBoundingBox.max._array[2] = -1e-20; } cullingBoundingBox.applyProjection(cullingMatrix); var min = cullingBoundingBox.min._array; var max = cullingBoundingBox.max._array; if (max[0] < -1 || min[0] > 1 || max[1] < -1 || min[1] > 1 || max[2] < -1 || min[2] > 1) { return false; } } return true; }; }(), disposeScene: function (scene) { this.disposeNode(scene, true, true); scene.dispose(); }, disposeNode: function (root, disposeGeometry, disposeTexture) { var materials = {}; var _gl = this.gl; if (root.getParent()) { root.getParent().remove(root); } root.traverse(function (node) { if (node.geometry && disposeGeometry) { node.geometry.dispose(_gl); } if (node.material) { materials[node.material.__GUID__] = node.material; } if (node.dispose) { node.dispose(_gl); } }); for (var guid in materials) { var mat = materials[guid]; mat.dispose(_gl, disposeTexture); } }, disposeShader: function (shader) { shader.dispose(this.gl); }, disposeGeometry: function (geometry) { geometry.dispose(this.gl); }, disposeTexture: function (texture) { texture.dispose(this.gl); }, disposeFrameBuffer: function (frameBuffer) { frameBuffer.dispose(this.gl); }, dispose: function () { glinfo.dispose(this.gl); }, screenToNdc: function (x, y, out) { if (!out) { out = new Vector2(); } y = 
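// Flip y so the origin moves from the top-left of the canvas to the bottom-left, then map both axes into the [-1, 1] NDC range.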
this.height - y; var viewport = this.viewport; var dpr = this.devicePixelRatio; var arr = out._array; arr[0] = (x - viewport.x) / viewport.width; arr[0] = arr[0] * 2 - 1; arr[1] = (y - viewport.y) / viewport.height; arr[1] = arr[1] * 2 - 1; return out; } }); Renderer.opaqueSortFunc = function (x, y) { if (x.material.shader === y.material.shader) { if (x.material === y.material) { return x.geometry.__GUID__ - y.geometry.__GUID__; } return x.material.__GUID__ - y.material.__GUID__; } return x.material.shader.__GUID__ - y.material.shader.__GUID__; }; Renderer.transparentSortFunc = function (x, y) { if (x.__depth === y.__depth) { if (x.material.shader === y.material.shader) { if (x.material === y.material) { return x.geometry.__GUID__ - y.geometry.__GUID__; } return x.material.__GUID__ - y.material.__GUID__; } return x.material.shader.__GUID__ - y.material.shader.__GUID__; } return x.__depth - y.__depth; }; var matrices = { WORLD: mat4.create(), VIEW: mat4.create(), PROJECTION: mat4.create(), WORLDVIEW: mat4.create(), VIEWPROJECTION: mat4.create(), WORLDVIEWPROJECTION: mat4.create(), WORLDINVERSE: mat4.create(), VIEWINVERSE: mat4.create(), PROJECTIONINVERSE: mat4.create(), WORLDVIEWINVERSE: mat4.create(), VIEWPROJECTIONINVERSE: mat4.create(), WORLDVIEWPROJECTIONINVERSE: mat4.create(), WORLDTRANSPOSE: mat4.create(), VIEWTRANSPOSE: mat4.create(), PROJECTIONTRANSPOSE: mat4.create(), WORLDVIEWTRANSPOSE: mat4.create(), VIEWPROJECTIONTRANSPOSE: mat4.create(), WORLDVIEWPROJECTIONTRANSPOSE: mat4.create(), WORLDINVERSETRANSPOSE: mat4.create(), VIEWINVERSETRANSPOSE: mat4.create(), PROJECTIONINVERSETRANSPOSE: mat4.create(), WORLDVIEWINVERSETRANSPOSE: mat4.create(), VIEWPROJECTIONINVERSETRANSPOSE: mat4.create(), WORLDVIEWPROJECTIONINVERSETRANSPOSE: mat4.create() }; Renderer.COLOR_BUFFER_BIT = glenum.COLOR_BUFFER_BIT; Renderer.DEPTH_BUFFER_BIT = glenum.DEPTH_BUFFER_BIT; Renderer.STENCIL_BUFFER_BIT = glenum.STENCIL_BUFFER_BIT; return Renderer; });define('qtek/Scene', [ 'require', './Node', './Light' ], function (require) { 'use strict'; var Node = require('./Node'); var Light = require('./Light'); var Scene = Node.derive(function () { return { material: null, autoUpdate: true, opaqueQueue: [], transparentQueue: [], lights: [], _lightUniforms: {}, _lightNumber: { 'POINT_LIGHT': 0, 'DIRECTIONAL_LIGHT': 0, 'SPOT_LIGHT': 0, 'AMBIENT_LIGHT': 0 }, _opaqueObjectCount: 0, _transparentObjectCount: 0, _nodeRepository: {} }; }, function () { this._scene = this; }, { addToScene: function (node) { if (node.name) { this._nodeRepository[node.name] = node; } }, removeFromScene: function (node) { if (node.name) { delete this._nodeRepository[node.name]; } }, getNode: function (name) { return this._nodeRepository[name]; }, cloneNode: function (node) { var newNode = node.clone(); var materialsMap = {}; var cloneSkeleton = function (current, currentNew) { if (current.skeleton) { currentNew.skeleton = current.skeleton.clone(node, newNode); currentNew.joints = current.joints.slice(); } if (current.material) { materialsMap[current.material.__GUID__] = { oldMat: current.material }; } for (var i = 0; i < current._children.length; i++) { cloneSkeleton(current._children[i], currentNew._children[i]); } }; cloneSkeleton(node, newNode); for (var guid in materialsMap) { materialsMap[guid].newMat = materialsMap[guid].oldMat.clone(); } newNode.traverse(function (current) { if (current.material) { current.material = materialsMap[current.material.__GUID__].newMat; } }); return newNode; }, update: function (force) { if (!(this.autoUpdate || 
force)) { return; } Node.prototype.update.call(this, force); var lights = this.lights; var sceneMaterialTransparent = this.material && this.material.transparent; this._opaqueObjectCount = 0; this._transparentObjectCount = 0; lights.length = 0; this._updateRenderQueue(this, sceneMaterialTransparent); this.opaqueQueue.length = this._opaqueObjectCount; this.transparentQueue.length = this._transparentObjectCount; for (var type in this._lightNumber) { this._lightNumber[type] = 0; } for (var i = 0; i < lights.length; i++) { var light = lights[i]; this._lightNumber[light.type]++; } this._updateLightUniforms(); }, _updateRenderQueue: function (parent, sceneMaterialTransparent) { if (!parent.visible) { return; } for (var i = 0; i < parent._children.length; i++) { var child = parent._children[i]; if (child instanceof Light) { this.lights.push(child); } if (child.isRenderable()) { if (child.material.transparent || sceneMaterialTransparent) { this.transparentQueue[this._transparentObjectCount++] = child; } else { this.opaqueQueue[this._opaqueObjectCount++] = child; } } if (child._children.length > 0) { this._updateRenderQueue(child, sceneMaterialTransparent); } } }, _updateLightUniforms: function () { var lights = this.lights; lights.sort(lightSortFunc); var lightUniforms = this._lightUniforms; for (var symbol in lightUniforms) { lightUniforms[symbol].value.length = 0; } for (var i = 0; i < lights.length; i++) { var light = lights[i]; for (symbol in light.uniformTemplates) { var uniformTpl = light.uniformTemplates[symbol]; if (!lightUniforms[symbol]) { lightUniforms[symbol] = { type: '', value: [] }; } var value = uniformTpl.value(light); var lu = lightUniforms[symbol]; lu.type = uniformTpl.type + 'v'; switch (uniformTpl.type) { case '1i': case '1f': lu.value.push(value); break; case '2f': case '3f': case '4f': for (var j = 0; j < value.length; j++) { lu.value.push(value[j]); } break; default: console.error('Unknown light uniform type ' + uniformTpl.type); } } } }, isShaderLightNumberChanged: function (shader) { return shader.lightNumber.POINT_LIGHT !== this._lightNumber.POINT_LIGHT || shader.lightNumber.DIRECTIONAL_LIGHT !== this._lightNumber.DIRECTIONAL_LIGHT || shader.lightNumber.SPOT_LIGHT !== this._lightNumber.SPOT_LIGHT || shader.lightNumber.AMBIENT_LIGHT !== this._lightNumber.AMBIENT_LIGHT; }, setShaderLightNumber: function (shader) { for (var type in this._lightNumber) { shader.lightNumber[type] = this._lightNumber[type]; } shader.dirty(); }, setLightUniforms: function (shader, _gl) { for (var symbol in this._lightUniforms) { var lu = this._lightUniforms[symbol]; shader.setUniform(_gl, lu.type, symbol, lu.value); } }, dispose: function () { this.material = null; this.opaqueQueue = []; this.transparentQueue = []; this.lights = []; this._lightUniforms = {}; this._lightNumber = {}; this._nodeRepository = {}; } }); function lightSortFunc(a, b) { return (b.castShadow ? 1 : 0) - (a.castShadow ? 1 : 0); } return Scene; });define('qtek/camera/Perspective', [ 'require', '../Camera' ], function (require) { 'use strict'; var Camera = require('../Camera'); var Perspective = Camera.derive({ fov: 50, aspect: 1, near: 0.1, far: 2000 }, { updateProjectionMatrix: function () { var rad = this.fov / 180 * Math.PI; this.projectionMatrix.perspective(rad, this.aspect, this.near, this.far); }, clone: function () { var camera = Camera.prototype.clone.call(this); camera.fov = this.fov; camera.aspect = this.aspect; camera.near = this.near; camera.far = this.far; return camera; } }); return Perspective; });define('qtek/camera/Orthographic', [ 
'require', '../Camera' ], function (require) { 'use strict'; var Camera = require('../Camera'); var Orthographic = Camera.derive({ left: -1, right: 1, near: -1, far: 1, top: 1, bottom: -1 }, { updateProjectionMatrix: function () { this.projectionMatrix.ortho(this.left, this.right, this.bottom, this.top, this.near, this.far); }, clone: function () { var camera = Camera.prototype.clone.call(this); camera.left = this.left; camera.right = this.right; camera.near = this.near; camera.far = this.far; camera.top = this.top; camera.bottom = this.bottom; return camera; } }); return Orthographic; });define('qtek/picking/RayPicking', [ 'require', '../core/Base', '../math/Ray', '../math/Vector2', '../math/Vector3', '../math/Matrix4', '../Renderable', '../StaticGeometry', '../core/glenum' ], function (require) { var Base = require('../core/Base'); var Ray = require('../math/Ray'); var Vector2 = require('../math/Vector2'); var Vector3 = require('../math/Vector3'); var Matrix4 = require('../math/Matrix4'); var Renderable = require('../Renderable'); var StaticGeometry = require('../StaticGeometry'); var glenum = require('../core/glenum'); var RayPicking = Base.derive({ scene: null, camera: null, renderer: null }, function () { this._ray = new Ray(); this._ndc = new Vector2(); }, { pick: function (x, y) { var out = this.pickAll(x, y); return out[0] || null; }, pickAll: function (x, y) { this.renderer.screenToNdc(x, y, this._ndc); this.camera.castRay(this._ndc, this._ray); var output = []; this._intersectNode(this.scene, output); output.sort(this._intersectionCompareFunc); return output; }, _intersectNode: function (node, out) { if (node instanceof Renderable && node.isRenderable()) { if (!node.ignorePicking && node.geometry.isUseFace()) { this._intersectRenderable(node, out); } } for (var i = 0; i < node._children.length; i++) { this._intersectNode(node._children[i], out); } }, _intersectRenderable: function () { var v1 = new Vector3(); var v2 = new Vector3(); var v3 = new Vector3(); var ray = new Ray(); var worldInverse = new Matrix4(); return function (renderable, out) { ray.copy(this._ray); Matrix4.invert(worldInverse, renderable.worldTransform); ray.applyTransform(worldInverse); var geometry = renderable.geometry; if (geometry.boundingBox) { if (!ray.intersectBoundingBox(geometry.boundingBox)) { return false; } } var isStatic = geometry instanceof StaticGeometry; var cullBack = renderable.cullFace === glenum.BACK && renderable.frontFace === glenum.CCW || renderable.cullFace === glenum.FRONT && renderable.frontFace === glenum.CW; var point; if (isStatic) { var faces = geometry.faces; var positions = geometry.attributes.position.value; for (var i = 0; i < faces.length;) { var i1 = faces[i++] * 3; var i2 = faces[i++] * 3; var i3 = faces[i++] * 3; v1._array[0] = positions[i1]; v1._array[1] = positions[i1 + 1]; v1._array[2] = positions[i1 + 2]; v2._array[0] = positions[i2]; v2._array[1] = positions[i2 + 1]; v2._array[2] = positions[i2 + 2]; v3._array[0] = positions[i3]; v3._array[1] = positions[i3 + 1]; v3._array[2] = positions[i3 + 2]; if (cullBack) { point = ray.intersectTriangle(v1, v2, v3, renderable.culling); } else { point = ray.intersectTriangle(v1, v3, v2, renderable.culling); } if (point) { var pointW = new Vector3(); Vector3.transformMat4(pointW, point, renderable.worldTransform); out.push(new RayPicking.Intersection(point, pointW, renderable, [ i1, i2, i3 ], Vector3.dist(pointW, this._ray.origin))); } } } else { var faces = geometry.faces; var positions = geometry.attributes.position.value; for 
(var i = 0; i < faces.length; i++) { var face = faces[i]; var i1 = face[0]; var i2 = face[1]; var i3 = face[2]; v1.setArray(positions[i1]); v2.setArray(positions[i2]); v3.setArray(positions[i3]); if (cullBack) { point = ray.intersectTriangle(v1, v2, v3, renderable.culling); } else { point = ray.intersectTriangle(v1, v3, v2, renderable.culling); } if (point) { var pointW = new Vector3(); Vector3.transformMat4(pointW, point, renderable.worldTransform); out.push(new RayPicking.Intersection(point, pointW, renderable, [ i1, i2, i3 ], Vector3.dist(pointW, this._ray.origin))); } } } }; }(), _intersectionCompareFunc: function (a, b) { return a.distance - b.distance; } }); RayPicking.Intersection = function (point, pointWorld, target, face, distance) { this.point = point; this.pointWorld = pointWorld; this.target = target; this.face = face; this.distance = distance; }; return RayPicking; });define('qtek/shader/library', [ 'require', '../Shader', '../core/util' ], function (require) { var Shader = require('../Shader'); var util = require('../core/util'); var _library = {}; function ShaderLibrary() { this._pool = {}; } ShaderLibrary.prototype.get = function (name, option) { var enabledTextures = []; var vertexDefines = {}; var fragmentDefines = {}; if (typeof option === 'string') { enabledTextures = Array.prototype.slice.call(arguments, 1); } else if (Object.prototype.toString.call(option) == '[object Object]') { enabledTextures = option.textures || []; vertexDefines = option.vertexDefines || {}; fragmentDefines = option.fragmentDefines || {}; } else if (option instanceof Array) { enabledTextures = option; } var vertexDefineKeys = Object.keys(vertexDefines); var fragmentDefineKeys = Object.keys(fragmentDefines); enabledTextures.sort(); vertexDefineKeys.sort(); fragmentDefineKeys.sort(); var keyArr = [name]; keyArr = keyArr.concat(enabledTextures); for (var i = 0; i < vertexDefineKeys.length; i++) { keyArr.push(vertexDefines[vertexDefineKeys[i]]); } for (var i = 0; i < fragmentDefineKeys.length; i++) { keyArr.push(fragmentDefines[fragmentDefineKeys[i]]); } var key = keyArr.join('_'); if (this._pool[key]) { return this._pool[key]; } else { var source = _library[name]; if (!source) { console.error('Shader "' + name + '"' + ' is not in the library'); return; } var shader = new Shader({ 'vertex': source.vertex, 'fragment': source.fragment }); for (var i = 0; i < enabledTextures.length; i++) { shader.enableTexture(enabledTextures[i]); } for (var name in vertexDefines) { shader.define('vertex', name, vertexDefines[name]); } for (var name in fragmentDefines) { shader.define('fragment', name, fragmentDefines[name]); } this._pool[key] = shader; return shader; } }; ShaderLibrary.prototype.clear = function () { this._pool = {}; }; function template(name, vertex, fragment) { _library[name] = { vertex: vertex, fragment: fragment }; } var defaultLibrary = new ShaderLibrary(); return { createLibrary: function () { return new ShaderLibrary(); }, get: function () { return defaultLibrary.get.apply(defaultLibrary, arguments); }, template: template, clear: function () { return defaultLibrary.clear(); } }; });define('qtek/math/Vector2', [ 'require', '../dep/glmatrix' ], function (require) { 'use strict'; var glMatrix = require('../dep/glmatrix'); var vec2 = glMatrix.vec2; var Vector2 = function (x, y) { x = x || 0; y = y || 0; this._array = vec2.fromValues(x, y); this._dirty = true; }; Vector2.prototype = { constructor: Vector2, add: function (b) { vec2.add(this._array, this._array, b._array); this._dirty = true; return 
this; }, set: function (x, y) { this._array[0] = x; this._array[1] = y; this._dirty = true; return this; }, setArray: function (arr) { this._array[0] = arr[0]; this._array[1] = arr[1]; this._dirty = true; return this; }, clone: function () { return new Vector2(this.x, this.y); }, copy: function (b) { vec2.copy(this._array, b._array); this._dirty = true; return this; }, cross: function (out, b) { vec2.cross(out._array, this._array, b._array); out._dirty = true; return this; }, dist: function (b) { return vec2.dist(this._array, b._array); }, distance: function (b) { return vec2.distance(this._array, b._array); }, div: function (b) { vec2.div(this._array, this._array, b._array); this._dirty = true; return this; }, divide: function (b) { vec2.divide(this._array, this._array, b._array); this._dirty = true; return this; }, dot: function (b) { return vec2.dot(this._array, b._array); }, len: function () { return vec2.len(this._array); }, length: function () { return vec2.length(this._array); }, lerp: function (a, b, t) { vec2.lerp(this._array, a._array, b._array, t); this._dirty = true; return this; }, min: function (b) { vec2.min(this._array, this._array, b._array); this._dirty = true; return this; }, max: function (b) { vec2.max(this._array, this._array, b._array); this._dirty = true; return this; }, mul: function (b) { vec2.mul(this._array, this._array, b._array); this._dirty = true; return this; }, multiply: function (b) { vec2.multiply(this._array, this._array, b._array); this._dirty = true; return this; }, negate: function () { vec2.negate(this._array, this._array); this._dirty = true; return this; }, normalize: function () { vec2.normalize(this._array, this._array); this._dirty = true; return this; }, random: function (scale) { vec2.random(this._array, scale); this._dirty = true; return this; }, scale: function (s) { vec2.scale(this._array, this._array, s); this._dirty = true; return this; }, scaleAndAdd: function (b, s) { vec2.scaleAndAdd(this._array, this._array, b._array, s); this._dirty = true; return this; }, sqrDist: function (b) { return vec2.sqrDist(this._array, b._array); }, squaredDistance: function (b) { return vec2.squaredDistance(this._array, b._array); }, sqrLen: function () { return vec2.sqrLen(this._array); }, squaredLength: function () { return vec2.squaredLength(this._array); }, sub: function (b) { vec2.sub(this._array, this._array, b._array); this._dirty = true; return this; }, subtract: function (b) { vec2.subtract(this._array, this._array, b._array); this._dirty = true; return this; }, transformMat2: function (m) { vec2.transformMat2(this._array, this._array, m._array); this._dirty = true; return this; }, transformMat2d: function (m) { vec2.transformMat2d(this._array, this._array, m._array); this._dirty = true; return this; }, transformMat3: function (m) { vec2.transformMat3(this._array, this._array, m._array); this._dirty = true; return this; }, transformMat4: function (m) { vec2.transformMat4(this._array, this._array, m._array); this._dirty = true; return this; }, toString: function () { return '[' + Array.prototype.join.call(this._array, ',') + ']'; } }; if (Object.defineProperty) { var proto = Vector2.prototype; Object.defineProperty(proto, 'x', { get: function () { return this._array[0]; }, set: function (value) { this._array[0] = value; this._dirty = true; } }); Object.defineProperty(proto, 'y', { get: function () { return this._array[1]; }, set: function (value) { this._array[1] = value; this._dirty = true; } }); } Vector2.add = function (out, a, b) { 
vec2.add(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.set = function (out, x, y) { vec2.set(out._array, x, y); out._dirty = true; return out; }; Vector2.copy = function (out, b) { vec2.copy(out._array, b._array); out._dirty = true; return out; }; Vector2.cross = function (out, a, b) { vec2.cross(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.dist = function (a, b) { return vec2.distance(a._array, b._array); }; Vector2.distance = Vector2.dist; Vector2.div = function (out, a, b) { vec2.divide(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.divide = Vector2.div; Vector2.dot = function (a, b) { return vec2.dot(a._array, b._array); }; Vector2.len = function (b) { return vec2.length(b._array); }; Vector2.lerp = function (out, a, b, t) { vec2.lerp(out._array, a._array, b._array, t); out._dirty = true; return out; }; Vector2.min = function (out, a, b) { vec2.min(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.max = function (out, a, b) { vec2.max(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.mul = function (out, a, b) { vec2.multiply(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.multiply = Vector2.mul; Vector2.negate = function (out, a) { vec2.negate(out._array, a._array); out._dirty = true; return out; }; Vector2.normalize = function (out, a) { vec2.normalize(out._array, a._array); out._dirty = true; return out; }; Vector2.random = function (out, scale) { vec2.random(out._array, scale); out._dirty = true; return out; }; Vector2.scale = function (out, a, scale) { vec2.scale(out._array, a._array, scale); out._dirty = true; return out; }; Vector2.scaleAndAdd = function (out, a, b, scale) { vec2.scaleAndAdd(out._array, a._array, b._array, scale); out._dirty = true; return out; }; Vector2.sqrDist = function (a, b) { return vec2.sqrDist(a._array, b._array); }; Vector2.squaredDistance = Vector2.sqrDist; Vector2.sqrLen = function (a) { return vec2.sqrLen(a._array); }; Vector2.squaredLength = Vector2.sqrLen; Vector2.sub = function (out, a, b) { vec2.subtract(out._array, a._array, b._array); out._dirty = true; return out; }; Vector2.subtract = Vector2.sub; Vector2.transformMat2 = function (out, a, m) { vec2.transformMat2(out._array, a._array, m._array); out._dirty = true; return out; }; Vector2.transformMat2d = function (out, a, m) { vec2.transformMat2d(out._array, a._array, m._array); out._dirty = true; return out; }; Vector2.transformMat3 = function (out, a, m) { vec2.transformMat3(out._array, a._array, m._array); out._dirty = true; return out; }; Vector2.transformMat4 = function (out, a, m) { vec2.transformMat4(out._array, a._array, m._array); out._dirty = true; return out; }; return Vector2; });define('qtek/Light', [ 'require', './Node', './Shader', './light/light.essl' ], function (require) { 'use strict'; var Node = require('./Node'); var Shader = require('./Shader'); var Light = Node.derive(function () { return { color: [ 1, 1, 1 ], intensity: 1, castShadow: true, shadowResolution: 512 }; }, { type: '', clone: function () { var light = Node.prototype.clone.call(this); light.color = Array.prototype.slice.call(this.color); light.intensity = this.intensity; light.castShadow = this.castShadow; light.shadowResolution = this.shadowResolution; return light; } }); Shader['import'](require('./light/light.essl')); return Light; });; define('qtek/light/light.essl', function() { return '@export buildin.header.directional_light\nuniform vec3 
directionalLightDirection[ DIRECTIONAL_LIGHT_NUMBER ] : unconfigurable;\nuniform vec3 directionalLightColor[ DIRECTIONAL_LIGHT_NUMBER ] : unconfigurable;\n@end\n\n@export buildin.header.ambient_light\nuniform vec3 ambientLightColor[ AMBIENT_LIGHT_NUMBER ] : unconfigurable;\n@end\n\n@export buildin.header.point_light\nuniform vec3 pointLightPosition[ POINT_LIGHT_NUMBER ] : unconfigurable;\nuniform float pointLightRange[ POINT_LIGHT_NUMBER ] : unconfigurable;\nuniform vec3 pointLightColor[ POINT_LIGHT_NUMBER ] : unconfigurable;\n@end\n\n@export buildin.header.spot_light\nuniform vec3 spotLightPosition[SPOT_LIGHT_NUMBER] : unconfigurable;\nuniform vec3 spotLightDirection[SPOT_LIGHT_NUMBER] : unconfigurable;\nuniform float spotLightRange[SPOT_LIGHT_NUMBER] : unconfigurable;\nuniform float spotLightUmbraAngleCosine[SPOT_LIGHT_NUMBER] : unconfigurable;\nuniform float spotLightPenumbraAngleCosine[SPOT_LIGHT_NUMBER] : unconfigurable;\nuniform float spotLightFalloffFactor[SPOT_LIGHT_NUMBER] : unconfigurable;\nuniform vec3 spotLightColor[SPOT_LIGHT_NUMBER] : unconfigurable;\n@end'}); define('qtek/Camera', [ 'require', './Node', './math/Matrix4', './math/Frustum', './math/BoundingBox', './math/Ray', './dep/glmatrix' ], function (require) { 'use strict'; var Node = require('./Node'); var Matrix4 = require('./math/Matrix4'); var Frustum = require('./math/Frustum'); var BoundingBox = require('./math/BoundingBox'); var Ray = require('./math/Ray'); var glMatrix = require('./dep/glmatrix'); var mat4 = glMatrix.mat4; var vec3 = glMatrix.vec3; var vec4 = glMatrix.vec4; var Camera = Node.derive(function () { return { projectionMatrix: new Matrix4(), invProjectionMatrix: new Matrix4(), viewMatrix: new Matrix4(), frustum: new Frustum(), sceneBoundingBoxLastFrame: new BoundingBox() }; }, function () { this.update(true); }, { update: function (force) { Node.prototype.update.call(this, force); mat4.invert(this.viewMatrix._array, this.worldTransform._array); this.updateProjectionMatrix(); mat4.invert(this.invProjectionMatrix._array, this.projectionMatrix._array); this.frustum.setFromProjection(this.projectionMatrix); }, updateProjectionMatrix: function () { }, castRay: function () { var v4 = vec4.create(); return function (ndc, out) { var ray = out !== undefined ? 
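// Unproject the NDC point at the near (z = -1) and far (z = 1) clip planes to recover a world-space picking ray.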
out : new Ray(); var x = ndc._array[0]; var y = ndc._array[1]; vec4.set(v4, x, y, -1, 1); vec4.transformMat4(v4, v4, this.invProjectionMatrix._array); vec4.transformMat4(v4, v4, this.worldTransform._array); vec3.scale(ray.origin._array, v4, 1 / v4[3]); vec4.set(v4, x, y, 1, 1); vec4.transformMat4(v4, v4, this.invProjectionMatrix._array); vec4.transformMat4(v4, v4, this.worldTransform._array); vec3.scale(v4, v4, 1 / v4[3]); vec3.sub(ray.direction._array, v4, ray.origin._array); vec3.normalize(ray.direction._array, ray.direction._array); ray.direction._dirty = true; ray.origin._dirty = true; return ray; }; }() }); return Camera; });define('qtek/math/Frustum', [ 'require', './Vector3', './BoundingBox', './Plane', '../dep/glmatrix' ], function (require) { 'use strict'; var Vector3 = require('./Vector3'); var BoundingBox = require('./BoundingBox'); var Plane = require('./Plane'); var glMatrix = require('../dep/glmatrix'); var vec3 = glMatrix.vec3; var Frustum = function () { this.planes = []; for (var i = 0; i < 6; i++) { this.planes.push(new Plane()); } this.boundingBox = new BoundingBox(); this.vertices = []; for (var i = 0; i < 8; i++) { this.vertices[i] = vec3.fromValues(0, 0, 0); } }; Frustum.prototype = { setFromProjection: function (projectionMatrix) { var planes = this.planes; var m = projectionMatrix._array; var m0 = m[0], m1 = m[1], m2 = m[2], m3 = m[3]; var m4 = m[4], m5 = m[5], m6 = m[6], m7 = m[7]; var m8 = m[8], m9 = m[9], m10 = m[10], m11 = m[11]; var m12 = m[12], m13 = m[13], m14 = m[14], m15 = m[15]; vec3.set(planes[0].normal._array, m3 - m0, m7 - m4, m11 - m8); planes[0].distance = -(m15 - m12); planes[0].normalize(); vec3.set(planes[1].normal._array, m3 + m0, m7 + m4, m11 + m8); planes[1].distance = -(m15 + m12); planes[1].normalize(); vec3.set(planes[2].normal._array, m3 + m1, m7 + m5, m11 + m9); planes[2].distance = -(m15 + m13); planes[2].normalize(); vec3.set(planes[3].normal._array, m3 - m1, m7 - m5, m11 - m9); planes[3].distance = -(m15 - m13); planes[3].normalize(); vec3.set(planes[4].normal._array, m3 - m2, m7 - m6, m11 - m10); planes[4].distance = -(m15 - m14); planes[4].normalize(); vec3.set(planes[5].normal._array, m3 + m2, m7 + m6, m11 + m10); planes[5].distance = -(m15 + m14); planes[5].normalize(); if (m15 === 0) { var aspect = m5 / m0; var zNear = -m14 / (m10 - 1); var zFar = -m14 / (m10 + 1); var farY = -zFar / m5; var nearY = -zNear / m5; this.boundingBox.min.set(-farY * aspect, -farY, zFar); this.boundingBox.max.set(farY * aspect, farY, zNear); var vertices = this.vertices; vec3.set(vertices[0], -farY * aspect, -farY, zFar); vec3.set(vertices[1], -farY * aspect, farY, zFar); vec3.set(vertices[2], farY * aspect, -farY, zFar); vec3.set(vertices[3], farY * aspect, farY, zFar); vec3.set(vertices[4], -nearY * aspect, -nearY, zNear); vec3.set(vertices[5], -nearY * aspect, nearY, zNear); vec3.set(vertices[6], nearY * aspect, -nearY, zNear); vec3.set(vertices[7], nearY * aspect, nearY, zNear); } else { var left = (-1 - m12) / m0; var right = (1 - m12) / m0; var top = (1 - m13) / m5; var bottom = (-1 - m13) / m5; var near = (-1 - m14) / m10; var far = (1 - m14) / m10; this.boundingBox.min.set(left, bottom, far); this.boundingBox.max.set(right, top, near); for (var i = 0; i < 8; i++) { vec3.copy(this.vertices[i], this.boundingBox.vertices[i]); } } }, getTransformedBoundingBox: function () { var tmpVec3 = vec3.create(); return function (bbox, matrix) { var vertices = this.vertices; var m4 = matrix._array; var _min = bbox.min._array; var _max = bbox.max._array; var v = 
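// Transform all 8 frustum corners and accumulate the componentwise min/max to rebuild the axis-aligned box.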
vertices[0]; vec3.transformMat4(tmpVec3, v, m4); vec3.copy(_min, tmpVec3); vec3.copy(_max, tmpVec3); for (var i = 1; i < 8; i++) { v = vertices[i]; vec3.transformMat4(tmpVec3, v, m4); _min[0] = Math.min(tmpVec3[0], _min[0]); _min[1] = Math.min(tmpVec3[1], _min[1]); _min[2] = Math.min(tmpVec3[2], _min[2]); _max[0] = Math.max(tmpVec3[0], _max[0]); _max[1] = Math.max(tmpVec3[1], _max[1]); _max[2] = Math.max(tmpVec3[2], _max[2]); } bbox.min._dirty = true; bbox.max._dirty = true; return bbox; }; }() }; return Frustum; });define('qtek/math/Ray', [ 'require', './Vector3', '../dep/glmatrix' ], function (require) { 'use strict'; var Vector3 = require('./Vector3'); var glMatrix = require('../dep/glmatrix'); var vec3 = glMatrix.vec3; var EPSILON = 0.00001; var Ray = function (origin, direction) { this.origin = origin || new Vector3(); this.direction = direction || new Vector3(); }; Ray.prototype = { constructor: Ray, intersectPlane: function (plane, out) { var pn = plane.normal._array; var d = plane.distance; var ro = this.origin._array; var rd = this.direction._array; var divider = vec3.dot(pn, rd); if (divider === 0) { return null; } if (!out) { out = new Vector3(); } var t = (vec3.dot(pn, ro) - d) / divider; vec3.scaleAndAdd(out._array, ro, rd, -t); out._dirty = true; return out; }, mirrorAgainstPlane: function (plane) { var d = vec3.dot(plane.normal._array, this.direction._array); vec3.scaleAndAdd(this.direction._array, this.direction._array, plane.normal._array, -d * 2); this.direction._dirty = true; }, distanceToPoint: function () { var v = vec3.create(); return function (point) { vec3.sub(v, point, this.origin._array); var b = vec3.dot(v, this.direction._array); if (b < 0) { return vec3.distance(this.origin._array, point); } var c2 = vec3.lenSquared(v); return Math.sqrt(c2 - b * b); }; }(), intersectSphere: function () { var v = vec3.create(); return function (center, radius, out) { var origin = this.origin._array; var direction = this.direction._array; vec3.sub(v, center, origin); var b = vec3.dot(v, direction); var c2 = vec3.lenSquared(v); var d2 = c2 - b * b; var r2 = radius * radius; if (d2 > r2) { return; } var a = Math.sqrt(r2 - d2); var t0 = b - a; var t1 = b + a; if (!out) { out = new Vector3(); } if (t0 < 0) { if (t1 < 0) { return null; } else { vec3.scaleAndAdd(out._array, origin, direction, t1); return out; } } else { vec3.scaleAndAdd(out._array, origin, direction, t0); return out; } }; }(), intersectBoundingBox: function (bbox, out) { var dir = this.direction._array; var origin = this.origin._array; var min = bbox.min._array; var max = bbox.max._array; var invdirx = 1 / dir[0]; var invdiry = 1 / dir[1]; var invdirz = 1 / dir[2]; var tmin, tmax, tymin, tymax, tzmin, tzmax; if (invdirx >= 0) { tmin = (min[0] - origin[0]) * invdirx; tmax = (max[0] - origin[0]) * invdirx; } else { tmax = (min[0] - origin[0]) * invdirx; tmin = (max[0] - origin[0]) * invdirx; } if (invdiry >= 0) { tymin = (min[1] - origin[1]) * invdiry; tymax = (max[1] - origin[1]) * invdiry; } else { tymax = (min[1] - origin[1]) * invdiry; tymin = (max[1] - origin[1]) * invdiry; } if (tmin > tymax || tymin > tmax) { return null; } if (tymin > tmin || tmin !== tmin) { tmin = tymin; } if (tymax < tmax || tmax !== tmax) { tmax = tymax; } if (invdirz >= 0) { tzmin = (min[2] - origin[2]) * invdirz; tzmax = (max[2] - origin[2]) * invdirz; } else { tzmax = (min[2] - origin[2]) * invdirz; tzmin = (max[2] - origin[2]) * invdirz; } if (tmin > tzmax || tzmin > tmax) { return null; } if (tzmin > tmin || tmin !== tmin) { tmin = 
tzmin; } if (tzmax < tmax || tmax !== tmax) { tmax = tzmax; } if (tmax < 0) { return null; } var t = tmin >= 0 ? tmin : tmax; if (!out) { out = new Vector3(); } vec3.scaleAndAdd(out._array, origin, dir, t); return out; }, intersectTriangle: function () { var eBA = vec3.create(); var eCA = vec3.create(); var AO = vec3.create(); var vCross = vec3.create(); return function (a, b, c, singleSided, out, barycenteric) { var dir = this.direction._array; var origin = this.origin._array; a = a._array; b = b._array; c = c._array; vec3.sub(eBA, b, a); vec3.sub(eCA, c, a); vec3.cross(vCross, eCA, dir); var det = vec3.dot(eBA, vCross); if (singleSided) { if (det > -EPSILON) { return null; } } else { if (det > -EPSILON && det < EPSILON) { return null; } } vec3.sub(AO, origin, a); var u = vec3.dot(vCross, AO) / det; if (u < 0 || u > 1) { return null; } vec3.cross(vCross, eBA, AO); var v = vec3.dot(dir, vCross) / det; if (v < 0 || v > 1 || u + v > 1) { return null; } vec3.cross(vCross, eBA, eCA); var t = -vec3.dot(AO, vCross) / det; if (t < 0) { return null; } if (!out) { out = new Vector3(); } if (barycenteric) { Vector3.set(barycenteric, 1 - u - v, u, v); } vec3.scaleAndAdd(out._array, origin, dir, t); return out; }; }(), applyTransform: function (matrix) { Vector3.add(this.direction, this.direction, this.origin); Vector3.transformMat4(this.origin, this.origin, matrix); Vector3.transformMat4(this.direction, this.direction, matrix); Vector3.sub(this.direction, this.direction, this.origin); Vector3.normalize(this.direction, this.direction); }, copy: function (ray) { Vector3.copy(this.origin, ray.origin); Vector3.copy(this.direction, ray.direction); }, clone: function () { var ray = new Ray(); ray.copy(this); return ray; } }; return Ray; });define('qtek/math/Plane', [ 'require', './Vector3', '../dep/glmatrix' ], function (require) { 'use strict'; var Vector3 = require('./Vector3'); var glMatrix = require('../dep/glmatrix'); var vec3 = glMatrix.vec3; var mat4 = glMatrix.mat4; var vec4 = glMatrix.vec4; var Plane = function (normal, distance) { this.normal = normal || new Vector3(0, 1, 0); this.distance = distance || 0; }; Plane.prototype = { constructor: Plane, distanceToPoint: function (point) { return vec3.dot(point._array, this.normal._array) - this.distance; }, projectPoint: function (point, out) { if (!out) { out = new Vector3(); } var d = this.distanceToPoint(point); vec3.scaleAndAdd(out._array, point._array, this.normal._array, -d); out._dirty = true; return out; }, normalize: function () { var invLen = 1 / vec3.len(this.normal._array); vec3.scale(this.normal._array, invLen); this.distance *= invLen; }, intersectFrustum: function (frustum) { var coords = frustum.vertices; var normal = this.normal._array; var onPlane = vec3.dot(coords[0]._array, normal) > this.distance; for (var i = 1; i < 8; i++) { if (vec3.dot(coords[i]._array, normal) > this.distance != onPlane) { return true; } } }, intersectLine: function () { var rd = vec3.create(); return function (start, end, out) { var d0 = this.distanceToPoint(start); var d1 = this.distanceToPoint(end); if (d0 > 0 && d1 > 0 || d0 < 0 && d1 < 0) { return null; } var pn = this.normal._array; var d = this.distance; var ro = start._array; vec3.sub(rd, end._array, start._array); vec3.normalize(rd, rd); var divider = vec3.dot(pn, rd); if (divider === 0) { return null; } if (!out) { out = new Vector3(); } var t = (vec3.dot(pn, ro) - d) / divider; vec3.scaleAndAdd(out._array, ro, rd, -t); out._dirty = true; return out; }; }(), applyTransform: function () { var 
inverseTranspose = mat4.create(); var normalv4 = vec4.create(); var pointv4 = vec4.create(); pointv4[3] = 1; return function (m4) { m4 = m4._array; vec3.scale(pointv4, this.normal._array, this.distance); vec4.transformMat4(pointv4, pointv4, m4); this.distance = vec3.dot(pointv4, this.normal._array); mat4.invert(inverseTranspose, m4); mat4.transpose(inverseTranspose, inverseTranspose); normalv4[3] = 0; vec3.copy(normalv4, this.normal._array); vec4.transformMat4(normalv4, normalv4, inverseTranspose); vec3.copy(this.normal._array, normalv4); }; }(), copy: function (plane) { vec3.copy(this.normal._array, plane.normal._array); this.normal._dirty = true; this.distance = plane.distance; }, clone: function () { var plane = new Plane(); plane.copy(this); return plane; } }; return Plane; });define('qtek/StaticGeometry', [ 'require', './Geometry', './math/BoundingBox', './dep/glmatrix', './core/glenum' ], function (require) { 'use strict'; var Geometry = require('./Geometry'); var BoundingBox = require('./math/BoundingBox'); var glMatrix = require('./dep/glmatrix'); var glenum = require('./core/glenum'); var mat4 = glMatrix.mat4; var vec3 = glMatrix.vec3; var StaticGeometry = Geometry.derive(function () { return { attributes: { position: new Geometry.Attribute('position', 'float', 3, 'POSITION', false), texcoord0: new Geometry.Attribute('texcoord0', 'float', 2, 'TEXCOORD_0', false), texcoord1: new Geometry.Attribute('texcoord1', 'float', 2, 'TEXCOORD_1', false), normal: new Geometry.Attribute('normal', 'float', 3, 'NORMAL', false), tangent: new Geometry.Attribute('tangent', 'float', 4, 'TANGENT', false), color: new Geometry.Attribute('color', 'float', 4, 'COLOR', false), weight: new Geometry.Attribute('weight', 'float', 3, 'WEIGHT', false), joint: new Geometry.Attribute('joint', 'float', 4, 'JOINT', false), barycentric: new Geometry.Attribute('barycentric', 'float', 3, null, false) }, hint: glenum.STATIC_DRAW, faces: null, _normalType: 'vertex', _enabledAttributes: null }; }, { dirty: function () { this._cache.dirtyAll(); this._enabledAttributes = null; }, getVertexNumber: function () { var mainAttribute = this.attributes[this.mainAttribute]; if (!mainAttribute || !mainAttribute.value) { return 0; } return mainAttribute.value.length / mainAttribute.size; }, getFaceNumber: function () { if (!this.faces) { return 0; } else { return this.faces.length / 3; } }, getFace: function (idx, out) { if (idx < this.getFaceNumber() && idx >= 0) { if (!out) { out = vec3.create(); } out[0] = this.faces[idx * 3]; out[1] = this.faces[idx * 3 + 1]; out[2] = this.faces[idx * 3 + 2]; return out; } }, isUseFace: function () { return this.useFace && this.faces != null; }, createAttribute: function (name, type, size, semantic) { var attrib = new Geometry.Attribute(name, type, size, semantic, false); this.attributes[name] = attrib; this._attributeList.push(name); return attrib; }, removeAttribute: function (name) { var idx = this._attributeList.indexOf(name); if (idx >= 0) { this._attributeList.splice(idx, 1); delete this.attributes[name]; return true; } return false; }, getEnabledAttributes: function () { if (this._enabledAttributes) { return this._enabledAttributes; } var result = []; var nVertex = this.getVertexNumber(); for (var i = 0; i < this._attributeList.length; i++) { var name = this._attributeList[i]; var attrib = this.attributes[name]; if (attrib.value) { if (attrib.value.length === nVertex * attrib.size) { result.push(name); } } } this._enabledAttributes = result; return result; }, getBufferChunks: function 
(_gl) { this._cache.use(_gl.__GLID__); if (this._cache.isDirty()) { this._updateBuffer(_gl); this._cache.fresh(); } return this._cache.get('chunks'); }, _updateBuffer: function (_gl) { var chunks = this._cache.get('chunks'); var firstUpdate = false; if (!chunks) { chunks = []; chunks[0] = { attributeBuffers: [], indicesBuffer: null }; this._cache.put('chunks', chunks); firstUpdate = true; } var chunk = chunks[0]; var attributeBuffers = chunk.attributeBuffers; var indicesBuffer = chunk.indicesBuffer; var attributeList = this.getEnabledAttributes(); var prevSearchIdx = 0; var count = 0; for (var k = 0; k < attributeList.length; k++) { var name = attributeList[k]; var attribute = this.attributes[name]; var bufferInfo; if (!firstUpdate) { for (var i = prevSearchIdx; i < attributeBuffers.length; i++) { if (attributeBuffers[i].name === name) { bufferInfo = attributeBuffers[i]; prevSearchIdx = i + 1; break; } } if (!bufferInfo) { for (var i = prevSearchIdx - 1; i >= 0; i--) { if (attributeBuffers[i].name === name) { bufferInfo = attributeBuffers[i]; prevSearchIdx = i; break; } } } } var buffer; if (bufferInfo) { buffer = bufferInfo.buffer; } else { buffer = _gl.createBuffer(); } _gl.bindBuffer(_gl.ARRAY_BUFFER, buffer); _gl.bufferData(_gl.ARRAY_BUFFER, attribute.value, this.hint); attributeBuffers[count++] = new Geometry.AttributeBuffer(name, attribute.type, buffer, attribute.size, attribute.semantic); } attributeBuffers.length = count; if (this.isUseFace()) { if (!indicesBuffer) { indicesBuffer = new Geometry.IndicesBuffer(_gl.createBuffer()); chunk.indicesBuffer = indicesBuffer; } indicesBuffer.count = this.faces.length; _gl.bindBuffer(_gl.ELEMENT_ARRAY_BUFFER, indicesBuffer.buffer); _gl.bufferData(_gl.ELEMENT_ARRAY_BUFFER, this.faces, this.hint); } }, generateVertexNormals: function () { var faces = this.faces; var positions = this.attributes.position.value; var normals = this.attributes.normal.value; if (!normals || normals.length !== positions.length) { normals = this.attributes.normal.value = new Float32Array(positions.length); } else { for (var i = 0; i < normals.length; i++) { normals[i] = 0; } } var p1 = vec3.create(); var p2 = vec3.create(); var p3 = vec3.create(); var v21 = vec3.create(); var v32 = vec3.create(); var n = vec3.create(); for (var f = 0; f < faces.length;) { var i1 = faces[f++]; var i2 = faces[f++]; var i3 = faces[f++]; vec3.set(p1, positions[i1 * 3], positions[i1 * 3 + 1], positions[i1 * 3 + 2]); vec3.set(p2, positions[i2 * 3], positions[i2 * 3 + 1], positions[i2 * 3 + 2]); vec3.set(p3, positions[i3 * 3], positions[i3 * 3 + 1], positions[i3 * 3 + 2]); vec3.sub(v21, p1, p2); vec3.sub(v32, p2, p3); vec3.cross(n, v21, v32); for (var i = 0; i < 3; i++) { normals[i1 * 3 + i] = normals[i1 * 3 + i] + n[i]; normals[i2 * 3 + i] = normals[i2 * 3 + i] + n[i]; normals[i3 * 3 + i] = normals[i3 * 3 + i] + n[i]; } } for (var i = 0; i < normals.length;) { vec3.set(n, normals[i], normals[i + 1], normals[i + 2]); vec3.normalize(n, n); normals[i++] = n[0]; normals[i++] = n[1]; normals[i++] = n[2]; } }, generateFaceNormals: function () { if (!this.isUniqueVertex()) { this.generateUniqueVertex(); } var faces = this.faces; var positions = this.attributes.position.value; var normals = this.attributes.normal.value; var p1 = vec3.create(); var p2 = vec3.create(); var p3 = vec3.create(); var v21 = vec3.create(); var v32 = vec3.create(); var n = vec3.create(); if (!normals) { normals = this.attributes.position.value = new Float32Array(positions.length); } for (var f = 0; f < faces.length;) { var 
i1 = faces[f++]; var i2 = faces[f++]; var i3 = faces[f++]; vec3.set(p1, positions[i1 * 3], positions[i1 * 3 + 1], positions[i1 * 3 + 2]); vec3.set(p2, positions[i2 * 3], positions[i2 * 3 + 1], positions[i2 * 3 + 2]); vec3.set(p3, positions[i3 * 3], positions[i3 * 3 + 1], positions[i3 * 3 + 2]); vec3.sub(v21, p1, p2); vec3.sub(v32, p2, p3); vec3.cross(n, v21, v32); vec3.normalize(n, n); for (var i = 0; i < 3; i++) { normals[i1 * 3 + i] = n[i]; normals[i2 * 3 + i] = n[i]; normals[i3 * 3 + i] = n[i]; } } }, generateTangents: function () { var nVertex = this.getVertexNumber(); if (!this.attributes.tangent.value) { this.attributes.tangent.value = new Float32Array(nVertex * 4); } var texcoords = this.attributes.texcoord0.value; var positions = this.attributes.position.value; var tangents = this.attributes.tangent.value; var normals = this.attributes.normal.value; var tan1 = []; var tan2 = []; for (var i = 0; i < nVertex; i++) { tan1[i] = [ 0, 0, 0 ]; tan2[i] = [ 0, 0, 0 ]; } var sdir = [ 0, 0, 0 ]; var tdir = [ 0, 0, 0 ]; for (var i = 0; i < this.faces.length;) { var i1 = this.faces[i++], i2 = this.faces[i++], i3 = this.faces[i++], st1s = texcoords[i1 * 2], st2s = texcoords[i2 * 2], st3s = texcoords[i3 * 2], st1t = texcoords[i1 * 2 + 1], st2t = texcoords[i2 * 2 + 1], st3t = texcoords[i3 * 2 + 1], p1x = positions[i1 * 3], p2x = positions[i2 * 3], p3x = positions[i3 * 3], p1y = positions[i1 * 3 + 1], p2y = positions[i2 * 3 + 1], p3y = positions[i3 * 3 + 1], p1z = positions[i1 * 3 + 2], p2z = positions[i2 * 3 + 2], p3z = positions[i3 * 3 + 2]; var x1 = p2x - p1x, x2 = p3x - p1x, y1 = p2y - p1y, y2 = p3y - p1y, z1 = p2z - p1z, z2 = p3z - p1z; var s1 = st2s - st1s, s2 = st3s - st1s, t1 = st2t - st1t, t2 = st3t - st1t; var r = 1 / (s1 * t2 - t1 * s2); sdir[0] = (t2 * x1 - t1 * x2) * r; sdir[1] = (t2 * y1 - t1 * y2) * r; sdir[2] = (t2 * z1 - t1 * z2) * r; tdir[0] = (s1 * x2 - s2 * x1) * r; tdir[1] = (s1 * y2 - s2 * y1) * r; tdir[2] = (s1 * z2 - s2 * z1) * r; vec3.add(tan1[i1], tan1[i1], sdir); vec3.add(tan1[i2], tan1[i2], sdir); vec3.add(tan1[i3], tan1[i3], sdir); vec3.add(tan2[i1], tan2[i1], tdir); vec3.add(tan2[i2], tan2[i2], tdir); vec3.add(tan2[i3], tan2[i3], tdir); } var tmp = vec3.create(); var nCrossT = vec3.create(); var n = vec3.create(); for (var i = 0; i < nVertex; i++) { n[0] = normals[i * 3]; n[1] = normals[i * 3 + 1]; n[2] = normals[i * 3 + 2]; var t = tan1[i]; vec3.scale(tmp, n, vec3.dot(n, t)); vec3.sub(tmp, t, tmp); vec3.normalize(tmp, tmp); vec3.cross(nCrossT, n, t); tangents[i * 4] = tmp[0]; tangents[i * 4 + 1] = tmp[1]; tangents[i * 4 + 2] = tmp[2]; tangents[i * 4 + 3] = vec3.dot(nCrossT, tan2[i]) < 0 ? 
-1 : 1; } }, isUniqueVertex: function () { if (this.isUseFace()) { return this.getVertexNumber() === this.faces.length; } else { return true; } }, generateUniqueVertex: function () { var vertexUseCount = []; for (var i = 0, len = this.getVertexNumber(); i < len; i++) { vertexUseCount[i] = 0; } var cursor = this.getVertexNumber(); var attributes = this.attributes; var faces = this.faces; var attributeNameList = this.getEnabledAttributes(); for (var a = 0; a < attributeNameList.length; a++) { var name = attributeNameList[a]; var expandedArray = new Float32Array(this.faces.length * attributes[name].size); var len = attributes[name].value.length; for (var i = 0; i < len; i++) { expandedArray[i] = attributes[name].value[i]; } attributes[name].value = expandedArray; } for (var i = 0; i < faces.length; i++) { var ii = faces[i]; if (vertexUseCount[ii] > 0) { for (var a = 0; a < attributeNameList.length; a++) { var name = attributeNameList[a]; var array = attributes[name].value; var size = attributes[name].size; for (var k = 0; k < size; k++) { array[cursor * size + k] = array[ii * size + k]; } } faces[i] = cursor; cursor++; } vertexUseCount[ii]++; } }, generateBarycentric: function () { if (!this.isUniqueVertex()) { this.generateUniqueVertex(); } var array = this.attributes.barycentric.value; if (array && array.length === this.faces.length * 3) { return; } array = this.attributes.barycentric.value = new Float32Array(this.faces.length * 3); for (var i = 0; i < this.faces.length;) { for (var j = 0; j < 3; j++) { var ii = this.faces[i++]; array[ii + j] = 1; } } }, convertToDynamic: function (geometry) { for (var i = 0; i < this.faces.length; i += 3) { geometry.faces.push(this.face.subarray(i, i + 3)); } var attributes = this.getEnabledAttributes(); for (var name in attributes) { var attrib = attributes[name]; var geoAttrib = geometry.attributes[name]; if (!geoAttrib) { geoAttrib = geometry.attributes[name] = { type: attrib.type, size: attrib.size, value: [] }; if (attrib.semantic) { geoAttrib.semantic = attrib.semantic; } } for (var i = 0; i < attrib.value.length; i += attrib.size) { if (attrib.size === 1) { geoAttrib.value.push(attrib.array[i]); } else { geoAttrib.value.push(attrib.subarray(i, i + attrib.size)); } } } if (this.boundingBox) { geometry.boundingBox = new BoundingBox(); geometry.boundingBox.min.copy(this.boundingBox.min); geometry.boundingBox.max.copy(this.boundingBox.max); } return geometry; }, applyTransform: function (matrix) { if (this.boundingBox) { this.boundingBox.applyTransform(matrix); } var positions = this.attributes.position.value; var normals = this.attributes.normal.value; var tangents = this.attributes.tangent.value; matrix = matrix._array; var inverseTransposeMatrix = mat4.create(); mat4.invert(inverseTransposeMatrix, matrix); mat4.transpose(inverseTransposeMatrix, inverseTransposeMatrix); vec3.forEach(positions, 3, 0, null, vec3.transformMat4, matrix); if (normals) { vec3.forEach(normals, 3, 0, null, vec3.transformMat4, inverseTransposeMatrix); } if (tangents) { vec3.forEach(tangents, 4, 0, null, vec3.transformMat4, inverseTransposeMatrix); } }, dispose: function (_gl) { this._cache.use(_gl.__GLID__); var chunks = this._cache.get('chunks'); if (chunks) { for (var c = 0; c < chunks.length; c++) { var chunk = chunks[c]; for (var k = 0; k < chunk.attributeBuffers.length; k++) { var attribs = chunk.attributeBuffers[k]; _gl.deleteBuffer(attribs.buffer); } } } this._cache.deleteContext(_gl.__GLID__); } }); return StaticGeometry; });define('qtek/core/LinkedList', ['require'], 
function (require) { 'use strict'; var LinkedList = function () { this.head = null; this.tail = null; this._length = 0; }; LinkedList.prototype.insert = function (val) { var entry = new LinkedList.Entry(val); this.insertEntry(entry); return entry; }; LinkedList.prototype.insertAt = function (idx, val) { if (idx < 0) { return; } var next = this.head; var cursor = 0; while (next && cursor != idx) { next = next.next; cursor++; } if (next) { var entry = new LinkedList.Entry(val); var prev = next.prev; prev.next = entry; entry.prev = prev; entry.next = next; next.prev = entry; } else { this.insert(val); } }; LinkedList.prototype.insertEntry = function (entry) { if (!this.head) { this.head = this.tail = entry; } else { this.tail.next = entry; entry.prev = this.tail; this.tail = entry; } this._length++; }; LinkedList.prototype.remove = function (entry) { var prev = entry.prev; var next = entry.next; if (prev) { prev.next = next; } else { this.head = next; } if (next) { next.prev = prev; } else { this.tail = prev; } entry.next = entry.prev = null; this._length--; }; LinkedList.prototype.removeAt = function (idx) { if (idx < 0) { return; } var curr = this.head; var cursor = 0; while (curr && cursor != idx) { curr = curr.next; cursor++; } if (curr) { this.remove(curr); return curr.value; } }; LinkedList.prototype.getHead = function () { if (this.head) { return this.head.value; } }; LinkedList.prototype.getTail = function () { if (this.tail) { return this.tail.value; } }; LinkedList.prototype.getAt = function (idx) { if (idx < 0) { return; } var curr = this.head; var cursor = 0; while (curr && cursor != idx) { curr = curr.next; cursor++; } return curr.value; }; LinkedList.prototype.indexOf = function (value) { var curr = this.head; var cursor = 0; while (curr) { if (curr.value === value) { return cursor; } curr = curr.next; cursor++; } }; LinkedList.prototype.length = function () { return this._length; }; LinkedList.prototype.isEmpty = function () { return this._length === 0; }; LinkedList.prototype.forEach = function (cb, context) { var curr = this.head; var idx = 0; var haveContext = typeof context != 'undefined'; while (curr) { if (haveContext) { cb.call(context, curr.value, idx); } else { cb(curr.value, idx); } curr = curr.next; idx++; } }; LinkedList.prototype.clear = function () { this.tail = this.head = null; this._length = 0; }; LinkedList.Entry = function (val) { this.value = val; this.next = null; this.prev = null; }; return LinkedList; });define('echarts-x/entity/marker/Base', ['require'], function (require) { var MarkerBase = function (chart) { this.chart = chart; }; MarkerBase.prototype.setSeries = function (series, seriesIndex) { }; MarkerBase.prototype.clear = function () { }; MarkerBase.prototype.onframe = function (deltaTime) { }; MarkerBase.prototype.getSceneNode = function () { }; MarkerBase.prototype.dispose = function () { var renderer = this.chart.baseLayer.renderer; renderer.dispose(this.getSceneNode(), true, true); }; return MarkerBase; });define('echarts-x/util/geometry/Lines', [ 'require', 'qtek/DynamicGeometry', 'qtek/Geometry', 'qtek/math/Vector3', 'qtek/dep/glmatrix' ], function (require) { var DynamicGeometry = require('qtek/DynamicGeometry'); var Geometry = require('qtek/Geometry'); var Vector3 = require('qtek/math/Vector3'); var vec3 = require('qtek/dep/glmatrix').vec3; var LinesGeometry = DynamicGeometry.derive(function () { return { attributes: { position: new Geometry.Attribute('position', 'float', 3, 'POSITION', true), color: new Geometry.Attribute('color', 
'float', 4, 'COLOR', true) } }; }, { clearLines: function () { this.attributes.position.value.length = 0; this.attributes.color.value.length = 0; }, addLine: function (p0, p1, color) { this.attributes.position.value.push(p0._array, p1._array); this.attributes.color.value.push(color, color); }, addCubicCurve: function (p0, p1, p2, p3, color) { p0 = p0._array; p1 = p1._array; p2 = p2._array; p3 = p3._array; var x0 = p0[0], y0 = p0[1], z0 = p0[2]; var x1 = p1[0], y1 = p1[1], z1 = p1[2]; var x2 = p2[0], y2 = p2[1], z2 = p2[2]; var x3 = p3[0], y3 = p3[1], z3 = p3[2]; var len = vec3.dist(p0, p1) + vec3.len(p2, p1) + vec3.len(p3, p2); var step = 1 / (len + 1) * 15; var step2 = step * step; var step3 = step2 * step; var pre1 = 3 * step; var pre2 = 3 * step2; var pre4 = 6 * step2; var pre5 = 6 * step3; var tmp1x = x0 - x1 * 2 + x2; var tmp1y = y0 - y1 * 2 + y2; var tmp1z = z0 - z1 * 2 + z2; var tmp2x = (x1 - x2) * 3 - x0 + x3; var tmp2y = (y1 - y2) * 3 - y0 + y3; var tmp2z = (z1 - z2) * 3 - z0 + z3; var fx = x0; var fy = y0; var fz = z0; var dfx = (x1 - x0) * pre1 + tmp1x * pre2 + tmp2x * step3; var dfy = (y1 - y0) * pre1 + tmp1y * pre2 + tmp2y * step3; var dfz = (z1 - z0) * pre1 + tmp1z * pre2 + tmp2z * step3; var ddfx = tmp1x * pre4 + tmp2x * pre5; var ddfy = tmp1y * pre4 + tmp2y * pre5; var ddfz = tmp1z * pre4 + tmp2z * pre5; var dddfx = tmp2x * pre5; var dddfy = tmp2y * pre5; var dddfz = tmp2z * pre5; var positionArr = this.attributes.position.value; var colorArr = this.attributes.color.value; var offset = positionArr.length; var len = 0; var t = 0; while (t < 1 + step) { if (len > 1) { positionArr.push(positionArr[offset + len - 1]); colorArr.push(colorArr[offset + len - 1]); len++; } positionArr.push(vec3.fromValues(fx, fy, fz)); colorArr.push(color); len++; fx += dfx; fy += dfy; fz += dfz; dfx += ddfx; dfy += ddfy; dfz += ddfz; ddfx += dddfx; ddfy += dddfy; ddfz += dddfz; t += step; } } }); return LinesGeometry; });define('echarts-x/util/geometry/CurveAnimatingPoints', [ 'require', 'qtek/DynamicGeometry', 'qtek/Geometry' ], function (require) { var DynamicGeometry = require('qtek/DynamicGeometry'); var Geometry = require('qtek/Geometry'); var Attribute = Geometry.Attribute; var CurveAnimatingPoints = DynamicGeometry.derive(function () { return { attributes: { p0: new Attribute('p0', 'float', 3, '', true), p1: new Attribute('p1', 'float', 3, '', true), p2: new Attribute('p2', 'float', 3, '', true), p3: new Attribute('p3', 'float', 3, '', true), offset: new Attribute('offset', 'float', 1, '', true), size: new Attribute('size', 'float', 1, '', true), color: new Attribute('color', 'float', 4, 'COLOR', true) }, mainAttribute: 'p0' }; }, { clearPoints: function () { var attributes = this.attributes; attributes.p0.value.length = 0; attributes.p1.value.length = 0; attributes.p2.value.length = 0; attributes.p3.value.length = 0; attributes.offset.value.length = 0; attributes.size.value.length = 0; attributes.color.value.length = 0; }, addPoint: function (p0, p1, p2, p3, color) { var attributes = this.attributes; var offset = Math.random(); for (var i = 0; i < 15; i++) { attributes.p0.value.push(p0._array); attributes.p1.value.push(p1._array); attributes.p2.value.push(p2._array); attributes.p3.value.push(p3._array); attributes.offset.value.push(offset); attributes.size.value.push(i / 15); attributes.color.value.push(color); offset += 0.004; } } }); return CurveAnimatingPoints; });define('echarts-x/util/geometry/Bars', [ 'require', 'qtek/DynamicGeometry', 'qtek/Geometry', 'qtek/geometry/Cube', 
'qtek/math/Matrix4', 'qtek/math/Vector3', 'qtek/dep/glmatrix' ], function (require) { var DynamicGeometry = require('qtek/DynamicGeometry'); var Geometry = require('qtek/Geometry'); var CubeGeometry = require('qtek/geometry/Cube'); var Matrix4 = require('qtek/math/Matrix4'); var Vector3 = require('qtek/math/Vector3'); var glMatrix = require('qtek/dep/glmatrix'); var vec3 = glMatrix.vec3; var cubePositions = [ [ -1, -1, 0 ], [ 1, -1, 0 ], [ 1, 1, 0 ], [ -1, 1, 0 ], [ -1, -1, -2 ], [ 1, -1, -2 ], [ 1, 1, -2 ], [ -1, 1, -2 ] ]; var cubeFaces = [ [ 1, 5, 6 ], [ 1, 6, 2 ], [ 0, 3, 7 ], [ 0, 7, 4 ], [ 3, 2, 7 ], [ 2, 6, 7 ], [ 1, 4, 5 ], [ 1, 0, 4 ], [ 4, 6, 5 ], [ 4, 7, 6 ] ]; var BarsGeometry = DynamicGeometry.derive(function () { return { _barMat: new Matrix4(), _barScaleVec: new Vector3() }; }, { clearBars: function () { this.attributes.position.value.length = 0; this.attributes.color.value.length = 0; this.faces.length = 0; }, addBar: function (start, end, size, color) { var cubeGeo = this._cubeGeometry; var barMat = this._barMat; var scaleVec = this._barScaleVec; var height = Vector3.dist(start, end); if (height <= 0) { return; } Vector3.set(scaleVec, size * 0.5, size * 0.5, height * 0.5); Matrix4.identity(barMat); Matrix4.lookAt(barMat, start, end, Vector3.UP); Matrix4.invert(barMat, barMat); Matrix4.scale(barMat, barMat, scaleVec); var nVertexBase = this.getVertexNumber(); for (var i = 0; i < cubeFaces.length; i++) { var face = vec3.clone(cubeFaces[i]); face[0] += nVertexBase; face[1] += nVertexBase; face[2] += nVertexBase; this.faces.push(face); } for (var i = 0; i < cubePositions.length; i++) { var pos = vec3.clone(cubePositions[i]); vec3.transformMat4(pos, pos, barMat._array); this.attributes.position.value.push(pos); this.attributes.color.value.push(color); } } }); return BarsGeometry; });define('qtek/geometry/Cube', [ 'require', '../DynamicGeometry', './Plane', '../math/Matrix4', '../math/Vector3', '../math/BoundingBox' ], function (require) { 'use strict'; var DynamicGeometry = require('../DynamicGeometry'); var Plane = require('./Plane'); var Matrix4 = require('../math/Matrix4'); var Vector3 = require('../math/Vector3'); var BoundingBox = require('../math/BoundingBox'); var planeMatrix = new Matrix4(); var Cube = DynamicGeometry.derive({ widthSegments: 1, heightSegments: 1, depthSegments: 1, inside: false }, function () { this.build(); }, { build: function () { this.faces.length = 0; this.attributes.position.value.length = 0; this.attributes.texcoord0.value.length = 0; this.attributes.normal.value.length = 0; var planes = { 'px': createPlane('px', this.depthSegments, this.heightSegments), 'nx': createPlane('nx', this.depthSegments, this.heightSegments), 'py': createPlane('py', this.widthSegments, this.depthSegments), 'ny': createPlane('ny', this.widthSegments, this.depthSegments), 'pz': createPlane('pz', this.widthSegments, this.heightSegments), 'nz': createPlane('nz', this.widthSegments, this.heightSegments) }; var cursor = 0; var attrList = [ 'position', 'texcoord0', 'normal' ]; for (var pos in planes) { for (var k = 0; k < attrList.length; k++) { var attrName = attrList[k]; var attrArray = planes[pos].attributes[attrName].value; for (var i = 0; i < attrArray.length; i++) { var value = attrArray[i]; if (this.inside && attrName === 'normal') { value[0] = -value[0]; value[1] = -value[1]; value[2] = -value[2]; } this.attributes[attrName].value.push(value); } } var plane = planes[pos]; for (var i = 0; i < plane.faces.length; i++) { var face = plane.faces[i]; this.faces.push([ 
face[0] + cursor, face[1] + cursor, face[2] + cursor ]); } cursor += planes[pos].getVertexNumber(); } this.boundingBox = new BoundingBox(); this.boundingBox.max.set(1, 1, 1); this.boundingBox.min.set(-1, -1, -1); } }); function createPlane(pos, widthSegments, heightSegments) { planeMatrix.identity(); var plane = new Plane({ widthSegments: widthSegments, heightSegments: heightSegments }); switch (pos) { case 'px': Matrix4.translate(planeMatrix, planeMatrix, Vector3.POSITIVE_X); Matrix4.rotateY(planeMatrix, planeMatrix, Math.PI / 2); break; case 'nx': Matrix4.translate(planeMatrix, planeMatrix, Vector3.NEGATIVE_X); Matrix4.rotateY(planeMatrix, planeMatrix, -Math.PI / 2); break; case 'py': Matrix4.translate(planeMatrix, planeMatrix, Vector3.POSITIVE_Y); Matrix4.rotateX(planeMatrix, planeMatrix, -Math.PI / 2); break; case 'ny': Matrix4.translate(planeMatrix, planeMatrix, Vector3.NEGATIVE_Y); Matrix4.rotateX(planeMatrix, planeMatrix, Math.PI / 2); break; case 'pz': Matrix4.translate(planeMatrix, planeMatrix, Vector3.POSITIVE_Z); break; case 'nz': Matrix4.translate(planeMatrix, planeMatrix, Vector3.NEGATIVE_Z); Matrix4.rotateY(planeMatrix, planeMatrix, Math.PI); break; } plane.applyTransform(planeMatrix); return plane; } return Cube; });define('qtek/geometry/Plane', [ 'require', '../DynamicGeometry', '../math/BoundingBox' ], function (require) { 'use strict'; var DynamicGeometry = require('../DynamicGeometry'); var BoundingBox = require('../math/BoundingBox'); var Plane = DynamicGeometry.derive({ widthSegments: 1, heightSegments: 1 }, function () { this.build(); }, { build: function () { var heightSegments = this.heightSegments; var widthSegments = this.widthSegments; var positions = this.attributes.position.value; var texcoords = this.attributes.texcoord0.value; var normals = this.attributes.normal.value; var faces = this.faces; positions.length = 0; texcoords.length = 0; normals.length = 0; faces.length = 0; for (var y = 0; y <= heightSegments; y++) { var t = y / heightSegments; for (var x = 0; x <= widthSegments; x++) { var s = x / widthSegments; positions.push([ 2 * s - 1, 2 * t - 1, 0 ]); if (texcoords) { texcoords.push([ s, t ]); } if (normals) { normals.push([ 0, 0, 1 ]); } if (x < widthSegments && y < heightSegments) { var i = x + y * (widthSegments + 1); faces.push([ i, i + 1, i + widthSegments + 1 ]); faces.push([ i + widthSegments + 1, i + 1, i + widthSegments + 2 ]); } } } this.boundingBox = new BoundingBox(); this.boundingBox.min.set(-1, -1, 0); this.boundingBox.max.set(1, 1, 0); } }); return Plane; });define('echarts-x/surface/TextureAtlasSurface', [ 'require', 'qtek/Texture2D', './ZRenderSurface' ], function (require) { var Texture2D = require('qtek/Texture2D'); var ZRenderSurface = require('./ZRenderSurface'); var TextureAtlasSurface = function (zr, width, height) { this.zr = zr; this._x = 0; this._y = 0; this._width = width || 1024; this._height = height || 1024; this._rowHeight = 0; this._coords = {}; this._zrenderSurface = new ZRenderSurface(width, height); this._zrenderSurface.onrefresh = function () { zr.refreshNextFrame(); }; }; TextureAtlasSurface.prototype = { clear: function () { this._x = 0; this._y = 0; this._rowHeight = 0; this._zrenderSurface.clearElements(); this._coords = {}; }, getWidth: function () { return this._width; }, getHeight: function () { return this._height; }, getTexture: function () { return this._zrenderSurface.getTexture(); }, resize: function (width, height) { this._zrenderSurface.resize(width, height); }, addShape: function (shape, width, height) 
{ this._fitShape(shape, width, height); var x = this._x; var y = this._y; if (x + width > this._width && y + this._rowHeight > this._height) { return null; } if (x + width > this._width) { x = this._x = 0; y += this._rowHeight; this._y = y; this._rowHeight = 0; } this._x += width; this._rowHeight = Math.max(this._rowHeight, height); shape.position[0] += x; shape.position[1] += y; this._zrenderSurface.addElement(shape); var coords = [ [ x / this._width, y / this._height ], [ (x + width) / this._width, (y + height) / this._height ] ]; this._coords[shape.id] = coords; return coords; }, refresh: function () { this._zrenderSurface.refresh(); }, _fitShape: function (shape, width, height) { var rect = shape.getRect(shape.style); var lineWidth = shape.style.lineWidth || 0; var shadowBlur = shape.style.shadowBlur || 0; var margin = lineWidth + shadowBlur; rect.x -= margin; rect.y -= margin; rect.width += margin * 2; rect.height += margin * 2; var scaleX = width / rect.width; var scaleY = height / rect.height; var x = rect.x; var y = rect.y; shape.position = [ -rect.x * scaleX, -rect.y * scaleY ]; shape.scale = [ scaleX, scaleY ]; shape.updateTransform(); }, getImageCoords: function (id) { return this._coords[id]; } }; return TextureAtlasSurface; });define('echarts-x/util/geometry/Sprites', [ 'require', 'qtek/DynamicGeometry', 'qtek/math/Matrix4', 'qtek/math/Vector3', 'qtek/dep/glmatrix' ], function (require) { var DynamicGeometry = require('qtek/DynamicGeometry'); var Matrix4 = require('qtek/math/Matrix4'); var Vector3 = require('qtek/math/Vector3'); var vec3 = require('qtek/dep/glmatrix').vec3; var vec2 = require('qtek/dep/glmatrix').vec2; var squarePositions = [ [ -1, -1, 0 ], [ 1, -1, 0 ], [ 1, 1, 0 ], [ -1, 1, 0 ] ]; var squareTexcoords = [ [ 0, 0 ], [ 1, 0 ], [ 1, 1 ], [ 0, 1 ] ]; var squareFaces = [ [ 0, 1, 2 ], [ 0, 2, 3 ] ]; var SpritesGeometry = DynamicGeometry.derive({}, { clearSprites: function () { var attributes = this.attributes; attributes.position.value.length = 0; attributes.texcoord0.value.length = 0; }, addSprite: function (matrix, coords) { var nVertexBase = this.getVertexNumber(); for (var i = 0; i < squareFaces.length; i++) { var face = Array.prototype.slice.call(squareFaces[i]); face[0] += nVertexBase; face[1] += nVertexBase; face[2] += nVertexBase; this.faces.push(face); } for (var i = 0; i < squarePositions.length; i++) { var pos = vec3.clone(squarePositions[i]); vec3.transformMat4(pos, pos, matrix._array); this.attributes.position.value.push(pos); } var texcoord0 = this.attributes.texcoord0.value; var create = vec2.fromValues; texcoord0.push(create(coords[0][0], coords[1][1])); texcoord0.push(create(coords[1][0], coords[1][1])); texcoord0.push(create(coords[1][0], coords[0][1])); texcoord0.push(create(coords[0][0], coords[0][1])); } }); return SpritesGeometry; });define('echarts-x/util/sprite', ['require'], function (require) { function makeSprite(size, inCanvas, draw) { var canvas = inCanvas || document.createElement('canvas'); canvas.width = size; canvas.height = size; var ctx = canvas.getContext('2d'); draw && draw(ctx); return canvas; } var spriteUtil = { makeSpriteFromShape: function (size, shape, inCanvas) { var rect = shape.getRect(shape.style); var lineWidth = shape.style.lineWidth || 0; var shadowBlur = shape.style.shadowBlur || 0; var margin = lineWidth + shadowBlur; rect.x -= margin; rect.y -= margin; rect.width += margin * 2; rect.height += margin * 2; var scaleX = size / rect.width; var scaleY = size / rect.height; var x = rect.x; var y = rect.y; 
shape.position = [ -rect.x * scaleX, -rect.y * scaleY ]; shape.scale = [ scaleX, scaleY ]; shape.updateTransform(); return makeSprite(size, inCanvas, function (ctx) { shape.brush(ctx); }); }, makeSimpleSprite: function (size, inCanvas) { return makeSprite(size, inCanvas, function (ctx) { var halfSize = size / 2; ctx.beginPath(); ctx.arc(halfSize, halfSize, 60, 0, Math.PI * 2, false); ctx.closePath(); var gradient = ctx.createRadialGradient(halfSize, halfSize, 0, halfSize, halfSize, halfSize); gradient.addColorStop(0, 'rgba(255, 255, 255, 1)'); gradient.addColorStop(0.5, 'rgba(255, 255, 255, 0.5)'); gradient.addColorStop(1, 'rgba(255, 255, 255, 0)'); ctx.fillStyle = gradient; ctx.fill(); }); } }; return spriteUtil; });define('echarts-x/util/geometry/Points', [ 'require', 'qtek/DynamicGeometry', 'qtek/Geometry' ], function (require) { var DynamicGeometry = require('qtek/DynamicGeometry'); var Geometry = require('qtek/Geometry'); var PointsGeometry = DynamicGeometry.derive(function () { return { attributes: { position: new Geometry.Attribute('position', 'float', 3, 'POSITION', true), size: new Geometry.Attribute('size', 'float', 1, '', true), color: new Geometry.Attribute('color', 'float', 4, 'COLOR', true) } }; }, { clearPoints: function () { var attributes = this.attributes; attributes.position.value.length = 0; attributes.color.value.length = 0; attributes.size.value.length = 0; }, addPoint: function (position, color, size) { var attributes = this.attributes; attributes.position.value.push(position._array); attributes.color.value.push(color); attributes.size.value.push(size); } }); return PointsGeometry; });define('echarts-x/util/geometry/AnimatingPoints', [ 'require', 'qtek/DynamicGeometry', 'qtek/Geometry' ], function (require) { var DynamicGeometry = require('qtek/DynamicGeometry'); var Geometry = require('qtek/Geometry'); var AnimatingPointsGeometry = DynamicGeometry.derive(function () { return { attributes: { position: new Geometry.Attribute('position', 'float', 3, 'POSITION', true), size: new Geometry.Attribute('size', 'float', 1, '', true), delay: new Geometry.Attribute('delay', 'float', 1, '', true), color: new Geometry.Attribute('color', 'float', 4, 'COLOR', true) } }; }, { clearPoints: function () { var attributes = this.attributes; attributes.position.value.length = 0; attributes.color.value.length = 0; attributes.size.value.length = 0; attributes.delay.value.length = 0; }, addPoint: function (position, color, size, delayTime) { var attributes = this.attributes; attributes.position.value.push(position._array); attributes.color.value.push(color); attributes.size.value.push(size); attributes.delay.value.push(delayTime); } }); return AnimatingPointsGeometry; });define('qtek/compositor/Pass', [ 'require', '../core/Base', '../camera/Orthographic', '../geometry/Plane', '../Shader', '../Material', '../Mesh', '../core/glinfo', '../core/glenum', '../shader/source/compositor/vertex.essl' ], function (require) { 'use strict'; var Base = require('../core/Base'); var OrthoCamera = require('../camera/Orthographic'); var Plane = require('../geometry/Plane'); var Shader = require('../Shader'); var Material = require('../Material'); var Mesh = require('../Mesh'); var glinfo = require('../core/glinfo'); var glenum = require('../core/glenum'); Shader['import'](require('../shader/source/compositor/vertex.essl')); var planeGeo = new Plane(); var mesh = new Mesh({ geometry: planeGeo }); var camera = new OrthoCamera(); var Pass = Base.derive(function () { return { fragment: '', outputs: null, 
material: null }; }, function () { var shader = new Shader({ vertex: Shader.source('buildin.compositor.vertex'), fragment: this.fragment }); var material = new Material({ shader: shader }); shader.enableTexturesAll(); this.material = material; }, { setUniform: function (name, value) { var uniform = this.material.uniforms[name]; if (uniform) { uniform.value = value; } }, getUniform: function (name) { var uniform = this.material.uniforms[name]; if (uniform) { return uniform.value; } }, attachOutput: function (texture, attachment) { if (!this.outputs) { this.outputs = {}; } attachment = attachment || glenum.COLOR_ATTACHMENT0; this.outputs[attachment] = texture; }, detachOutput: function (texture) { for (var attachment in this.outputs) { if (this.outputs[attachment] === texture) { this.outputs[attachment] = null; } } }, bind: function (renderer, frameBuffer) { if (this.outputs) { for (var attachment in this.outputs) { var texture = this.outputs[attachment]; if (texture) { frameBuffer.attach(renderer.gl, texture, attachment); } } } if (frameBuffer) { frameBuffer.bind(renderer); } }, unbind: function (renderer, frameBuffer) { frameBuffer.unbind(renderer); }, render: function (renderer, frameBuffer) { var _gl = renderer.gl; mesh.material = this.material; if (frameBuffer) { this.bind(renderer, frameBuffer); var ext = glinfo.getExtension(_gl, 'EXT_draw_buffers'); if (ext && this.outputs) { var bufs = []; for (var attachment in this.outputs) { attachment = +attachment; if (attachment >= _gl.COLOR_ATTACHMENT0 && attachment <= _gl.COLOR_ATTACHMENT0 + 8) { bufs.push(attachment); } } ext.drawBuffersEXT(bufs); } } this.trigger('beforerender', this, renderer); _gl.disable(_gl.BLEND); _gl.clear(_gl.DEPTH_BUFFER_BIT); renderer.renderQueue([mesh], camera); this.trigger('afterrender', this, renderer); if (frameBuffer) { this.unbind(renderer, frameBuffer); } } }); return Pass; });define('qtek/FrameBuffer', [ 'require', './core/Base', './TextureCube', './core/glinfo', './core/glenum', './core/Cache' ], function (require) { 'use strict'; var Base = require('./core/Base'); var TextureCube = require('./TextureCube'); var glinfo = require('./core/glinfo'); var glenum = require('./core/glenum'); var Cache = require('./core/Cache'); var FrameBuffer = Base.derive({ depthBuffer: true, _attachedTextures: null, _width: 0, _height: 0, _depthTextureAttached: false, _renderBufferWidth: 0, _renderBufferHeight: 0, _binded: false }, function () { this._cache = new Cache(); this._attachedTextures = {}; }, { resize: function (width, height) { this._width = width; this._height = height; }, bind: function (renderer) { var _gl = renderer.gl; if (!this._binded) { _gl.bindFramebuffer(_gl.FRAMEBUFFER, this.getFrameBuffer(_gl)); this._binded = true; } this._cache.put('viewport', renderer.viewport); renderer.setViewport(0, 0, this._width, this._height, 1); if (this._cache.miss('renderbuffer') && this.depthBuffer && !this._depthTextureAttached) { this._cache.put('renderbuffer', _gl.createRenderbuffer()); } if (!this._depthTextureAttached && this.depthBuffer) { var width = this._width; var height = this._height; var renderbuffer = this._cache.get('renderbuffer'); if (width !== this._renderBufferWidth || height !== this._renderBufferHeight) { _gl.bindRenderbuffer(_gl.RENDERBUFFER, renderbuffer); _gl.renderbufferStorage(_gl.RENDERBUFFER, _gl.DEPTH_COMPONENT16, width, height); this._renderBufferWidth = width; this._renderBufferHeight = height; _gl.bindRenderbuffer(_gl.RENDERBUFFER, null); } if (!this._cache.get('renderbuffer_attached')) { 
_gl.framebufferRenderbuffer(_gl.FRAMEBUFFER, _gl.DEPTH_ATTACHMENT, _gl.RENDERBUFFER, renderbuffer); this._cache.put('renderbuffer_attached', true); } } }, unbind: function (renderer) { var _gl = renderer.gl; _gl.bindFramebuffer(_gl.FRAMEBUFFER, null); this._binded = false; this._cache.use(_gl.__GLID__); var viewport = this._cache.get('viewport'); if (viewport) { renderer.setViewport(viewport.x, viewport.y, viewport.width, viewport.height); } for (var attachment in this._attachedTextures) { var texture = this._attachedTextures[attachment]; if (!texture.NPOT && texture.useMipmap) { var target = texture instanceof TextureCube ? _gl.TEXTURE_CUBE_MAP : _gl.TEXTURE_2D; _gl.bindTexture(target, texture.getWebGLTexture(_gl)); _gl.generateMipmap(target); _gl.bindTexture(target, null); } } }, getFrameBuffer: function (_gl) { this._cache.use(_gl.__GLID__); if (this._cache.miss('framebuffer')) { this._cache.put('framebuffer', _gl.createFramebuffer()); } return this._cache.get('framebuffer'); }, attach: function (_gl, texture, attachment, target, mipmapLevel) { if (!texture.width) { throw new Error('The texture attached to color buffer is not a valid.'); } if (!this._binded) { _gl.bindFramebuffer(_gl.FRAMEBUFFER, this.getFrameBuffer(_gl)); this._binded = true; } this._width = texture.width; this._height = texture.height; attachment = attachment || _gl.COLOR_ATTACHMENT0; target = target || _gl.TEXTURE_2D; mipmapLevel = mipmapLevel || 0; if (attachment === _gl.DEPTH_ATTACHMENT) { var extension = glinfo.getExtension(_gl, 'WEBGL_depth_texture'); if (!extension) { console.error(' Depth texture is not supported by the browser '); return; } if (texture.format !== glenum.DEPTH_COMPONENT) { console.error('The texture attached to depth buffer is not a valid.'); return; } this._cache.put('renderbuffer_attached', false); this._depthTextureAttached = true; } this._attachedTextures[attachment] = texture; _gl.framebufferTexture2D(_gl.FRAMEBUFFER, attachment, target, texture.getWebGLTexture(_gl), mipmapLevel); }, detach: function () { }, dispose: function (_gl) { this._cache.use(_gl.__GLID__); var renderBuffer = this._cache.get('renderbuffer'); if (renderBuffer) { _gl.deleteRenderbuffer(renderBuffer); } var frameBuffer = this._cache.get('framebuffer'); if (frameBuffer) { _gl.deleteFramebuffer(frameBuffer); } this._cache.deleteContext(_gl.__GLID__); } }); FrameBuffer.COLOR_ATTACHMENT0 = glenum.COLOR_ATTACHMENT0; FrameBuffer.DEPTH_ATTACHMENT = glenum.DEPTH_ATTACHMENT; FrameBuffer.STENCIL_ATTACHMENT = glenum.STENCIL_ATTACHMENT; FrameBuffer.DEPTH_STENCIL_ATTACHMENT = glenum.DEPTH_STENCIL_ATTACHMENT; return FrameBuffer; });; define('qtek/shader/source/compositor/vertex.essl', function() { return '\n@export buildin.compositor.vertex\n\nuniform mat4 worldViewProjection : WORLDVIEWPROJECTION;\n\nattribute vec3 position : POSITION;\nattribute vec2 texcoord : TEXCOORD_0;\n\nvarying vec2 v_Texcoord;\n\nvoid main()\n{\n v_Texcoord = texcoord;\n gl_Position = worldViewProjection * vec4(position, 1.0);\n}\n\n@end'}); define('qtek/TextureCube', [ 'require', './Texture', './core/glinfo', './core/glenum', './core/util' ], function (require) { var Texture = require('./Texture'); var glinfo = require('./core/glinfo'); var glenum = require('./core/glenum'); var util = require('./core/util'); var targetMap = { 'px': 'TEXTURE_CUBE_MAP_POSITIVE_X', 'py': 'TEXTURE_CUBE_MAP_POSITIVE_Y', 'pz': 'TEXTURE_CUBE_MAP_POSITIVE_Z', 'nx': 'TEXTURE_CUBE_MAP_NEGATIVE_X', 'ny': 'TEXTURE_CUBE_MAP_NEGATIVE_Y', 'nz': 'TEXTURE_CUBE_MAP_NEGATIVE_Z' }; var 
TextureCube = Texture.derive(function () { return { image: { px: null, nx: null, py: null, ny: null, pz: null, nz: null }, pixels: { px: null, nx: null, py: null, ny: null, pz: null, nz: null } }; }, { update: function (_gl) { _gl.bindTexture(_gl.TEXTURE_CUBE_MAP, this._cache.get('webgl_texture')); this.beforeUpdate(_gl); var glFormat = this.format; var glType = this.type; _gl.texParameteri(_gl.TEXTURE_CUBE_MAP, _gl.TEXTURE_WRAP_S, this.wrapS); _gl.texParameteri(_gl.TEXTURE_CUBE_MAP, _gl.TEXTURE_WRAP_T, this.wrapT); _gl.texParameteri(_gl.TEXTURE_CUBE_MAP, _gl.TEXTURE_MAG_FILTER, this.magFilter); _gl.texParameteri(_gl.TEXTURE_CUBE_MAP, _gl.TEXTURE_MIN_FILTER, this.minFilter); var anisotropicExt = glinfo.getExtension(_gl, 'EXT_texture_filter_anisotropic'); if (anisotropicExt && this.anisotropic > 1) { _gl.texParameterf(_gl.TEXTURE_CUBE_MAP, anisotropicExt.TEXTURE_MAX_ANISOTROPY_EXT, this.anisotropic); } if (glType === 36193) { var halfFloatExt = glinfo.getExtension(_gl, 'OES_texture_half_float'); if (!halfFloatExt) { glType = glenum.FLOAT; } } for (var target in this.image) { var img = this.image[target]; if (img) { _gl.texImage2D(_gl[targetMap[target]], 0, glFormat, glFormat, glType, img); } else { _gl.texImage2D(_gl[targetMap[target]], 0, glFormat, this.width, this.height, 0, glFormat, glType, this.pixels[target]); } } if (!this.NPOT && this.useMipmap) { _gl.generateMipmap(_gl.TEXTURE_CUBE_MAP); } _gl.bindTexture(_gl.TEXTURE_CUBE_MAP, null); }, generateMipmap: function (_gl) { _gl.bindTexture(_gl.TEXTURE_CUBE_MAP, this._cache.get('webgl_texture')); _gl.generateMipmap(_gl.TEXTURE_CUBE_MAP); }, bind: function (_gl) { _gl.bindTexture(_gl.TEXTURE_CUBE_MAP, this.getWebGLTexture(_gl)); }, unbind: function (_gl) { _gl.bindTexture(_gl.TEXTURE_CUBE_MAP, null); }, isPowerOfTwo: function () { if (this.image.px) { return isPowerOfTwo(this.image.px.width) && isPowerOfTwo(this.image.px.height); } else { return isPowerOfTwo(this.width) && isPowerOfTwo(this.height); } function isPowerOfTwo(value) { return value & value - 1 === 0; } }, isRenderable: function () { if (this.image.px) { return isImageRenderable(this.image.px) && isImageRenderable(this.image.nx) && isImageRenderable(this.image.py) && isImageRenderable(this.image.ny) && isImageRenderable(this.image.pz) && isImageRenderable(this.image.nz); } else { return this.width && this.height; } }, load: function (imageList) { var loading = 0; var self = this; util.each(imageList, function (src, target) { var image = new Image(); image.onload = function () { loading--; if (loading === 0) { self.dirty(); self.trigger('success', self); } image.onload = null; }; image.onerror = function () { loading--; image.onerror = null; }; loading++; image.src = src; self.image[target] = image; }); return this; } }); function isImageRenderable(image) { return image.nodeName === 'CANVAS' || image.complete; } return TextureCube; });define('zrender/shape/ShapeBundle', [ 'require', './Base', '../tool/util' ], function (require) { var Base = require('./Base'); var ShapeBundle = function (options) { Base.call(this, options); }; ShapeBundle.prototype = { constructor: ShapeBundle, type: 'shape-bundle', brush: function (ctx, isHighlight) { var style = this.beforeBrush(ctx, isHighlight); ctx.beginPath(); for (var i = 0; i < style.shapeList.length; i++) { var subShape = style.shapeList[i]; var subShapeStyle = subShape.style; if (isHighlight) { subShapeStyle = subShape.getHighlightStyle(subShapeStyle, subShape.highlightStyle || {}, subShape.brushTypeOnly); } subShape.buildPath(ctx, 
subShapeStyle); } switch (style.brushType) { case 'both': ctx.fill(); case 'stroke': style.lineWidth > 0 && ctx.stroke(); break; default: ctx.fill(); } this.drawText(ctx, style, this.style); this.afterBrush(ctx); }, getRect: function (style) { if (style.__rect) { return style.__rect; } var minX = Number.MAX_VALUE; var maxX = Number.MIN_VALUE; var minY = Number.MAX_VALUE; var maxY = Number.MIN_VALUE; for (var i = 0; i < style.shapeList.length; i++) { var subShape = style.shapeList[i]; var subRect = subShape.getRect(subShape.style); var minX = Math.min(subRect.x, minX); var minY = Math.min(subRect.y, minY); var maxX = Math.max(subRect.x + subRect.width, maxX); var maxY = Math.max(subRect.y + subRect.height, maxY); } style.__rect = { x: minX, y: minY, width: maxX - minX, height: maxY - minY }; return style.__rect; }, isCover: function (x, y) { var originPos = this.getTansform(x, y); x = originPos[0]; y = originPos[1]; var rect = this.style.__rect; if (!rect) { rect = this.getRect(this.style); } if (x >= rect.x && x <= rect.x + rect.width && y >= rect.y && y <= rect.y + rect.height) { for (var i = 0; i < this.style.shapeList.length; i++) { var subShape = this.style.shapeList[i]; if (subShape.isCover(x, y)) { return true; } } } return false; } }; require('../tool/util').inherits(ShapeBundle, Base); return ShapeBundle; });
Map3D
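The flattened qtek modules above expose a small ray-picking API (Camera.castRay, Ray.intersectPlane, Ray.intersectTriangle, Plane). As a reading aid, here is a minimal usage sketch, not taken from the source: the camera variable and scene values are hypothetical, and only the call signatures come from the code above.

var Vector2 = require('qtek/math/Vector2');
var Vector3 = require('qtek/math/Vector3');
var Plane = require('qtek/math/Plane');

// Cast a picking ray from normalized device coordinates (both in [-1, 1]);
// `camera` is assumed to be a qtek camera with up-to-date matrices.
var ndc = new Vector2(0.25, -0.5);
var ray = camera.castRay(ndc);

// Intersect with the ground plane y = 0 (Plane takes normal and distance).
var ground = new Plane(new Vector3(0, 1, 0), 0);
var hit = ray.intersectPlane(ground); // Vector3, or null when parallel

// Single-sided Moller-Trumbore triangle test; returns Vector3 or null.
var picked = ray.intersectTriangle(
    new Vector3(0, 0, 0), new Vector3(1, 0, 0), new Vector3(0, 1, 0), true
);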
list.js
import { queryFakeList, getDecorationList } from '../services/api';

export default {
  namespace: 'list',

  state: {
    list: [],
    arr: [],
  },

  effects: {
    *fetch({ payload }, { call, put }) {
      const response = yield call(queryFakeList, payload);
      yield put({
        type: 'queryList',
        payload: Array.isArray(response) ? response : [],
      });
    },
    *appendFetch({ payload }, { call, put }) {
      const response = yield call(queryFakeList, payload);
      yield put({
        type: 'appendList',
        payload: Array.isArray(response) ? response : [],
      });
    },
    *getList({ payload }, { call, put }) {
      const response = yield call(getDecorationList, payload);
      const list = response.data.list;
      yield put({
        type: 'queryList',
      });
    },
  },

  reducers: {
    queryList(state, action) {
      return {
        ...state,
        list: action.payload,
      };
    },
    appendList(state, action) {
      return {
        ...state,
        list: state.list.concat(action.payload),
      };
    },
  },
};
payload: Array.isArray(list) ? list : [],
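With the middle line spliced between prefix and suffix, the *getList effect in this row assembles into the following; it is shown only as a reading aid and adds nothing beyond the three fragments above.

*getList({ payload }, { call, put }) {
  const response = yield call(getDecorationList, payload);
  const list = response.data.list;
  yield put({
    type: 'queryList',
    payload: Array.isArray(list) ? list : [],
  });
},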
ident.go
package flect

import (
	"encoding"
	"regexp"
	"strings"
	"unicode"
	"unicode/utf8"
)

// Ident represents the string and its parts
type Ident struct {
	Original string
	Parts    []string
}

// String implements fmt.Stringer and returns the original string
func (i Ident) String() string {
	return i.Original
}

// New creates a new Ident from the string
func
(s string) Ident {
	i := Ident{
		Original: s,
		Parts:    toParts(s),
	}
	return i
}

var splitRx = regexp.MustCompile("[^\\p{L}]")

func toParts(s string) []string {
	parts := []string{}
	s = strings.TrimSpace(s)
	if len(s) == 0 {
		return parts
	}
	if _, ok := baseAcronyms[strings.ToUpper(s)]; ok {
		return []string{strings.ToUpper(s)}
	}
	var prev rune
	var x string
	for _, c := range s {
		cs := string(c)
		// fmt.Println("### cs ->", cs)
		// fmt.Println("### unicode.IsControl(c) ->", unicode.IsControl(c))
		// fmt.Println("### unicode.IsDigit(c) ->", unicode.IsDigit(c))
		// fmt.Println("### unicode.IsGraphic(c) ->", unicode.IsGraphic(c))
		// fmt.Println("### unicode.IsLetter(c) ->", unicode.IsLetter(c))
		// fmt.Println("### unicode.IsLower(c) ->", unicode.IsLower(c))
		// fmt.Println("### unicode.IsMark(c) ->", unicode.IsMark(c))
		// fmt.Println("### unicode.IsPrint(c) ->", unicode.IsPrint(c))
		// fmt.Println("### unicode.IsPunct(c) ->", unicode.IsPunct(c))
		// fmt.Println("### unicode.IsSpace(c) ->", unicode.IsSpace(c))
		// fmt.Println("### unicode.IsTitle(c) ->", unicode.IsTitle(c))
		// fmt.Println("### unicode.IsUpper(c) ->", unicode.IsUpper(c))
		if !utf8.ValidRune(c) {
			continue
		}
		if isSpace(c) {
			parts = xappend(parts, x)
			x = cs
			prev = c
			continue
		}
		if unicode.IsUpper(c) && !unicode.IsUpper(prev) {
			parts = xappend(parts, x)
			x = cs
			prev = c
			continue
		}
		if unicode.IsLetter(c) || unicode.IsDigit(c) || unicode.IsPunct(c) || c == '`' {
			prev = c
			x += cs
			continue
		}
		parts = xappend(parts, x)
		x = ""
		prev = c
	}
	parts = xappend(parts, x)
	return parts
}

var _ encoding.TextUnmarshaler = &Ident{}
var _ encoding.TextMarshaler = &Ident{}

func (i *Ident) UnmarshalText(data []byte) error {
	(*i) = New(string(data))
	return nil
}

func (i Ident) MarshalText() ([]byte, error) {
	return []byte(i.Original), nil
}
New
mtdev.py
'''
Native support for Multitouch devices on Linux, using libmtdev.
===============================================================

The Mtdev project is a part of the Ubuntu Maverick multitouch architecture.
You can read more on http://wiki.ubuntu.com/Multitouch

To configure MTDev, it's preferable to use probesysfs providers.

[input]
# devicename = hidinput,/dev/input/eventXX
acert230h = mtdev,/dev/input/event2

.. note::
    You must have read access to the input event.

You can use a custom range for the X, Y and pressure values.
On some drivers, the range reported is invalid.
To fix that, you can add these options to the argument line:

* invert_x : 1 to invert X axis
* invert_y : 1 to invert Y axis
* min_position_x : X minimum
* max_position_x : X maximum
* min_position_y : Y minimum
* max_position_y : Y maximum
* min_pressure : pressure minimum
* max_pressure : pressure maximum
* min_touch_major : width shape minimum
* max_touch_major : width shape maximum
* min_touch_minor : height shape minimum
* max_touch_minor : height shape maximum
* rotation : 0,90,180 or 270 to rotate
'''

__all__ = ('MTDMotionEventProvider', 'MTDMotionEvent')

import os
from kivy.input.motionevent import MotionEvent
from kivy.input.shape import ShapeRect


class MTDMotionEvent(MotionEvent):

    def depack(self, args):
        self.is_touch = True
        if 'x' in args:
            self.sx = args['x']
        else:
            self.sx = -1
        if 'y' in args:
            self.sy = args['y']
        else:
            self.sy = -1
        self.profile = ['pos']
        if 'size_w' in args and 'size_h' in args:
            self.shape = ShapeRect()
            self.shape.width = args['size_w']
            self.shape.height = args['size_h']
            self.profile.append('shape')
        if 'pressure' in args:
            self.pressure = args['pressure']
            self.profile.append('pressure')
        super(MTDMotionEvent, self).depack(args)

    def __str__(self):
        i, sx, sy, d = (self.id, self.sx, self.sy, self.device)
        return '<MTDMotionEvent id=%d pos=(%f, %f) device=%s>' % (i, sx, sy, d)


if 'KIVY_DOC' in os.environ:
    # documentation hack
    MTDMotionEventProvider = None
else:
    import threading
    import collections
    from kivy.lib.mtdev import Device, \
        MTDEV_TYPE_EV_ABS, MTDEV_CODE_SLOT, MTDEV_CODE_POSITION_X, \
        MTDEV_CODE_POSITION_Y, MTDEV_CODE_PRESSURE, \
        MTDEV_CODE_TOUCH_MAJOR, MTDEV_CODE_TOUCH_MINOR, \
        MTDEV_CODE_TRACKING_ID, MTDEV_ABS_POSITION_X, \
        MTDEV_ABS_POSITION_Y, MTDEV_ABS_TOUCH_MINOR, \
        MTDEV_ABS_TOUCH_MAJOR
    from kivy.input.provider import MotionEventProvider
    from kivy.input.factory import MotionEventFactory
    from kivy.logger import Logger

    class MTDMotionEventProvider(MotionEventProvider):

        options = ('min_position_x', 'max_position_x',
                   'min_position_y', 'max_position_y',
                   'min_pressure', 'max_pressure',
                   'min_touch_major', 'max_touch_major',
                   'min_touch_minor', 'max_touch_minor',
                   'invert_x', 'invert_y', 'rotation')

        def __init__(self, device, args):
            super(MTDMotionEventProvider, self).__init__(device, args)
            self._device = None
            self.input_fn = None
            self.default_ranges = dict()

            # split arguments
            args = args.split(',')
            if not args:
                Logger.error('MTD: No filename passed to MTD configuration')
                Logger.error('MTD: Use /dev/input/event0 for example')
                return

            # read filename
            self.input_fn = args[0]
            Logger.info('MTD: Read event from <%s>' % self.input_fn)

            # read parameters
            for arg in args[1:]:
                if arg == '':
                    continue
                arg = arg.split('=')

                # ensure it's a key = value
                if len(arg) != 2:
                    err = 'MTD: Bad parameter %s: Not in key=value format' %\
                        arg
                    Logger.error(err)
                    continue

                # ensure the key exists
                key, value = arg
                if key not in MTDMotionEventProvider.options:
                    Logger.error('MTD: unknown %s option' % key)
                    continue

                # ensure the value
                try:
                    self.default_ranges[key] = int(value)
                except ValueError:
                    err = 'MTD: invalid value %s for option %s' % (key, value)
                    Logger.error(err)
                    continue

                # all good!
                Logger.info('MTD: Set custom %s to %d' % (key, int(value)))

            if 'rotation' not in self.default_ranges:
                self.default_ranges['rotation'] = 0
            elif self.default_ranges['rotation'] not in (0, 90, 180, 270):
                Logger.error('HIDInput: invalid rotation value ({})'.format(
                    self.default_ranges['rotation']))
                self.default_ranges['rotation'] = 0

        def start(self):
            if self.input_fn is None:
                return
            self.uid = 0
            self.queue = collections.deque()
            self.thread = threading.Thread(
                target=self._thread_run,
                kwargs=dict(
                    queue=self.queue,
                    input_fn=self.input_fn,
                    device=self.device,
                    default_ranges=self.default_ranges))
            self.thread.daemon = True
            self.thread.start()

        def _thread_run(self, **kwargs):
            input_fn = kwargs.get('input_fn')
            queue = kwargs.get('queue')
            device = kwargs.get('device')
            drs = kwargs.get('default_ranges').get
            touches = {}
            touches_sent = []
            point = {}
            l_points = {}

            def assign_coord(point, value, invert, coords):
                cx, cy = coords
                if invert:
                    value = 1. - value
                if rotation == 0:
                    point[cx] = value
                elif rotation == 90:
                    point[cy] = value
                elif rotation == 180:
                    point[cx] = 1. - value
                elif rotation == 270:
                    point[cy] = 1. - value

            def process(points):
                for args in points:
                    # this can happen if we have a touch going on already at
                    # the start of the app
                    if 'id' not in args:
                        continue
                    tid = args['id']
                    try:
                        touch = touches[tid]
                    except KeyError:
                        touch = MTDMotionEvent(device, tid, args)
                        touches[touch.id] = touch
                    touch.move(args)
                    action = 'update'
                    if tid not in touches_sent:
                        action = 'begin'
                        touches_sent.append(tid)
                    if 'delete' in args:
                        action = 'end'
                        del args['delete']
                        del touches[touch.id]
                        touches_sent.remove(tid)
                        touch.update_time_end()
                    queue.append((action, touch))

            def normalize(value, vmin, vmax):
                return (value - vmin) / float(vmax - vmin)

            # open mtdev device
            _fn = input_fn
            _slot = 0
            try:
                _device = Device(_fn)
            except OSError as e:
                if e.errno == 13:  # Permission denied
                    Logger.warn(
                        'MTD: Unable to open device "{0}".
Please ensure you' ' have the appropriate permissions.'.format(_fn)) return else: raise _changes = set() # prepare some vars to get limit of some component ab = _device.get_abs(MTDEV_ABS_POSITION_X) range_min_position_x = drs('min_position_x', ab.minimum) range_max_position_x = drs('max_position_x', ab.maximum) Logger.info('MTD: <%s> range position X is %d - %d' % (_fn, range_min_position_x, range_max_position_x)) ab = _device.get_abs(MTDEV_ABS_POSITION_Y) range_min_position_y = drs('min_position_y', ab.minimum) range_max_position_y = drs('max_position_y', ab.maximum) Logger.info('MTD: <%s> range position Y is %d - %d' % (_fn, range_min_position_y, range_max_position_y)) ab = _device.get_abs(MTDEV_ABS_TOUCH_MAJOR) range_min_major = drs('min_touch_major', ab.minimum) range_max_major = drs('max_touch_major', ab.maximum) Logger.info('MTD: <%s> range touch major is %d - %d' % (_fn, range_min_major, range_max_major)) ab = _device.get_abs(MTDEV_ABS_TOUCH_MINOR) range_min_minor = drs('min_touch_minor', ab.minimum) range_max_minor = drs('max_touch_minor', ab.maximum) Logger.info('MTD: <%s> range touch minor is %d - %d' % (_fn, range_min_minor, range_max_minor)) range_min_pressure = drs('min_pressure', 0) range_max_pressure = drs('max_pressure', 255) Logger.info('MTD: <%s> range pressure is %d - %d' % (_fn, range_min_pressure, range_max_pressure)) invert_x = int(bool(drs('invert_x', 0))) invert_y = int(bool(drs('invert_y', 0))) Logger.info('MTD: <%s> axes invertion: X is %d, Y is %d' % (_fn, invert_x, invert_y)) rotation = drs('rotation', 0) Logger.info('MTD: <%s> rotation set to %d' % (_fn, rotation)) while _device: # idle as much as we can. while _device.idle(1000): continue # got data, read all without redoing idle while True: data = _device.get() if data is None: break # set the working slot if data.type == MTDEV_TYPE_EV_ABS and \ data.code == MTDEV_CODE_SLOT: _slot = data.value continue # fill the slot if _slot not in l_points: l_points[_slot] = dict() point = l_points[_slot] ev_value = data.value ev_code = data.code if ev_code == MTDEV_CODE_POSITION_X: val = normalize(ev_value, range_min_position_x, range_max_position_x) assign_coord(point, val, invert_x, 'xy') elif ev_code == MTDEV_CODE_POSITION_Y: val = 1. - normalize(ev_value, range_min_position_y, range_max_position_y) assign_coord(point, val, invert_y, 'yx') elif ev_code == MTDEV_CODE_PRESSURE: point['pressure'] = normalize(ev_value, range_min_pressure, range_max_pressure) elif ev_code == MTDEV_CODE_TOUCH_MAJOR: point['size_w'] = normalize(ev_value, range_min_major, range_max_major) elif ev_code == MTDEV_CODE_TOUCH_MINOR: point['size_h'] = normalize(ev_value, range_min_minor, range_max_minor) elif ev_code == MTDEV_CODE_TRACKING_ID: if ev_value == -1: point['delete'] = True # force process of changes here, as the slot can be # reused. _changes.add(_slot) process([l_points[x] for x in _changes]) _changes.clear() continue else: point['id'] = ev_value else: # unrecognized command, ignore. continue _changes.add(_slot) # push all changes if _changes: process([l_points[x] for x in _changes]) _changes.clear() def update(self, dispatch_fn): # dispatch all event from threads try: while True: event_type, touch = self.queue.popleft() dispatch_fn(event_type, touch) except: pass MotionEventFactory.register('mtdev', MTDMotionEventProvider)
Check :py:class:`~kivy.input.providers.probesysfs` for more information. Otherwise, add this to your configuration::
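To make the option list above concrete, here is a hedged example of a Kivy config entry combining the provider with calibration arguments (the device name and values are illustrative):

[input]
mytouchscreen = mtdev,/dev/input/event2,invert_y=1,rotation=90,min_pressure=0,max_pressure=255

Each comma-separated key=value pair after the event path must use a key from MTDMotionEventProvider.options and an integer value; anything else is rejected with a Logger.error.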
encrypted.rs
use crate::encryption::{Ciphertext, PublicKey}; use crate::gang::Scalar; use crate::unit_vector::UnitVector; use rand_core::{CryptoRng, RngCore}; // Power of Two Padded vector structure #[derive(Clone)] pub struct
<A> { pub elements: Vec<A>, pub orig_len: usize, } impl<A: Clone> Ptp<A> { pub fn len(&self) -> usize { self.elements.len() } pub fn bits(&self) -> usize { let len = self.elements.len(); assert!(len.is_power_of_two()); len.trailing_zeros() as usize } pub fn new<F>(mut vec: Vec<A>, extended_value: F) -> Ptp<A> where A: Clone, F: Fn() -> A, { let orig_len = vec.len(); let expected_len = orig_len.next_power_of_two(); if orig_len < expected_len { let a = extended_value(); while vec.len() < expected_len { vec.push(a.clone()); } } Ptp { orig_len, elements: vec, } } pub fn iter(&self) -> std::slice::Iter<'_, A> { self.elements.iter() } } impl<A> AsRef<[A]> for Ptp<A> { fn as_ref(&self) -> &[A] { &self.elements } } #[derive(Clone)] pub struct EncryptedVote(Vec<Ciphertext>); /// Encrypted vote is a unit vector where each element is encrypted with ElGamal Ciphertext to /// the tally opener. #[derive(Clone)] pub struct EncryptingVote { pub(crate) unit_vector: UnitVector, pub ciphertexts: Vec<Ciphertext>, pub random_elements: Vec<Scalar>, } impl EncryptingVote { pub fn prepare<R: RngCore + CryptoRng>( rng: &mut R, public_key: &PublicKey, vote: &UnitVector, ) -> Self { let mut rs = Vec::new(); let mut ciphers = Vec::new(); for vote_element in vote.iter() { let (cipher, r) = public_key.encrypt_return_r(&vote_element.into(), rng); rs.push(r); ciphers.push(cipher); } Self { unit_vector: *vote, ciphertexts: ciphers, random_elements: rs, } } /* pub fn pad<F>(mut self, extended_value: F) -> PtpEncryptingVote where F: Fn() -> (Scalar, Ciphertext), { let orig_len = self.ciphertexts.len(); let expected_len = orig_len.next_power_of_two(); if orig_len < expected_len { let (field_element, zero_cipher) = extended_value(); while self.ciphertexts.len() < expected_len { self.ciphertexts.push(zero_cipher.clone()); self.random_elements.push(field_element); } } PtpEncryptingVote { actual_length: orig_len, encrypting_vote: self, } } */ }
Ptp
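A minimal sketch of the padding behaviour of the Ptp type above (assumes Ptp from this file is in scope; the values are illustrative):

fn main() {
    // Five elements are padded with the closure's value up to the next
    // power of two (8); orig_len keeps the true length, and bits() is the
    // base-2 logarithm of the padded length.
    let padded = Ptp::new(vec![1u32, 2, 3, 4, 5], || 0u32);
    assert_eq!(padded.len(), 8);
    assert_eq!(padded.orig_len, 5);
    assert_eq!(padded.bits(), 3); // 8 == 2^3
}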
ent_exp.py
#!/usr/bin/env python """ Import experiments into the database * Configuration parameters: - The ones required by intogen.data.entity.EntityManagerFactory """ from wok.task import Task from wok.element import DataElementList from intogen.data.entity import types from intogen.data.entity.server import EntityServer from intogen.biomart import biomart_db_connect, DEFAULT_INSERT_SIZE, DEFAULT_DB_ENGINE from intogen.sql import BatchInsert from pubmed import Pubmed task = Task() @task.main() def
(): task.check_conf(["entities", "repositories", "biomart.db"]) conf = task.conf insert_size = conf.get("biomart.insert_size", DEFAULT_INSERT_SIZE, dtype=int) if "biomart.study_source" in conf: study_source_map = conf["biomart.study_source"] else: study_source_map = conf.create_element() log = task.logger() exp_port = task.ports("experiment") es = EntityServer(conf["entities"]) em = es.manager() conn = biomart_db_connect(conf["biomart.db"], log) db_engine = conf.get("biomart.db.engine", DEFAULT_DB_ENGINE) cursor = conn.cursor() cursor.execute(""" CREATE TABLE ent_experiment ( id int(11) NOT NULL, exp_name varchar(64) NOT NULL, study_id varchar(32) NOT NULL, study_source varchar(32) DEFAULT NULL, study_source_url varchar(512) DEFAULT NULL, study_link varchar(512) DEFAULT NULL, pub_pubmed varchar(32) DEFAULT NULL, pub_title varchar(300) DEFAULT NULL, pub_authors varchar(300) DEFAULT NULL, pub_year varchar(16) DEFAULT NULL, pub_journal varchar(200) DEFAULT NULL, platf_id varchar(32) NOT NULL, platf_title varchar(250) DEFAULT NULL, platf_technology varchar(96) DEFAULT NULL, PRIMARY KEY (id), KEY exp_name (exp_name), KEY pub_pubmed (pub_pubmed), KEY pub_title (pub_title), KEY pub_authors (pub_authors), KEY pub_year (pub_year), KEY pub_journal (pub_journal), KEY platf_title (platf_title), KEY platf_technology (platf_technology) ) ENGINE={} CHARACTER SET utf8 COLLATE utf8_general_ci""".format(db_engine)) ib = BatchInsert(cursor, "ent_experiment", ["id", "exp_name", "study_id", "study_source", "study_source_url", "study_link", "pub_title", "pub_authors", "pub_year", "pub_pubmed", "pub_journal", "platf_id", "platf_title", "platf_technology"], insert_size) pubmed = Pubmed() for i, exp in enumerate(exp_port, 1): study_id = exp[0] platform_id = exp[1] study = em.find(study_id, types.SOURCE_STUDY) if study is None: log.error("{} not found: {}".format(types.SOURCE_STUDY, study_id)) continue platf = em.find(platform_id, types.SOURCE_PLATFORM) if platf is None: log.error("{} not found: {}".format(types.SOURCE_PLATFORM, platform_id)) continue log.info("Experiment for study {} and platform {} ...".format(study_id, platform_id)) pub = {} for k in ["title", "short_authors", "date", "journal"]: pub[k] = None if "pubmed" in study: pmid = study["pubmed"] if isinstance(pmid, (DataElementList, list)): pmid = pmid[0] log.warn("Study {} with many pubmed_id's, only the first {} will be considered".format(study_id, pmid)) log.debug("Retrieving information for pubmed_id '{}' ...".format(pmid)) try: pub = pubmed.find(pmid) if len(pub) == 0: log.error("No publication information found for pubmed_id '{}' in experiment ({}, {})".format(pmid, study_id, platform_id)) else: pub = pub[0] except Exception as ex: log.error("Error retrieving pubmed information for experiment ({}, {}) with pubmed_id '{}'".format(study_id, platform_id, pmid)) log.exception(ex) else: pmid = None log.warn("Study {} has no 'pubmed_id' annotation".format(study_id)) if "title" not in study: log.error("Study {} doesn't have annotation for 'pubmed_id' nor 'title'".format(study_id)) elif "SO/contact_details[0]/contact_name" not in study \ and "SO/contact_details/contact_name" not in study: log.error("Study {} doesn't have annotation for 'pubmed_id' nor 'SO.contact_details[0].contact_name'".format(study_id)) else: try: pub["title"] = study["title"] if "SO/contact_details[0]/contact_name" in study: pub["short_authors"] = study["SO/contact_details[0]/contact_name"] else: pub["short_authors"] = study["SO/contact_details/contact_name"] if 
"SO/submission/pub_date" in study: pub["date"] = study["SO/submission/pub_date"] else: pub["date"] = "" except Exception as ex: log.debug(study) log.execption(ex) for k, v in pub.items(): if v is not None and isinstance(v, basestring): pub[k] = v.replace("'", r"\'") exp_name = "{}; {}".format(study_id, platform_id) study_source = None study_source_url = None study_link = None parts = study_id.split("-") if len(parts) >= 2 and parts[0] in study_source_map: ss = study_source_map[parts[0]] study_source = ss.get("name") study_source_url = ss.get("home_url") try: study_link = ss.get("link", "").format(parts[1]) except: pass ib.insert(i, exp_name, study_id, study_source, study_source_url, study_link, pub["title"], pub["short_authors"], pub["date"], pmid, pub["journal"], platform_id, platf["SO/platform_title"], "") log.debug("{} experiments inserted".format(ib.count)) ib.close() cursor.close() conn.close() em.close() es.close() task.start()
main
ezstack.go
// Easy Zigbee Stack - the goal is to be easiest-to-understand Zigbee codebase available. package ezstack import ( "context" "errors" "fmt" "github.com/davecgh/go-spew/spew" "github.com/function61/gokit/log/logex" "github.com/function61/gokit/sync/taskrunner" "github.com/function61/hautomo/pkg/ezstack/binstruct" "github.com/function61/hautomo/pkg/ezstack/coordinator" "github.com/function61/hautomo/pkg/ezstack/zcl" "github.com/function61/hautomo/pkg/ezstack/zcl/cluster" "github.com/function61/hautomo/pkg/ezstack/zcl/frame" "github.com/function61/hautomo/pkg/ezstack/zigbee" "github.com/function61/hautomo/pkg/ezstack/znp" "github.com/function61/hautomo/pkg/ezstack/znp/unp" "go.bug.st/serial" ) const ( DefaultSingleEndpointId = 1 // for simple single-endpoint devices, its endpoint ID usually is 1 ) // FIXME: these are all bad var ( logger = logex.StandardLogger() log = logex.Prefix("ezstack", logger) logl = logex.Levels(log) ) type Channels struct { onDeviceRegistered chan *Device onDeviceUnregistered chan *Device onDeviceBecameAvailable chan *Device onDeviceIncomingMessage chan *DeviceIncomingMessage } func (c *Channels) OnDeviceRegistered() chan *Device { return c.onDeviceRegistered } // TODO: document what's the difference between available and registered // seems to be signalled only when device's network address changes func (c *Channels) OnDeviceBecameAvailable() chan *Device { return c.onDeviceBecameAvailable } func (c *Channels) OnDeviceUnregistered() chan *Device { return c.onDeviceUnregistered } // "application-level" message, i.e. sensor sending data // TODO: rename to reduce confusion between device registration (name sounds like device is incoming to the cluster..) func (c *Channels) OnDeviceIncomingMessage() chan *DeviceIncomingMessage { return c.onDeviceIncomingMessage } type NodeDatabase interface { InsertDevice(*Device) error GetDeviceByNetworkAddress(nwkAddress string) (*Device, bool) GetDevice(address zigbee.IEEEAddress) (*Device, bool) RemoveDevice(address zigbee.IEEEAddress) error } type Stack struct { db NodeDatabase configuration coordinator.Configuration coordinator *coordinator.Coordinator registrationQueue chan *znp.ZdoEndDeviceAnnceInd zcl *zcl.Zcl channels *Channels } func New(configuration coordinator.Configuration, db NodeDatabase) *Stack { coordinator := coordinator.New(&configuration) zcl := zcl.Library return &Stack{ db: db, configuration: configuration, coordinator: coordinator, registrationQueue: make(chan *znp.ZdoEndDeviceAnnceInd), zcl: zcl, channels: &Channels{ onDeviceRegistered: make(chan *Device, 10), onDeviceBecameAvailable: make(chan *Device, 10), onDeviceUnregistered: make(chan *Device, 10), onDeviceIncomingMessage: make(chan *DeviceIncomingMessage, 100), }, } } // if *packetCaptureFile* non-empty, specifies a file to log inbound UNP frames func (s *Stack) Run(ctx context.Context, joinEnable bool, packetCaptureFilename string, settingsFlash bool) error { logl.Debug.Printf( "opening Zigbee radio %s at %d bauds/s", s.configuration.Serial.Port, s.configuration.Serial.BaudRateOrDefault()) port, err := openPort( s.configuration.Serial.Port, s.configuration.Serial.BaudRateOrDefault()) if err != nil { return fmt.Errorf("openPort: %s: %w", s.configuration.Serial.Port, err) } defer port.Close() // connect to ZNP using UNP protocol with serial port as a transport networkProcessor := znp.New(unp.NewWith8BitsPayloadLength(port), logex.Prefix("znp", logger)) tasks := taskrunner.New(ctx, log) if packetCaptureFilename != "" { tasks.Start("packetcapture", func(ctx 
context.Context) error { return runPacketCapture(ctx, packetCaptureFilename, networkProcessor) }) } // multiple ways for us to need port closing, so this is mainly a hack tasks.Start("portcloser", func(ctx context.Context) error { <-ctx.Done() // ZNP is most likely blocking on an UNP read return port.Close() // double close intentional }) tasks.Start("znp", func(ctx context.Context) error { return networkProcessor.Run(ctx) }) tasks.Start("coordinator", func(ctx context.Context) error { return s.coordinator.Run(ctx, joinEnable, networkProcessor, settingsFlash) }) // to have expensive operation in separate non-blocking thread, but still do multiple registrations // sequentially tasks.Start("registrationqueue", func(ctx context.Context) error { for { select { case <-ctx.Done(): return nil case announcedDevice, ok := <-s.registrationQueue: if !ok { // queue closed return nil } if err := s.registerDevice(announcedDevice); err != nil { logl.Error.Printf("registerDevice: %s", err.Error()) } } } }) for { select { case <-ctx.Done(): return tasks.Wait() case err := <-tasks.Done(): return err case err := <-s.coordinator.OnError(): logl.Error.Printf("coordinator: %s", err) // TODO: shut down the system? are there non-fatal coordinator errors? case announcedDevice := <-s.coordinator.OnDeviceAnnounce(): s.registrationQueue <- announcedDevice case deviceLeave := <-s.coordinator.OnDeviceLeave(): ieeeAddress := zigbee.IEEEAddress(deviceLeave.ExtAddr) logl.Info.Printf("Unregistering device: [%s]", ieeeAddress) if err := s.unregisterDevice(ieeeAddress); err != nil { logl.Error.Printf("unregisterDevice: %s", err.Error()) } case msg := <-s.coordinator.OnDeviceTc(): logl.Debug.Printf("device online change: %s", msg.SrcIEEEAddr) case incomingMessage := <-s.coordinator.OnIncomingMessage(): if err := s.processIncomingMessage(incomingMessage); err != nil { logl.Error.Println(err) } } } } func (s *Stack) Channels() *Channels { return s.channels } func (f *Stack) LocalCommand(dev DeviceAndEndpoint, command cluster.LocalCommand) error { clusterId, commandId := command.CommandClusterAndId() frm, err := frame.New(). DisableDefaultResponse(false). FrameType(frame.FrameTypeLocal). Direction(frame.DirectionClientServer). CommandId(commandId). Command(command). Build() if err != nil { return err } response, err := f.coordinator.DataRequest( dev.NetworkAddress, dev.EndpointId, 1, uint16(clusterId), &znp.AfDataRequestOptions{}, 15, binstruct.Encode(frm)) if err != nil { return err } zclIncomingMessage, err := f.zcl.ToZclIncomingMessage(response) if err != nil { logl.Error.Printf("Unsupported data response message:\n%s\n", spew.Sdump(response)) return err } zclCommand := zclIncomingMessage.Data.Command.(*cluster.DefaultResponseCommand) if err := zclCommand.Status.Error(); err != nil { return fmt.Errorf("unable to run command [%d] on cluster [%d]. 
Status: %v", commandId, clusterId, err) } return nil } func (s *Stack) processIncomingMessage(incomingMessage *znp.AfIncomingMessage) error { zclIncomingMessage, err := s.zcl.ToZclIncomingMessage(incomingMessage) if err != nil { return fmt.Errorf("Unsupported incoming message: %w: %s", err, spew.Sdump(incomingMessage)) } device, ok := s.db.GetDeviceByNetworkAddress(incomingMessage.SrcAddr) if !ok { return fmt.Errorf("Received message from unknown device: %s", incomingMessage.SrcAddr) } select { case s.channels.onDeviceIncomingMessage <- &DeviceIncomingMessage{ Device: device, IncomingMessage: zclIncomingMessage, }: return nil default: return errors.New("onDeviceIncomingMessage channel has no capacity. Maybe channel has no subscribers") } } func (s *Stack) registerDevice(announcedDevice *znp.ZdoEndDeviceAnnceInd) error { address := zigbee.IEEEAddress(announcedDevice.IEEEAddr) logl.Info.Printf("Registering device [%s]", address) if device, alreadyExists := s.db.GetDevice(address); alreadyExists { logl.Debug.Printf("device %s already exists in DB. Updating network address", address) // updating NwkAddr because when re-joining, device most likel has changed its network address, // (but not its IEEEAddr, e.g. "MAC address") device.NetworkAddress = announcedDevice.NwkAddr if err := s.db.InsertDevice(device); err != nil { return fmt.Errorf("InsertDevice: %w", err) } select { case s.channels.onDeviceBecameAvailable <- device: return nil default: return errors.New("onDeviceBecameAvailable channel has no capacity. Maybe channel has no subscribers") } } device, err := s.interrogateDevice(announcedDevice) if err != nil { return fmt.Errorf("interrogateDevice: %w", err) } if err := s.db.InsertDevice(device); err != nil { return fmt.Errorf("InsertDevice: %w", err) } select { case s.channels.onDeviceRegistered <- device: logl.Info.Printf( "Registered new device [%s]. Manufacturer: [%s], Model: [%s], Logical type: [%s]", device.IEEEAddress, device.Manufacturer, device.Model, device.LogicalType) return nil default: return errors.New("onDeviceRegistered channel has no capacity. Maybe channel has no subscribers") } } func (s *Stack) unregisterDevice(ieeeAddress zigbee.IEEEAddress) error { device, found := s.db.GetDevice(ieeeAddress) if !found { return fmt.Errorf("not found: %s", ieeeAddress) } if err := s.db.RemoveDevice(ieeeAddress); err != nil { return err } select { case s.channels.onDeviceUnregistered <- device: logl.Info.Printf( "Unregistered device [%s]. Manufacturer: [%s], Model: [%s], Logical type: [%s]", ieeeAddress, device.Manufacturer, device.Model, device.LogicalType) return nil default: return errors.New("channel has no capacity. Maybe channel has no subscribers") } } func castClusterIds(clusterIdsInt []uint16) []cluster.ClusterId { clusterIds := []cluster.ClusterId{} for _, clusterId := range clusterIdsInt { clusterIds = append(clusterIds, cluster.ClusterId(clusterId)) } return clusterIds } func openPort(portName string, baudRate int) (port serial.Port, err error) { port, err = serial.Open(portName, &serial.Mode{BaudRate: baudRate}) if err != nil
return port, port.SetRTS(true) }
{ return nil, err }
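A hedged consumer sketch for the Channels API above: once Run() is started, device lifecycle and application events can be read from the exposed channels. The watchEvents function is illustrative, not part of the source.

import (
	"fmt"

	"github.com/function61/hautomo/pkg/ezstack"
)

func watchEvents(stack *ezstack.Stack) {
	ch := stack.Channels()
	for {
		select {
		case dev := <-ch.OnDeviceRegistered():
			// fired after interrogation succeeds in registerDevice()
			fmt.Printf("joined: %s (%s %s)\n", dev.IEEEAddress, dev.Manufacturer, dev.Model)
		case msg := <-ch.OnDeviceIncomingMessage():
			// "application-level" traffic, e.g. a sensor report
			fmt.Printf("message from %s\n", msg.Device.IEEEAddress)
		}
	}
}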
main.go
package main import ( "context" "database/sql" "flag" _ "net/http/pprof" _ "github.com/mattn/go-sqlite3" "github.com/pkg/errors" migrate "github.com/rubenv/sql-migrate" "github.com/sirupsen/logrus" cachecash "github.com/cachecashproject/go-cachecash" "github.com/cachecashproject/go-cachecash/cache" "github.com/cachecashproject/go-cachecash/cache/migrations" "github.com/cachecashproject/go-cachecash/common" "github.com/cachecashproject/go-cachecash/dbtx" "github.com/cachecashproject/go-cachecash/keypair" "github.com/cachecashproject/go-cachecash/ledger" "github.com/cachecashproject/go-cachecash/ledgerclient" "github.com/cachecashproject/go-cachecash/log" ) var ( configPath = flag.String("config", "cache.config.json", "Path to configuration file") keypairPath = flag.String("keypair", "cache.keypair.json", "Path to keypair file") traceAPI = flag.String("trace", "", "Jaeger API for tracing") ) func loadConfigFile(l *logrus.Logger, path string) (*cache.ConfigFile, error) { conf := cache.ConfigFile{} p, err := common.NewConfigParser(l, "cache") if err != nil { return nil, err } err = p.ReadFile(path) if err != nil { return nil, err } conf.ClientProtocolGrpcAddr = p.GetString("grpc_addr", ":9000") conf.ClientProtocolHttpAddr = p.GetString("http_addr", ":9443") conf.StatusAddr = p.GetString("status_addr", ":9100") conf.BootstrapAddr = p.GetString("bootstrap_addr", "bootstrapd:7777") conf.LedgerAddr = p.GetString("ledger_addr", "ledger:7778") conf.BadgerDirectory = p.GetString("badger_directory", "./chunks/") conf.Database = p.GetString("database", "cache.db") conf.ContactUrl = p.GetString("contact_url", "") conf.MetricsEndpoint = p.GetString("metrics_endpoint", "") conf.SyncInterval = p.GetSeconds("sync-interval", ledgerclient.DEFAULT_SYNC_INTERVAL) conf.Insecure = p.GetInsecure() return &conf, nil } func main() { common.Main(mainC) } func mainC() error { l := log.NewCLILogger("cached", log.CLIOpt{JSON: true}) flag.Parse() cf, err := loadConfigFile(&l.Logger, *configPath) if err != nil { return errors.Wrap(err, "failed to load configuration file") } if err := l.ConfigureLogger(); err != nil { return errors.Wrap(err, "failed to configure logger") } l.Info("Starting CacheCash cached ", cachecash.CurrentVersion) kp, err := keypair.LoadOrGenerate(&l.Logger, *keypairPath) if err != nil { return errors.Wrap(err, "failed to get keypair") } if err := l.Connect(cf.Insecure, kp); err != nil
defer common.SetupTracing(*traceAPI, "cachecash-cached", &l.Logger).Flush() db, err := sql.Open("sqlite3", cf.Database) if err != nil { return errors.Wrap(err, "failed to open database") } l.Info("applying migrations") n, err := migrate.Exec(db, "sqlite3", migrations.Migrations, migrate.Up) if err != nil { return errors.Wrap(err, "failed to apply migrations") } l.Infof("applied %d migrations", n) persistence := ledger.NewChainStorageSQL(&l.Logger, ledger.NewChainStorageSqlite()) if err := persistence.RunMigrations(db); err != nil { return err } storage := ledger.NewDatabase(persistence) r, err := ledgerclient.NewReplicator(&l.Logger, storage, cf.LedgerAddr, cf.Insecure) if err != nil { return errors.Wrap(err, "failed to create replicator") } c, err := cache.NewCache(&l.Logger, cf, kp) if err != nil { return err } defer c.Close() ctx := dbtx.ContextWithExecutor(context.Background(), db) num, err := c.LoadFromDatabase(ctx) if err != nil { return errors.Wrap(err, "failed to load state from database") } l.WithFields(logrus.Fields{ "len(escrows)": num, }).Info("loaded escrows from database") app, err := cache.NewApplication(&l.Logger, c, db, cf, kp, r) if err != nil { return errors.Wrap(err, "failed to create cache application") } if err := common.RunStarterShutdowner(&l.Logger, app); err != nil { return err } return nil }
{ return errors.Wrap(err, "failed to connect to logpipe") }
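A hedged example of what cache.config.json might contain, assuming the parser maps file keys one-to-one to the names read in loadConfigFile; every value below is just the default from that function, so omitting any key behaves the same.

{
  "grpc_addr": ":9000",
  "http_addr": ":9443",
  "status_addr": ":9100",
  "bootstrap_addr": "bootstrapd:7777",
  "ledger_addr": "ledger:7778",
  "badger_directory": "./chunks/",
  "database": "cache.db"
}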
eval_LFW.py
from nets.arcface import arcface from utils.dataloader import LFWDataset from utils.utils_metrics import test if __name__ == "__main__": #--------------------------------------# # Choice of the backbone feature extraction network # mobilefacenet # mobilenetv1 # mobilenetv2
#--------------------------------------# # Input image size #--------------------------------------# input_shape = [112, 112, 3] #--------------------------------------# # Trained weights file #--------------------------------------# model_path = "model_data/arcface_mobilefacenet.h5" #--------------------------------------# # File path of the LFW evaluation dataset # and the corresponding pairs txt file #--------------------------------------# lfw_dir_path = "lfw" lfw_pairs_path = "model_data/lfw_pair.txt" #--------------------------------------# # Batch size and logging interval for evaluation #--------------------------------------# batch_size = 256 log_interval = 1 #--------------------------------------# # Save path for the ROC curve #--------------------------------------# png_save_path = "model_data/roc_test.png" test_loader = LFWDataset(dir=lfw_dir_path, pairs_path=lfw_pairs_path, batch_size=batch_size, input_shape=input_shape) model = arcface(input_shape, None, backbone=backbone, mode="predict") model.load_weights(model_path, by_name=True) test(test_loader, model, png_save_path, log_interval, batch_size)
# mobilenetv3 # iresnet50 #--------------------------------------# backbone = "mobilefacenet"
test_cli.py
import sys sys.path.append('..') from keepr.__main__ import run_application from click.testing import CliRunner def test_install_package(): runner = CliRunner() result = runner.invoke(run_application, ['install', 'click']) assert result.exit_code == 0 def test_install_package_req(): runner = CliRunner() result = runner.invoke( run_application, [ 'install', '-r', 'requirements_test.txt']) assert result.exit_code == 0 def test_uninstall_package(): runner = CliRunner() result = runner.invoke(run_application, ['uninstall', 'click']) assert result.exit_code == 0 def test_update_package():
runner = CliRunner() result = runner.invoke(run_application, ['install', '-u', 'click']) assert result.exit_code == 0
bind-w-mult-calls-p-type.py
from Tkinter import * import string # This program shows how to use a simple type-in box class App(Frame): def __init__(self, master=None): Frame.__init__(self, master) self.pack() self.entrythingy = Entry() self.entrythingy.pack() # and here we get a callback when the user hits return. we could # make the key that triggers the callback anything we wanted to. # other typical options might be <Key-Tab> or <Key> (for anything) self.entrythingy.bind('<Key-Return>', self.print_contents) # Note that here is where we bind a completely different callback to # the same event. We pass "+" here to indicate that we wish to ADD # this callback to the list associated with this event type. # Not specifying "+" would simply override whatever callback was # defined on this event. self.entrythingy.bind('<Key-Return>', self.print_something_else, "+") def print_contents(self, event): print("hi. contents of entry is now ---->", self.entrythingy.get()) def print_something_else(self, event):
root = App() root.master.title("Foo") root.mainloop() # secret tip for experts: if you pass *any* non-false value as # the third parameter to bind(), Tkinter.py will accumulate # callbacks instead of overwriting. I use "+" here because that's # the Tk notation for getting this sort of behavior. The perfect GUI # interface would use a less obscure notation.
print("hi. Now doing something completely different")
component.js
import Hook from '../hook'; import { useState, useEffect, useRef } from 'react';
* @apiDescription A React hook used to define React component(s) that can be overridden by Reactium plugins, using the Reactium.Component.register() function. * @apiParam {String} hookName the unique string used to register component(s). * @apiParam {Component} defaultComponent the default React component(s) to be returned by the hook. * @apiParam {Mixed} params variadic list of parameters to be passed to the Reactium hook specified by hookName. * @apiName useHookComponent * @apiGroup ReactHook * @apiExample parent.js import React from 'react'; import { useHookComponent } from 'reactium-core/sdk'; // component to be used unless overridden by Reactium.Component.register() const DefaultComponent = () => <div>Default or Placeholder component</div> export default props => { const MyComponent = useHookComponent('my-component', DefaultComponent); return ( <div> <MyComponent {...props} /> </div> ); }; * @apiExample reactium-hooks.js import React from 'react'; import Reactium from 'reactium-core/sdk'; // component to be used unless overridden by Reactium.Component.register() const ReplacementComponent = () => <div>My Plugin's Component</div> Reactium.Component.register('my-component', ReplacementComponent); */ export const useHookComponent = ( hook = 'component', defaultComponent = () => null, ...params ) => { const component = useRef({ component: defaultComponent }); const [version, update] = useState(1); const setComponent = newComponent => { if ( newComponent && newComponent !== op.get(component, 'current.component') ) { op.set(component, 'current.component', newComponent); update(version + 1); } }; useEffect(() => { const getComponent = async () => { const context = await Hook.run(hook, ...params); setComponent(op.get(context, 'component')); }; getComponent(); }, [hook, defaultComponent, ...params]); return op.get(component, 'current.component'); };
import op from 'object-path'; /** * @api {ReactHook} useHookComponent(hookName,defaultComponent,...params) useHookComponent()
builtin_json.rs
// Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0. use super::{Error, EvalContext, Expression, Result, ScalarFunc}; use crate::codec::mysql::json::{parse_json_path_expr, ModifyType, PathExpression}; use crate::codec::mysql::Json; use crate::codec::Datum; use std::borrow::Cow; use std::collections::BTreeMap; impl ScalarFunc { #[inline] pub fn json_depth<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<i64>> { let j = try_opt!(self.children[0].eval_json(ctx, row)); Ok(Some(j.as_ref().as_ref().depth()?)) } #[inline] pub fn json_type<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let j = try_opt!(self.children[0].eval_json(ctx, row)); Ok(Some(Cow::Borrowed(j.as_ref().as_ref().json_type()))) } #[inline] pub fn json_unquote<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let j = try_opt!(self.children[0].eval_json(ctx, row)); j.as_ref() .as_ref() .unquote() .map_err(Error::from) .map(|s| Some(Cow::Owned(s.into_bytes()))) } pub fn json_array<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let parser = JsonFuncArgsParser::new(row); let elems = try_opt!(self .children .iter() .map(|e| parser.get_json(ctx, e)) .collect()); Ok(Some(Cow::Owned(Json::from_array(elems)?))) } pub fn json_object<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let mut pairs = BTreeMap::new(); let parser = JsonFuncArgsParser::new(row); for chunk in self.children.chunks(2) { let key = try_opt!(chunk[0].eval_string_and_decode(ctx, row)).into_owned(); let val = try_opt!(parser.get_json(ctx, &chunk[1])); pairs.insert(key, val); } Ok(Some(Cow::Owned(Json::from_object(pairs)?))) } pub fn json_extract<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { // TODO: We can cache the PathExpressions if children are Constant. let j = try_opt!(self.children[0].eval_json(ctx, row)); let parser = JsonFuncArgsParser::new(row); let path_exprs: Vec<_> = try_opt!(parser.get_path_exprs(ctx, &self.children[1..])); Ok(j.as_ref().as_ref().extract(&path_exprs)?.map(Cow::Owned)) } pub fn json_length<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<i64>> { let j = try_opt!(self.children[0].eval_json(ctx, row)); let parser = JsonFuncArgsParser::new(row); let path_exprs: Vec<_> = match parser.get_path_exprs(ctx, &self.children[1..])? { Some(list) => list, None => return Ok(None), }; j.as_ref().as_ref().json_length(&path_exprs) } #[inline] pub fn
<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { self.json_modify(ctx, row, ModifyType::Set) } #[inline] pub fn json_insert<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { self.json_modify(ctx, row, ModifyType::Insert) } #[inline] pub fn json_replace<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { self.json_modify(ctx, row, ModifyType::Replace) } pub fn json_remove<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let j = try_opt!(self.children[0].eval_json(ctx, row)).into_owned(); let parser = JsonFuncArgsParser::new(row); let path_exprs: Vec<_> = try_opt!(parser.get_path_exprs(ctx, &self.children[1..])); j.as_ref() .remove(&path_exprs) .map(|j| Some(Cow::Owned(j))) .map_err(Error::from) } pub fn json_merge<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let parser = JsonFuncArgsParser::new(row); let mut jsons = vec![]; let head = try_opt!(self.children[0].eval_json(ctx, row)).into_owned(); jsons.push(head); for e in &self.children[1..] { let j = try_opt!(parser.get_json_not_none(ctx, e)); jsons.push(j); } let refs = jsons.iter().map(|j| j.as_ref()).collect::<Vec<_>>(); Json::merge(refs).map(|j| Some(Cow::Owned(j))) } fn json_modify<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], mt: ModifyType, ) -> Result<Option<Cow<'a, Json>>> { let j = try_opt!(self.children[0].eval_json(ctx, row)).into_owned(); let parser = JsonFuncArgsParser::new(row); let mut path_exprs = Vec::with_capacity(self.children.len() / 2); let mut values = Vec::with_capacity(self.children.len() / 2); for chunk in self.children[1..].chunks(2) { path_exprs.push(try_opt!(parser.get_path_expr(ctx, &chunk[0]))); values.push(try_opt!(parser.get_json(ctx, &chunk[1]))); } j.as_ref() .modify(&path_exprs, values, mt) .map(|j| Some(Cow::Owned(j))) .map_err(Error::from) } } struct JsonFuncArgsParser<'a> { row: &'a [Datum], } impl<'a> JsonFuncArgsParser<'a> { #[inline] fn new(row: &'a [Datum]) -> Self { JsonFuncArgsParser { row } } fn get_path_expr( &self, ctx: &mut EvalContext, e: &Expression, ) -> Result<Option<PathExpression>> { let s = try_opt!(e.eval_string_and_decode(ctx, self.row)); let expr = parse_json_path_expr(&s)?; Ok(Some(expr)) } fn get_path_exprs( &self, ctx: &mut EvalContext, es: &[Expression], ) -> Result<Option<Vec<PathExpression>>> { es.iter().map(|e| self.get_path_expr(ctx, e)).collect() } fn get_json(&self, ctx: &mut EvalContext, e: &Expression) -> Result<Option<Json>> { let j = e .eval_json(ctx, self.row)? 
.map_or(Json::none(), |x| Ok(Cow::into_owned(x)))?; Ok(Some(j)) } fn get_json_not_none(&self, ctx: &mut EvalContext, e: &Expression) -> Result<Option<Json>> { let j = try_opt!(e.eval_json(ctx, self.row)).into_owned(); Ok(Some(j)) } } #[cfg(test)] mod tests { use crate::codec::mysql::Json; use crate::codec::Datum; use crate::expr::tests::{datum_expr, make_null_datums, scalar_func_expr}; use crate::expr::{EvalContext, Expression}; use tipb::ScalarFuncSig; #[test] fn test_json_length() { let cases = vec![ (None, None, None), (None, Some(Datum::Null), None), (Some(r#"{}"#), Some(Datum::Null), None), (Some("null"), None, Some(1)), ( Some(r#"{"a":{"a":1},"b":2}"#), Some(Datum::Bytes(b"$".to_vec())), Some(2), ), (Some("1"), None, Some(1)), ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$.*".to_vec())), None, ), ( Some(r#"{"a":{"a":1},"b":2}"#), Some(Datum::Bytes(b"$".to_vec())), Some(2), ), // Tests with path expression ( Some(r#"[1,2,[1,[5,[3]]]]"#), Some(Datum::Bytes(b"$[2]".to_vec())), Some(2), ), ( Some(r#"[{"a":1}]"#), Some(Datum::Bytes(b"$".to_vec())), Some(1), ), ( Some(r#"[{"a":1,"b":2}]"#), Some(Datum::Bytes(b"$[0].a".to_vec())), Some(1), ), ( Some(r#"{"a":{"a":1},"b":2}"#), Some(Datum::Bytes(b"$".to_vec())), Some(2), ), ( Some(r#"{"a":{"a":1},"b":2}"#), Some(Datum::Bytes(b"$.a".to_vec())), Some(1), ), ( Some(r#"{"a":{"a":1},"b":2}"#), Some(Datum::Bytes(b"$.a.a".to_vec())), Some(1), ), ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$.a[2].aa".to_vec())), Some(1), ), // Tests without path expression (Some(r#"{}"#), None, Some(0)), (Some(r#"{"a":1}"#), None, Some(1)), (Some(r#"{"a":[1]}"#), None, Some(1)), (Some(r#"{"b":2, "c":3}"#), None, Some(2)), (Some(r#"[1]"#), None, Some(1)), (Some(r#"[1,2]"#), None, Some(2)), (Some(r#"[1,2,[1,3]]"#), None, Some(3)), (Some(r#"[1,2,[1,[5,[3]]]]"#), None, Some(3)), (Some(r#"[1,2,[1,[5,{"a":[2,3]}]]]"#), None, Some(3)), (Some(r#"[{"a":1}]"#), None, Some(1)), (Some(r#"[{"a":1,"b":2}]"#), None, Some(1)), (Some(r#"[{"a":{"a":1},"b":2}]"#), None, Some(1)), // Tests path expression contains any asterisk ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$.*".to_vec())), None, ), ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$[*]".to_vec())), None, ), ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$**.a".to_vec())), None, ), // Tests path expression does not identify a section of the target document ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$.c".to_vec())), None, ), ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$.a[3]".to_vec())), None, ), ( Some(r#"{"a": [1, 2, {"aa": "xx"}]}"#), Some(Datum::Bytes(b"$.a[2].b".to_vec())), None, ), ]; let mut ctx = EvalContext::default(); for (input, param, exp) in cases { let json = datum_expr(match input { None => Datum::Null, Some(s) => Datum::Json(s.parse().unwrap()), }); let op = if let Some(b) = param { scalar_func_expr(ScalarFuncSig::JsonLengthSig, &[json, datum_expr(b)]) } else { scalar_func_expr(ScalarFuncSig::JsonLengthSig, &[json]) }; let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); let exp = match exp { None => Datum::Null, Some(e) => Datum::I64(e), }; assert_eq!(got, exp); } } #[test] fn test_json_depth() { let cases = vec![ (None, None), (Some("null"), Some(1)), (Some("[true, 2017]"), Some(2)), ( Some(r#"{"a": {"a1": [3]}, "b": {"b1": {"c": {"d": [5]}}}}"#), Some(6), ), (Some("{}"), Some(1)), (Some("[]"), Some(1)), (Some("true"), Some(1)), (Some("1"), Some(1)), 
(Some("-1"), Some(1)), (Some(r#""a""#), Some(1)), (Some(r#"[10, 20]"#), Some(2)), (Some(r#"[[], {}]"#), Some(2)), (Some(r#"[10, {"a": 20}]"#), Some(3)), (Some(r#"[[2], 3, [[[4]]]]"#), Some(5)), (Some(r#"{"Name": "Homer"}"#), Some(2)), (Some(r#"[10, {"a": 20}]"#), Some(3)), ( Some( r#"{"Person": {"Name": "Homer", "Age": 39, "Hobbies": ["Eating", "Sleeping"]} }"#, ), Some(4), ), (Some(r#"{"a":1}"#), Some(2)), (Some(r#"{"a":[1]}"#), Some(3)), (Some(r#"{"b":2, "c":3}"#), Some(2)), (Some(r#"[1]"#), Some(2)), (Some(r#"[1,2]"#), Some(2)), (Some(r#"[1,2,[1,3]]"#), Some(3)), (Some(r#"[1,2,[1,[5,[3]]]]"#), Some(5)), (Some(r#"[1,2,[1,[5,{"a":[2,3]}]]]"#), Some(6)), (Some(r#"[{"a":1}]"#), Some(3)), (Some(r#"[{"a":1,"b":2}]"#), Some(3)), (Some(r#"[{"a":{"a":1},"b":2}]"#), Some(4)), ]; let mut ctx = EvalContext::default(); for (input, exp) in cases { let input = match input { None => Datum::Null, Some(s) => Datum::Json(s.parse().unwrap()), }; let exp = match exp { None => Datum::Null, Some(s) => Datum::I64(s.to_owned()), }; let arg = datum_expr(input); let op = scalar_func_expr(ScalarFuncSig::JsonDepthSig, &[arg]); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_type() { let cases = vec![ (None, None), (Some(r#"true"#), Some("BOOLEAN")), (Some(r#"null"#), Some("NULL")), (Some(r#"-3"#), Some("INTEGER")), (Some(r#"3"#), Some("INTEGER")), (Some(r#"3.14"#), Some("DOUBLE")), (Some(r#"9223372036854775808"#), Some("DOUBLE")), (Some(r#"[1, 2, 3]"#), Some("ARRAY")), (Some(r#"{"name": 123}"#), Some("OBJECT")), ]; let mut ctx = EvalContext::default(); for (input, exp) in cases { let input = match input { None => Datum::Null, Some(s) => Datum::Json(s.parse().unwrap()), }; let exp = match exp { None => Datum::Null, Some(s) => Datum::Bytes(s.to_owned().into_bytes()), }; let arg = datum_expr(input); let op = scalar_func_expr(ScalarFuncSig::JsonTypeSig, &[arg]); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_unquote() { let cases = vec![ (None, false, None), (Some(r"a"), false, Some("a")), (Some(r#""3""#), false, Some(r#""3""#)), (Some(r#""3""#), true, Some(r#"3"#)), (Some(r#"{"a": "b"}"#), false, Some(r#"{"a": "b"}"#)), (Some(r#"{"a": "b"}"#), true, Some(r#"{"a":"b"}"#)), ( Some(r#"hello,\"quoted string\",world"#), false, Some(r#"hello,"quoted string",world"#), ), ]; let mut ctx = EvalContext::default(); for (input, parse, exp) in cases { let input = match input { None => Datum::Null, Some(s) => { if parse { Datum::Json(s.parse().unwrap()) } else { Datum::Json(Json::from_string(s.to_owned()).unwrap()) } } }; let exp = match exp { None => Datum::Null, Some(s) => Datum::Bytes(s.to_owned().into_bytes()), }; let arg = datum_expr(input); let op = scalar_func_expr(ScalarFuncSig::JsonUnquoteSig, &[arg]); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_object() { let cases = vec![ (vec![], Datum::Json(r#"{}"#.parse().unwrap())), ( vec![Datum::Bytes(b"1".to_vec()), Datum::Null], Datum::Json(r#"{"1":null}"#.parse().unwrap()), ), ( vec![ Datum::Bytes(b"1".to_vec()), Datum::Null, Datum::Bytes(b"2".to_vec()), Datum::Json(Json::from_string("sdf".to_owned()).unwrap()), Datum::Bytes(b"k1".to_vec()), Datum::Json(Json::from_string("v1".to_owned()).unwrap()), ], Datum::Json(r#"{"1":null,"2":"sdf","k1":"v1"}"#.parse().unwrap()), ), ]; let mut ctx = 
EvalContext::default(); for (inputs, exp) in cases { let args = inputs.into_iter().map(datum_expr).collect::<Vec<_>>(); let op = scalar_func_expr(ScalarFuncSig::JsonObjectSig, &args); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_array() { let cases = vec![ (vec![], Datum::Json(r#"[]"#.parse().unwrap())), ( vec![Datum::Json("1".parse().unwrap()), Datum::Null], Datum::Json(r#"[1, null]"#.parse().unwrap()), ), ( vec![ Datum::Json("1".parse().unwrap()), Datum::Null, Datum::Json("2".parse().unwrap()), Datum::Json(Json::from_string("sdf".to_owned()).unwrap()), Datum::Json(Json::from_string("k1".to_owned()).unwrap()), Datum::Json(Json::from_string("v1".to_owned()).unwrap()), ], Datum::Json(r#"[1, null, 2, "sdf", "k1", "v1"]"#.parse().unwrap()), ), ]; let mut ctx = EvalContext::default(); for (inputs, exp) in cases { let args = inputs.into_iter().map(datum_expr).collect::<Vec<_>>(); let op = scalar_func_expr(ScalarFuncSig::JsonArraySig, &args); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_modify() { let cases = vec![ ( ScalarFuncSig::JsonSetSig, vec![Datum::Null, Datum::Null, Datum::Null], Datum::Null, ), ( ScalarFuncSig::JsonSetSig, vec![ Datum::Json(Json::from_i64(9).unwrap()), Datum::Bytes(b"$[1]".to_vec()), Datum::Json(Json::from_u64(3).unwrap()), ], Datum::Json(r#"[9,3]"#.parse().unwrap()), ), ( ScalarFuncSig::JsonInsertSig, vec![ Datum::Json(Json::from_i64(9).unwrap()), Datum::Bytes(b"$[1]".to_vec()), Datum::Json(Json::from_u64(3).unwrap()), ], Datum::Json(r#"[9,3]"#.parse().unwrap()), ), ( ScalarFuncSig::JsonReplaceSig, vec![ Datum::Json(Json::from_i64(9).unwrap()), Datum::Bytes(b"$[1]".to_vec()), Datum::Json(Json::from_u64(3).unwrap()), ], Datum::Json(r#"9"#.parse().unwrap()), ), ( ScalarFuncSig::JsonSetSig, vec![ Datum::Json(r#"{"a":"x"}"#.parse().unwrap()), Datum::Bytes(b"$.a".to_vec()), Datum::Null, ], Datum::Json(r#"{"a":null}"#.parse().unwrap()), ), ]; let mut ctx = EvalContext::default(); for (sig, inputs, exp) in cases { let args: Vec<_> = inputs.into_iter().map(datum_expr).collect(); let op = scalar_func_expr(sig, &args); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_merge() { let cases = vec![ (vec![Datum::Null, Datum::Null], Datum::Null), ( vec![ Datum::Json("{}".parse().unwrap()), Datum::Json("[]".parse().unwrap()), ], Datum::Json("[{}]".parse().unwrap()), ), ( vec![ Datum::Json("{}".parse().unwrap()), Datum::Json("[]".parse().unwrap()), Datum::Json("3".parse().unwrap()), Datum::Json(r#""4""#.parse().unwrap()), ], Datum::Json(r#"[{}, 3, "4"]"#.parse().unwrap()), ), ]; let mut ctx = EvalContext::default(); for (inputs, exp) in cases { let args: Vec<_> = inputs.into_iter().map(datum_expr).collect(); let op = scalar_func_expr(ScalarFuncSig::JsonMergeSig, &args); let op = Expression::build(&mut ctx, op).unwrap(); let got = op.eval(&mut ctx, &[]).unwrap(); assert_eq!(got, exp); } } #[test] fn test_json_invalid_arguments() { let cases = vec![ (ScalarFuncSig::JsonObjectSig, make_null_datums(3)), (ScalarFuncSig::JsonSetSig, make_null_datums(4)), (ScalarFuncSig::JsonInsertSig, make_null_datums(6)), (ScalarFuncSig::JsonReplaceSig, make_null_datums(8)), ]; let mut ctx = EvalContext::default(); for (sig, args) in cases { let args: Vec<_> = args.into_iter().map(datum_expr).collect(); let op = 
Expression::build(&mut ctx, scalar_func_expr(sig, &args)); assert!(op.is_err()); } } }
json_set
deserialize.rs
// Copyright 2020 - developers of the `grammers` project. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::fmt; #[derive(Clone, Debug, PartialEq)] pub enum Error { /// The end of the buffer was reached earlier than anticipated, which /// implies there is not enough data to complete the deserialization. UnexpectedEof, /// The error type indicating an unexpected constructor was found, /// for example, when reading data that doesn't represent the /// correct type (e.g. reading a `bool` when we expect a `Vec`). /// In particular, it can occur in the following situations: /// /// * When reading a boolean. /// * When reading a boxed vector. /// * When reading an arbitrary boxed type. /// /// It is important to note that unboxed or bare [`types`] lack the /// constructor information, and as such they cannot be validated. /// /// [`types`]: types/index.html UnexpectedConstructor { /// The unexpected constructor identifier. id: u32, }, } impl std::error::Error for Error {} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Self::UnexpectedEof => write!(f, "unexpected eof"), Self::UnexpectedConstructor { id } => write!(f, "unexpected constructor: {:08x}", id), } } } /// Re-implement `Cursor` to only work over in-memory buffers and greatly /// narrow the possible error cases. pub struct Cursor<'a> { buf: &'a [u8], pos: usize, } impl<'a> Cursor<'a> { pub fn from_slice(buf: &'a [u8]) -> Self { Self { buf, pos: 0 } } // TODO not a fan we need to expose this (and a way to create `Cursor`), // but crypto needs it because it needs to know where deserialization // of some inner data ends. pub fn pos(&self) -> usize { self.pos } pub fn read_byte(&mut self) -> Result<u8> { if self.pos < self.buf.len() { let byte = self.buf[self.pos]; self.pos += 1; Ok(byte) } else { Err(Error::UnexpectedEof) } } pub fn read_exact(&mut self, buf: &mut [u8]) -> Result<()> { if self.pos + buf.len() > self.buf.len() { Err(Error::UnexpectedEof) } else { buf.copy_from_slice(&self.buf[self.pos..self.pos + buf.len()]); self.pos += buf.len(); Ok(()) } } pub fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> { buf.extend(&self.buf[self.pos..]); let old = self.pos; self.pos = self.buf.len(); Ok(self.pos - old) } } /// The problem with being generic over `std::io::Read` is that it's /// fallible, but in practice, we're always going to serialize in-memory, /// so instead we just use a `[u8]` as our buffer. // TODO this is only public for session pub type Buffer<'a, 'b> = &'a mut Cursor<'b>; pub type Result<T> = std::result::Result<T, Error>; /// This trait allows for data serialized according to the /// [Binary Data Serialization] to be deserialized into concrete instances. /// /// [Binary Data Serialization]: https://core.telegram.org/mtproto/serialize pub trait Deserializable { /// Deserializes an instance of the type from a given buffer. fn deserialize(buf: Buffer) -> Result<Self> where Self: std::marker::Sized; /// Convenience function to deserialize an instance from a given buffer. 
/// /// # Examples /// /// ``` /// use grammers_tl_types::Deserializable; /// /// assert_eq!(bool::from_bytes(&[0x37, 0x97, 0x79, 0xbc]).unwrap(), false); /// ``` fn from_bytes(buf: &[u8]) -> Result<Self> where Self: std::marker::Sized, { Self::deserialize(&mut Cursor::from_slice(buf)) } } impl Deserializable for bool { /// Deserializes a boolean according to the following definitions: /// /// * `boolFalse#bc799737 = Bool;` deserializes into `false`. /// * `boolTrue#997275b5 = Bool;` deserializes into `true`. /// /// # Examples /// /// ``` /// use grammers_tl_types::Deserializable; /// /// assert_eq!(bool::from_bytes(&[0xb5, 0x75, 0x72, 0x99]).unwrap(), true); /// assert_eq!(bool::from_bytes(&[0x37, 0x97, 0x79, 0xbc]).unwrap(), false); /// ``` #[allow(clippy::unreadable_literal)] fn deserialize(buf: Buffer) -> Result<Self> { let id = u32::deserialize(buf)?; match id { 0x997275b5u32 => Ok(true), 0xbc799737u32 => Ok(false), _ => Err(Error::UnexpectedConstructor { id }), } } } impl Deserializable for i32 { /// Deserializes a 32-bit signed integer according to the following /// definition: /// /// * `int ? = Int;`. /// /// # Examples /// /// ``` /// use grammers_tl_types::Deserializable; /// /// assert_eq!(i32::from_bytes(&[0x00, 0x00, 0x00, 0x00]).unwrap(), 0i32); /// assert_eq!(i32::from_bytes(&[0x01, 0x00, 0x00, 0x00]).unwrap(), 1i32); /// assert_eq!(i32::from_bytes(&[0xff, 0xff, 0xff, 0xff]).unwrap(), -1i32); /// assert_eq!(i32::from_bytes(&[0xff, 0xff, 0xff, 0x7f]).unwrap(), i32::max_value()); /// assert_eq!(i32::from_bytes(&[0x00, 0x00, 0x00, 0x80]).unwrap(), i32::min_value()); /// ``` fn deserialize(buf: Buffer) -> Result<Self> { let mut buffer = [0u8; 4]; buf.read_exact(&mut buffer)?; Ok(Self::from_le_bytes(buffer)) } } impl Deserializable for u32 { /// Deserializes a 32-bit unsigned integer according to the following /// definition: /// /// * `int ? = Int;`. /// /// # Examples /// /// ``` /// use grammers_tl_types::Deserializable; /// /// assert_eq!(u32::from_bytes(&[0x00, 0x00, 0x00, 0x00]).unwrap(), 0u32); /// assert_eq!(u32::from_bytes(&[0x01, 0x00, 0x00, 0x00]).unwrap(), 1u32); /// assert_eq!(u32::from_bytes(&[0xff, 0xff, 0xff, 0xff]).unwrap(), u32::max_value()); /// assert_eq!(u32::from_bytes(&[0x00, 0x00, 0x00, 0x00]).unwrap(), u32::min_value()); /// ``` fn deserialize(buf: Buffer) -> Result<Self> { let mut buffer = [0u8; 4]; buf.read_exact(&mut buffer)?; Ok(Self::from_le_bytes(buffer)) } } impl Deserializable for i64 { /// Deserializes a 64-bit signed integer according to the following /// definition: /// /// * `long ? = Long;`. /// /// # Examples /// /// ``` /// use grammers_tl_types::Deserializable; /// /// assert_eq!(i64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]).unwrap(), 0i64); /// assert_eq!(i64::from_bytes(&[0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]).unwrap(), 1i64); /// assert_eq!(i64::from_bytes(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]).unwrap(), (-1i64)); /// assert_eq!(i64::from_bytes(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f]).unwrap(), i64::max_value()); /// assert_eq!(i64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80]).unwrap(), i64::min_value()); /// ``` fn deserialize(buf: Buffer) -> Result<Self> { let mut buffer = [0u8; 8]; buf.read_exact(&mut buffer)?; Ok(Self::from_le_bytes(buffer)) } } impl Deserializable for [u8; 16] { /// Deserializes the 128-bit integer according to the following /// definition: /// /// * `int128 4*[ int ] = Int128;`. 
    ///
    /// # Examples
    ///
    /// ```
    /// use grammers_tl_types::Deserializable;
    ///
    /// let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
    ///
    /// assert_eq!(<[u8; 16]>::from_bytes(&data).unwrap(), data);
    /// ```
    fn deserialize(buf: Buffer) -> Result<Self> {
        let mut buffer = [0u8; 16];
        buf.read_exact(&mut buffer)?;
        Ok(buffer)
    }
}

impl Deserializable for [u8; 32] {
    /// Deserializes the 256-bit integer according to the following
    /// definition:
    ///
    /// * `int256 8*[ int ] = Int256;`.
    ///
    /// # Examples
    ///
    /// ```
    /// use grammers_tl_types::Deserializable;
    ///
    /// let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
    ///             18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
    ///
    /// assert_eq!(<[u8; 32]>::from_bytes(&data).unwrap(), data);
    /// ```
    fn deserialize(buf: Buffer) -> Result<Self> {
        let mut buffer = [0u8; 32];
        buf.read_exact(&mut buffer)?;
        Ok(buffer)
    }
}

impl Deserializable for f64 {
    /// Deserializes a 64-bit floating point according to the
    /// following definition:
    ///
    /// * `double ? = Double;`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::f64;
    /// use grammers_tl_types::Deserializable;
    ///
    /// assert_eq!(f64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]).unwrap(), 0f64);
    /// assert_eq!(f64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf8, 0x3f]).unwrap(), 1.5f64);
    /// assert_eq!(f64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf8, 0xbf]).unwrap(), -1.5f64);
    /// assert_eq!(f64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf0, 0x7f]).unwrap(), f64::INFINITY);
    /// assert_eq!(f64::from_bytes(&[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf0, 0xff]).unwrap(), f64::NEG_INFINITY);
    /// ```
    fn deserialize(buf: Buffer) -> Result<Self> {
        let mut buffer = [0u8; 8];
        buf.read_exact(&mut buffer)?;
        Ok(Self::from_le_bytes(buffer))
    }
}

impl<T: Deserializable> Deserializable for Vec<T> {
    /// Deserializes a vector of deserializable items according to the
    /// following definition:
    ///
    /// * `vector#1cb5c415 {t:Type} # [ t ] = Vector t;`.
    ///
    /// # Examples
    ///
    /// ```
    /// use grammers_tl_types::Deserializable;
    ///
    /// assert_eq!(Vec::<i32>::from_bytes(&[0x15, 0xc4, 0xb5, 0x1c, 0x0, 0x0, 0x0, 0x0]).unwrap(), Vec::new());
    /// assert_eq!(Vec::<i32>::from_bytes(&[0x15, 0xc4, 0xb5, 0x1c, 0x1, 0x0, 0x0, 0x0, 0x7f, 0x0, 0x0, 0x0]).unwrap(),
    ///            vec![0x7f_i32]);
    /// ```
    #[allow(clippy::unreadable_literal)]
    fn
(buf: Buffer) -> Result<Self> { let id = u32::deserialize(buf)?; if id != 0x1cb5c415u32 { return Err(Error::UnexpectedConstructor { id }); } let len = u32::deserialize(buf)?; Ok((0..len) .map(|_| T::deserialize(buf)) .collect::<Result<Vec<T>>>()?) } } impl<T: Deserializable> Deserializable for crate::RawVec<T> { /// Deserializes a vector of deserializable items according to the /// following definition: /// /// * `vector#1cb5c415 {t:Type} # [ t ] = Vector t;`. /// /// # Examples /// /// ``` /// use grammers_tl_types::{RawVec, Deserializable}; /// /// assert_eq!(RawVec::<i32>::from_bytes(&[0x0, 0x0, 0x0, 0x0]).unwrap().0, Vec::<i32>::new()); /// assert_eq!(RawVec::<i32>::from_bytes(&[0x1, 0x0, 0x0, 0x0, 0x7f, 0x0, 0x0, 0x0]).unwrap().0, vec![0x7f_i32]); /// ``` fn deserialize(buf: Buffer) -> Result<Self> { let len = u32::deserialize(buf)?; Ok(Self( (0..len) .map(|_| T::deserialize(buf)) .collect::<Result<Vec<T>>>()?, )) } } impl Deserializable for String { /// Deserializes a UTF-8 string according to the following definition: /// /// * `string ? = String;`. /// /// # Examples /// /// ``` /// use grammers_tl_types::Deserializable; /// /// fn test_string(string: &str, prefix: &[u8], suffix: &[u8]) { /// let bytes = { /// let mut tmp = prefix.to_vec(); /// tmp.extend(string.as_bytes()); /// tmp.extend(suffix); /// tmp /// }; /// let expected = string.to_owned(); /// /// assert_eq!(String::from_bytes(&bytes).unwrap(), expected); /// } /// /// test_string("", &[0x00], &[0x00, 0x00, 0x00]); /// test_string("Hi", &[0x02], &[0x0]); /// test_string("Hi!", &[0x03], &[]); /// test_string("Hello", &[0x05], &[0x0, 0x0]); /// test_string("Hello, world!", &[0xd], &[0x0, 0x0]); /// test_string( /// "This is a very long string, and it has to be longer than 253 \ /// characters, which are quite a few but we can make it! Although, \ /// it is quite challenging. The quick brown fox jumps over the lazy \ /// fox. There is still some more text we need to type. Oh, this \ /// sentence made it past!", /// &[0xfe, 0x11, 0x01, 0x00], /// &[0x00, 0x00, 0x00] /// ); /// ``` fn deserialize(buf: Buffer) -> Result<Self> { Ok(String::from_utf8_lossy(&Vec::<u8>::deserialize(buf)?).into()) } } impl Deserializable for Vec<u8> { /// Deserializes a vector of bytes as a byte-string according to the /// following definition: /// /// * `string ? = String;`. /// /// # Examples /// /// ``` /// use grammers_tl_types::{Deserializable}; /// /// assert_eq!(Vec::<u8>::from_bytes(&[0x00, 0x00, 0x00, 0x00]).unwrap(), Vec::new()); /// assert_eq!(Vec::<u8>::from_bytes(&[0x01, 0x7f, 0x00, 0x00]).unwrap(), vec![0x7f_u8]); /// ``` fn deserialize(buf: Buffer) -> Result<Self> { let first_byte = buf.read_byte()?; let (len, padding) = if first_byte == 254 { let mut buffer = [0u8; 3]; buf.read_exact(&mut buffer)?; let len = (buffer[0] as usize) | ((buffer[1] as usize) << 8) | ((buffer[2] as usize) << 16); (len, len % 4) } else { let len = first_byte as usize; (len, (len + 1) % 4) }; let mut result = vec![0u8; len]; buf.read_exact(&mut result)?; if padding > 0 { for _ in 0..(4 - padding) { buf.read_byte()?; } } Ok(result) } }
deserialize
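A quick aside on the framing that `Vec<u8>::deserialize` reverses above: short payloads carry a 1-byte length, payloads of 254 bytes or more carry a `0xfe` marker plus a 3-byte little-endian length, and the whole frame is zero-padded to a multiple of 4 bytes. A minimal sketch of the encoding side (the `tl_string_frame` helper is invented for illustration and is not part of the crate):

```
// Illustrative only: the framing that `Vec<u8>::deserialize` undoes.
fn tl_string_frame(payload: &[u8]) -> Vec<u8> {
    let mut out = Vec::new();
    if payload.len() < 254 {
        // Short form: a single length byte precedes the payload.
        out.push(payload.len() as u8);
    } else {
        // Long form: 0xfe marker plus 3-byte little-endian length.
        let len_bytes = (payload.len() as u32).to_le_bytes();
        out.push(254);
        out.extend_from_slice(&len_bytes[..3]);
    }
    out.extend_from_slice(payload);
    // Zero-pad the frame to a 4-byte boundary.
    while out.len() % 4 != 0 {
        out.push(0);
    }
    out
}

fn main() {
    // Matches the `test_string("Hi", &[0x02], &[0x0])` doc test above:
    // one length byte, two payload bytes, one pad byte.
    assert_eq!(tl_string_frame(b"Hi"), vec![0x02, b'H', b'i', 0x00]);
}
```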
lib.rs
// The quote macro can require a high recursion limit
#![recursion_limit = "256"]

extern crate proc_macro;

use butane_core::*;
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::TokenTree;
use quote::quote;
use std::path::PathBuf;
use syn::{Expr, Ident};

mod filter;

/// Attribute macro which marks a struct as being a data model and
/// generates an implementation of [`DataObject`](butane_core::DataObject). This
/// macro will also write information to disk at compile time necessary to
/// generate migrations.
///
/// ## Restrictions on model types:
/// 1. The type of each field must implement [`FieldType`] or be [`Many`].
/// 2. There must be a primary key field. This must be either annotated with a `#[pk]` attribute or named `id`.
///
/// ## Helper Attributes
/// * `#[table = "NAME"]` used on the struct to specify the name of the table (defaults to struct name)
/// * `#[pk]` on a field to specify that it is the primary key.
/// * `#[auto]` on a field indicates that the field's value is
///   initialized based on serial/autoincrement. Currently supported
///   only on the primary key and only if the primary key is an integer
///   type.
/// * `#[unique]` on a field indicates that the field's value must be unique
///   (perhaps implemented as the SQL UNIQUE constraint by some backends).
/// * `#[default]` should be used on fields added by later migrations to avoid errors on existing objects.
///   Unnecessary if the new field is an `Option<>`.
///
/// For example:
/// ```ignore
/// #[model]
/// #[table = "posts"]
/// pub struct Post {
///     #[auto]
///     #[pk] // unnecessary if identifier were named id instead
///     pub identifier: i32,
///     pub title: String,
///     pub content: String,
///     #[default = false]
///     pub published: bool,
/// }
/// ```
///
/// [`FieldType`]: crate::FieldType
/// [`Many`]: butane_core::many::Many
#[proc_macro_attribute]
pub fn model(_args: TokenStream, input: TokenStream) -> TokenStream {
    codegen::model_with_migrations(input.into(), &mut migrations_for_dir()).into()
}

/// Attribute macro which generates an implementation of
/// [`DataResult`](butane_core::DataResult). Continuing with our blog
/// post example from [model](macro@model), we could create a `DataResult` with
/// only some of the fields from `Post` (to avoid fetching all of them in a query).
///
/// ```ignore
/// #[dataresult(Post)]
/// pub struct PostMetadata {
///     pub id: i64,
///     pub title: String,
///     pub pub_time: Option<NaiveDateTime>,
/// }
/// ```
///
/// Note that the attribute takes a parameter saying which Model this
/// result is a subset of. Every field named in the DataResult must be
/// present in the Model.
#[proc_macro_attribute]
pub fn dataresult(args: TokenStream, input: TokenStream) -> TokenStream {
    codegen::dataresult(args.into(), input.into()).into()
}

#[proc_macro]
pub fn filter(input: TokenStream) -> TokenStream {
    let input: TokenStream2 = input.into();
    let args: Vec<TokenTree> = input.into_iter().collect();
    if args.len() < 2 {
        return make_compile_error!("Expected filter!(Type, expression)").into();
    }
    let tyid: Ident = match &args[0] {
        TokenTree::Ident(tyid) => tyid.clone(),
        TokenTree::Group(g) => match syn::parse2::<Ident>(g.stream()) {
            Ok(ident) => ident,
            Err(_) => {
                return make_compile_error!("Unexpected tokens in database object type {:?}", &g)
                    .into()
            }
        },
        _ => {
            return make_compile_error!("Unexpected tokens in database object type {:?}", &args[0])
                .into()
        }
    };
    if let TokenTree::Punct(_) = args[1] {
    } else
let expr: TokenStream2 = args.into_iter().skip(2).collect(); let expr: Expr = match syn::parse2(expr) { Ok(expr) => expr, Err(_) => { return make_compile_error!( "Expected filter!(Type, expression) but could not parse expression" ) .into() } }; filter::for_expr(&tyid, &expr).into() } /// Attribute macro which marks a type as being available to butane /// for use in models. /// /// May be used on type aliases, structs, or enums. Except when used /// on type aliases, it must be given a parameter specifying the /// SqlType it can be converted to. /// /// E.g. /// ```ignore /// #[butane_type] /// pub type CurrencyAmount = f64; /// /// #[butane_type(Text)] /// pub enum Currency { /// Dollars, /// Pounds, /// Euros, /// } /// impl ToSql for Currency { /// fn to_sql(&self) -> SqlVal { /// SqlVal::Text( /// match self { /// Self::Dollars => "dollars", /// Self::Pounds => "pounds", /// Self::Euros => "euros", /// } /// .to_string()) /// } /// } /// ``` #[proc_macro_attribute] pub fn butane_type(args: TokenStream, input: TokenStream) -> TokenStream { codegen::butane_type_with_migrations(args.into(), input.into(), &mut migrations_for_dir()) .into() } fn migrations_for_dir() -> migrations::FsMigrations { migrations::from_root(&migrations_dir()) } fn migrations_dir() -> PathBuf { let mut dir = PathBuf::from( std::env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR expected to be set"), ); dir.push(".butane"); dir.push("migrations"); dir }
{ return make_compile_error!("Expected filter!(Type, expression)").into(); }
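For orientation, here is roughly how the token handling in `filter` above plays out at a call site. The surrounding query-builder calls are assumptions for illustration, not a verbatim butane API reference; only the macro's token layout is the point:

```
// Hypothetical call site:
//
//     let published_posts = Post::query()
//         .filter(filter!(Post, published == true))
//         .load(&conn)?;
//
// Inside `filter!`, the collected tokens are:
//   args[0]  -> `Post`               (the model type identifier)
//   args[1]  -> `,`                  (checked to be punctuation)
//   args[2..] -> `published == true` (parsed as a `syn::Expr` and handed
//                                     to `filter::for_expr`)
```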
index.js
import React from 'react'; import TransactionsList from './TransactionsList'; const TransactionsPage = () => { return ( <div className='page-transactions'> <TransactionsList /> </div> ) }
export default TransactionsPage;
app.e2e-spec.ts
import { AppPage } from './app.po'; import { browser, logging } from 'protractor'; describe('workspace-project App', () => { let page: AppPage; beforeEach(() => { page = new AppPage(); });
it('should display welcome message', () => { page.navigateTo(); expect(page.getTitleText()).toEqual('oms-oh-sample-store app is running!'); }); afterEach(async () => { // Assert that there are no errors emitted from the browser const logs = await browser.manage().logs().get(logging.Type.BROWSER); expect(logs).not.toContain(jasmine.objectContaining({ level: logging.Level.SEVERE, } as logging.Entry)); }); });
mod.rs
use std::fmt; use std::time::Duration; pub use self::canonical_url::CanonicalUrl; pub use self::config::{homedir, Config, ConfigValue}; pub use self::dependency_queue::DependencyQueue; pub use self::diagnostic_server::RustfixDiagnosticServer; pub use self::errors::{internal, CargoResult, CliResult, Test}; pub use self::errors::{CargoTestError, CliError}; pub use self::flock::{FileLock, Filesystem}; pub use self::graph::Graph; pub use self::hasher::StableHasher; pub use self::hex::{hash_u64, short_hash, to_hex}; pub use self::into_url::IntoUrl; pub use self::into_url_with_base::IntoUrlWithBase; pub use self::lev_distance::{closest, closest_msg, lev_distance}; pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; pub use self::progress::{Progress, ProgressStyle}; pub use self::queue::Queue; pub use self::restricted_names::validate_package_name; pub use self::rustc::Rustc; pub use self::to_semver::ToSemver; pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; pub use self::workspace::{ add_path_args, path_args, print_available_benches, print_available_binaries, print_available_examples, print_available_packages, print_available_tests, }; mod canonical_url; pub mod command_prelude; pub mod config; pub mod cpu; mod dependency_queue; pub mod diagnostic_server; pub mod errors; mod flock; pub mod graph; mod hasher; pub mod hex; pub mod important_paths; pub mod interning; pub mod into_url; mod into_url_with_base; pub mod job; pub mod lev_distance; mod lockserver; pub mod machine_message; pub mod network; pub mod profile; mod progress; mod queue; pub mod restricted_names; pub mod rustc; pub mod to_semver; pub mod toml; mod vcs; mod workspace; pub fn elapsed(duration: Duration) -> String { let secs = duration.as_secs(); if secs >= 60 { format!("{}m {:02}s", secs / 60, secs % 60) } else { format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000) } } pub fn iter_join_onto<W, I, T>(mut w: W, iter: I, delim: &str) -> fmt::Result where W: fmt::Write, I: IntoIterator<Item = T>, T: std::fmt::Display,
pub fn iter_join<I, T>(iter: I, delim: &str) -> String where I: IntoIterator<Item = T>, T: std::fmt::Display, { let mut s = String::new(); let _ = iter_join_onto(&mut s, iter, delim); s } pub fn indented_lines(text: &str) -> String { text.lines() .map(|line| { if line.is_empty() { String::from("\n") } else { format!(" {}\n", line) } }) .collect() }
{ let mut it = iter.into_iter().peekable(); while let Some(n) = it.next() { write!(w, "{}", n)?; if it.peek().is_some() { write!(w, "{}", delim)?; } } Ok(()) }
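The `Peekable` lookahead in `iter_join_onto` above is what keeps the delimiter strictly between elements, with no trailing separator. A self-contained sketch of the same pattern (a standalone toy, not cargo code):

```
use std::fmt::Write;

// Same peekable-join pattern as iter_join_onto, reduced to a toy helper:
// emit the delimiter only when another element is still coming.
fn join<T: std::fmt::Display>(items: &[T], delim: &str) -> String {
    let mut s = String::new();
    let mut it = items.iter().peekable();
    while let Some(n) = it.next() {
        let _ = write!(s, "{}", n);
        if it.peek().is_some() {
            s.push_str(delim);
        }
    }
    s
}

fn main() {
    assert_eq!(join(&[3, 8, 11], ", "), "3, 8, 11");
    assert_eq!(join::<i32>(&[], ", "), "");
}
```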
ps_inventory.rs
This example shows how to use the qemu_procfs connector in conjunction with a specific OS layer.
This example uses the `Inventory` feature of memflow to create the connector itself and the OS instance.

The example is an adaptation of the memflow core process list example:
https://github.com/memflow/memflow/blob/next/memflow/examples/process_list.rs

# Remarks:
To run this example you must have the `qemu_procfs` connector and `win32` plugin installed on your system.
Make sure they can be found in one of the following locations:

~/.local/lib/memflow/
/usr/lib/memflow/

or in any other path found in the official memflow documentation.
*/
use std::env::args;

use log::{info, Level};

use memflow::prelude::v1::*;

fn main() {
    simplelog::TermLogger::init(
        Level::Debug.to_level_filter(),
        simplelog::Config::default(),
        simplelog::TerminalMode::Stdout,
        simplelog::ColorChoice::Auto,
    )
    .unwrap();

    let connector_args = args()
        .nth(1)
        .map(|arg| str::parse(arg.as_ref()).expect("unable to parse command line arguments"));

    let inventory = Inventory::scan();
    let connector = inventory
        .create_connector("qemu_procfs", None, connector_args.as_ref())
        .expect("unable to create qemu_procfs connector");
    let mut os = inventory
        .create_os("win32", Some(connector), None)
        .expect("unable to create win32 instance with qemu_procfs connector");

    let process_list = os.process_info_list().expect("unable to read process list");

    info!(
        "{:>5} {:>10} {:>10} {:<}",
        "PID", "SYS ARCH", "PROC ARCH", "NAME"
    );

    for p in process_list {
        info!(
            "{:>5} {:^10} {:^10} {}",
            p.pid, p.sys_arch, p.proc_arch, p.name
        );
    }
}
/*!
emit_param.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use hhbc_by_ref_ast_scope::Scope; use hhbc_by_ref_emit_attribute as emit_attribute; use hhbc_by_ref_emit_expression as emit_expression; use hhbc_by_ref_emit_fatal as emit_fatal; use hhbc_by_ref_emit_pos as emit_pos; use hhbc_by_ref_emit_type_hint::{hint_to_type_info, Kind}; use hhbc_by_ref_env::{emitter::Emitter, Env}; use hhbc_by_ref_hhas_param::HhasParam; use hhbc_by_ref_hhas_type::Info; use hhbc_by_ref_hhbc_string_utils::locals::strip_dollar; use hhbc_by_ref_instruction_sequence::{instr, InstrSeq, Result}; use hhbc_by_ref_options::LangFlags; use oxidized::{ aast_defs::{Hint, Hint_}, aast_visitor::{self, AstParams, Node}, ast as a, ast_defs::{Id, ParamKind}, pos::Pos, }; use std::collections::{BTreeMap, BTreeSet}; use std::marker::PhantomData; pub fn from_asts<'a, 'arena>( alloc: &'arena bumpalo::Bump, emitter: &mut Emitter<'arena>, tparams: &mut Vec<&str>, generate_defaults: bool, scope: &Scope<'a>, ast_params: &[a::FunParam], ) -> Result<Vec<HhasParam<'arena>>> { ast_params .iter() .map(|param| from_ast(alloc, emitter, tparams, generate_defaults, scope, param)) .collect::<Result<Vec<_>>>() .map(|params| { params .iter() .filter_map(|p| p.to_owned()) .collect::<Vec<_>>() }) .map(rename_params) } #[allow(clippy::needless_lifetimes)] fn rename_params<'arena>(mut params: Vec<HhasParam<'arena>>) -> Vec<HhasParam<'arena>> { fn rename<'arena>( names: &BTreeSet<String>, param_counts: &mut BTreeMap<String, usize>, param: &mut HhasParam<'arena>, ) { match param_counts.get_mut(&param.name) { None => { param_counts.insert(param.name.clone(), 0); } Some(count) => { let newname = format!("{}{}", param.name, count); *count += 1; if names.contains(&newname) { rename(names, param_counts, param); } else { param.name = newname; } } } } let mut param_counts = BTreeMap::new(); let names = params .iter() .map(|p| p.name.clone()) .collect::<BTreeSet<_>>(); params .iter_mut() .rev() .for_each(|p| rename(&names, &mut param_counts, p)); params.into_iter().collect() } fn from_ast<'a, 'arena>( alloc: &'arena bumpalo::Bump, emitter: &mut Emitter<'arena>, tparams: &mut Vec<&str>, generate_defaults: bool, scope: &Scope<'a>, param: &a::FunParam, ) -> Result<Option<HhasParam<'arena>>> { if param.is_variadic && param.name == "..." { return Ok(None); }; if param.is_variadic { tparams.push("array"); }; let nullable = param .expr .as_ref() .map_or(false, |a::Expr(_, e)| e.is_null()); let type_info = { let param_type_hint = if param.is_variadic { Some(Hint( Pos::make_none(), Box::new(Hint_::mk_happly( Id(Pos::make_none(), "array".to_string()), param .type_hint .get_hint() .as_ref() .map_or(vec![], |h| vec![h.clone()]), )), )) } else if emitter .options() .hhvm .hack_lang .flags .contains(LangFlags::ENABLE_ENUM_CLASSES) && param.user_attributes.iter().any(|a| match &a.name { Id(_, s) => s == "__Atom", }) { Some(Hint( Pos::make_none(), Box::new(Hint_::mk_happly( Id(Pos::make_none(), "HH\\string".to_string()), vec![], )), )) } else { param.type_hint.get_hint().clone() }; if let Some(h) = param_type_hint { Some(hint_to_type_info( alloc, &Kind::Param, false, nullable, &tparams[..], &h, )?) 
} else { None } }; // Do the type check for default value type and hint type if !nullable { if let Some(err_msg) = default_type_check(&param.name, &type_info, &param.expr) { return Err(emit_fatal::raise_fatal_parse(&param.pos, err_msg)); } }; aast_visitor::visit( &mut ResolverVisitor { phantom_a: PhantomData, phantom_b: PhantomData, }, &mut Ctx { emitter, scope }, &param.expr, ) .unwrap(); let default_value = if generate_defaults { param .expr .as_ref() .map(|expr| (emitter.label_gen_mut().next_default_arg(), expr.clone())) } else { None }; let is_inout = match param.callconv { Some(ParamKind::Pinout) => true, _ => false, }; Ok(Some(HhasParam { name: param.name.clone(), is_variadic: param.is_variadic, is_inout, user_attributes: emit_attribute::from_asts(alloc, emitter, &param.user_attributes)?, type_info, default_value, })) } pub fn
<'a, 'arena>( emitter: &mut Emitter<'arena>, env: &Env<'a, 'arena>, pos: &Pos, params: &[HhasParam<'arena>], ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { let alloc = env.arena; let param_to_setter = |param: &HhasParam<'arena>| { param.default_value.as_ref().map(|(lbl, expr)| { let instrs = InstrSeq::gather( alloc, vec![ emit_expression::emit_expr(emitter, env, &expr)?, emit_pos::emit_pos(alloc, pos), instr::setl( alloc, hhbc_by_ref_local::Type::Named( bumpalo::collections::String::from_str_in(param.name.as_str(), alloc) .into_bump_str(), ), ), instr::popc(alloc), ], ); Ok(InstrSeq::gather( alloc, vec![instr::label(alloc, lbl.to_owned()), instrs], )) }) }; let setters = params .iter() .filter_map(param_to_setter) .collect::<Result<Vec<_>>>()?; if setters.is_empty() { Ok((instr::empty(alloc), instr::empty(alloc))) } else { let l = emitter.label_gen_mut().next_regular(); Ok(( instr::label(alloc, l), InstrSeq::gather( alloc, vec![InstrSeq::gather(alloc, setters), instr::jmpns(alloc, l)], ), )) } } //struct ResolverVisitor<'a, 'arena>(PhantomData<&'a ()>); struct ResolverVisitor<'a, 'arena: 'a> { phantom_a: PhantomData<&'a ()>, phantom_b: PhantomData<&'arena ()>, } #[allow(dead_code)] struct Ctx<'a, 'arena: 'a> { emitter: &'a mut Emitter<'arena>, scope: &'a Scope<'a>, } impl<'ast, 'a, 'arena> aast_visitor::Visitor<'ast> for ResolverVisitor<'a, 'arena> { type P = AstParams<Ctx<'a, 'arena>, ()>; fn object(&mut self) -> &mut dyn aast_visitor::Visitor<'ast, P = Self::P> { self } fn visit_expr(&mut self, c: &mut Ctx<'a, 'arena>, p: &a::Expr) -> std::result::Result<(), ()> { p.recurse(c, self.object()) // TODO(hrust) implement on_CIexpr & remove dead_code on struct Ctx } } // Return None if it passes type check, otherwise return error msg fn default_type_check( param_name: &str, param_type_info: &Option<Info>, param_expr: &Option<a::Expr>, ) -> Option<String> { let hint_type = get_hint_display_name( param_type_info .as_ref() .and_then(|ti| ti.user_type.as_ref()), ); // If matches, return None, otherwise return default_type let default_type = hint_type.and_then(|ht| match_default_and_hint(ht, param_expr)); let param_true_name = strip_dollar(param_name); default_type.and_then(|dt| hint_type.map(|ht| match ht { "class" => format!( "Default value for parameter {} with a class type hint can only be NULL", param_true_name), _ => format!( "Default value for parameter {} with type {} needs to have the same type as the type hint {}", param_true_name, dt, ht) })) } fn get_hint_display_name(hint: Option<&String>) -> Option<&str> { hint.map(|h| match h.as_str() { "HH\\bool" => "bool", "HH\\varray" => "HH\\varray", "HH\\darray" => "HH\\darray", "HH\\varray_or_darray" => "HH\\varray_or_darray", "HH\\vec_or_dict" => "HH\\vec_or_dict", "HH\\AnyArray" => "HH\\AnyArray", "HH\\int" => "int", "HH\\num" => "num", "HH\\arraykey" => "arraykey", "HH\\float" => "float", "HH\\string" => "string", _ => "class", }) } // By now only check default type for bool, array, int, float and string. // Return None when hint_type and default_value matches (in hh mode, // "class" type matches anything). 
If not, return the default_value type string
// for printing a fatal parse error
fn match_default_and_hint(hint_type: &str, param_expr: &Option<a::Expr>) -> Option<&'static str> {
    if hint_type == "class" {
        return None;
    }
    match &param_expr.as_ref() {
        None => None,
        Some(e) => match e.1 {
            a::Expr_::True | a::Expr_::False => match hint_type {
                "bool" => None,
                _ => Some("Boolean"),
            },
            a::Expr_::Int(_) => match hint_type {
                "int" | "num" | "arraykey" | "float" => None,
                _ => Some("Int64"),
            },
            a::Expr_::Float(_) => match hint_type {
                "float" | "num" => None,
                _ => Some("Double"),
            },
            a::Expr_::String(_) => match hint_type {
                "string" | "arraykey" => None,
                _ => Some("String"),
            },
            _ => None,
        },
    }
}
emit_param_default_value_setter
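The recursive renaming in `rename_params` above is easy to misread: parameters are visited in reverse, the first occurrence of a name (i.e. the last parameter in source order) keeps it, and earlier duplicates get numeric suffixes, retrying with a larger counter whenever a suffixed candidate would collide with an existing name. A stripped-down sketch over plain strings, written under the assumption that it mirrors the logic above (it is not the HHVM code itself):

```
use std::collections::{BTreeMap, BTreeSet};

// Toy version of the rename step: `names` is the set of original names,
// `counts` tracks how many duplicates of each name we have already seen.
fn rename(names: &BTreeSet<String>, counts: &mut BTreeMap<String, usize>, name: &mut String) {
    match counts.get_mut(name.as_str()) {
        None => {
            // First time we see this name (scanning in reverse): keep it.
            counts.insert(name.clone(), 0);
        }
        Some(count) => {
            // Duplicate: try "<name><count>", retrying on collision
            // with any original name.
            let candidate = format!("{}{}", name, count);
            *count += 1;
            if names.contains(&candidate) {
                rename(names, counts, name);
            } else {
                *name = candidate;
            }
        }
    }
}

fn main() {
    let mut params = vec!["a".to_string(), "x".to_string(), "x".to_string()];
    let names: BTreeSet<String> = params.iter().cloned().collect();
    let mut counts = BTreeMap::new();
    for p in params.iter_mut().rev() {
        rename(&names, &mut counts, p);
    }
    // The last "x" (seen first in reverse) keeps its name; the earlier
    // one becomes "x0".
    assert_eq!(params, vec!["a".to_string(), "x0".to_string(), "x".to_string()]);
}
```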
graph.go
package main import ( "bytes" "fmt" "io" "os" "github.com/mb0/daql/dom" ) func graph(args []string) error
{ pr, err := project() if err != nil { return err } ss := pr.Schemas if len(args) > 0 { ss, err = filterSchemas(pr, args) if err != nil { return err } } var b bytes.Buffer fmt.Fprintf(&b, "digraph %s {\ngraph [rankdir=LR]\n", pr.Name) for _, s := range ss { fmt.Fprintf(&b, "subgraph cluster_%s {\n", s.Name) fmt.Fprintf(&b, "node[shape=record]\ncolor=gray\nlabel=\"%s\"\n", s.Name) for _, m := range s.Models { fmt.Fprintf(&b, "\"%s\" [label=\"%s\"]\n", m.Qualified(), m.Name) } fmt.Fprintf(&b, "}\n") } rels, err := dom.Relate(pr.Project) if err != nil { return err } for _, s := range ss { for _, m := range s.Models { key := m.Qualified() rel := rels[key] if rel == nil { continue } for _, r := range rel.Out { fmt.Fprintf(&b, "\"%s\"->\"%s\"\n", key, r.B.Qualified()) } } } fmt.Fprintf(&b, "}\n") _, err = io.Copy(os.Stdout, &b) return err }
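For reference, the buffer assembled above yields DOT text of roughly this shape, following the `Fprintf` format strings exactly (the schema and model names here are invented):

```
digraph blog {
graph [rankdir=LR]
subgraph cluster_post {
node[shape=record]
color=gray
label="post"
"post.Entry" [label="Entry"]
"post.Tag" [label="Tag"]
}
"post.Entry"->"post.Tag"
}
```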
index.js
export default Loading;
import React from 'react'; const Loading = () => { return <img alt='' src='https://media.giphy.com/media/3o7TKtbdY5oZuiyucg/giphy.gif' />; };
monitor_darwin_tailscaled.go
// Copyright (c) 2021 Tailscale Inc & AUTHORS All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build darwin,!redo package monitor import ( "bufio" "errors" "os/exec" "tailscale.com/syncs" "tailscale.com/types/logger" ) // unspecifiedMessage is a minimal message implementation that should not // be ignored. In general, OS-specific implementations should use better // types and avoid this if they can. type unspecifiedMessage struct{} func (unspecifiedMessage) ignore() bool { return false } func
(logf logger.Logf) (osMon, error) { return new(routeMonitorSubProcMon), nil } // routeMonitorSubProcMon is a very simple (temporary? but I know // better) monitor implementation for darwin in tailscaled-mode where // we can just shell out to "route -n monitor". It waits for any input // but doesn't parse it. Then we poll to see if something is different. type routeMonitorSubProcMon struct { closed syncs.AtomicBool cmd *exec.Cmd // of "/sbin/route -n monitor" br *bufio.Reader buf []byte } func (m *routeMonitorSubProcMon) Close() error { m.closed.Set(true) if m.cmd != nil { m.cmd.Process.Kill() m.cmd = nil } return nil } func (m *routeMonitorSubProcMon) Receive() (message, error) { if m.closed.Get() { return nil, errors.New("monitor closed") } if m.cmd == nil { cmd := exec.Command("/sbin/route", "-n", "monitor") outPipe, err := cmd.StdoutPipe() if err != nil { return nil, err } if err := cmd.Start(); err != nil { return nil, err } m.br = bufio.NewReader(outPipe) m.cmd = cmd m.buf = make([]byte, 16<<10) } _, err := m.br.Read(m.buf) if err != nil { m.Close() return nil, err } return unspecifiedMessage{}, nil }
newOSMon
methods_strings.go
package query import ( "bytes" "crypto/aes" "crypto/cipher" "crypto/hmac" "crypto/md5" "crypto/sha1" "crypto/sha256" "crypto/sha512" "encoding/ascii85" "encoding/base64" "encoding/csv" "encoding/hex" "encoding/json" "errors" "fmt" "html" "io/ioutil" "net/url" "path/filepath" "regexp" "strconv" "strings" "time" "github.com/Jeffail/benthos/v3/internal/xml" "github.com/OneOfOne/xxhash" "github.com/itchyny/timefmt-go" "github.com/microcosm-cc/bluemonday" "github.com/tilinna/z85" "gopkg.in/yaml.v3" ) var _ = registerSimpleMethod( NewMethodSpec( "bytes", "", ).InCategory( MethodCategoryCoercion, "Marshal a value into a byte array. If the value is already a byte array it is unchanged.", NewExampleSpec("", `root.first_byte = this.name.bytes().index(0)`, `{"name":"foobar bazson"}`, `{"first_byte":102}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { return IToBytes(v), nil }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "capitalize", "", ).InCategory( MethodCategoryStrings, "Takes a string value and returns a copy with all Unicode letters that begin words mapped to their Unicode title case.", NewExampleSpec("", `root.title = this.title.capitalize()`, `{"title":"the foo bar"}`, `{"title":"The Foo Bar"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return strings.Title(t), nil case []byte: return bytes.Title(t), nil } return nil, NewTypeError(v, ValueString) }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "encode", "", ).InCategory( MethodCategoryEncoding, "Encodes a string or byte array target according to a chosen scheme and returns a string result. Available schemes are: `base64`, `base64url`, `hex`, `ascii85`.", // NOTE: z85 has been removed from the list until we can support // misaligned data automatically. It'll still be supported for backwards // compatibility, but given it behaves differently to `ascii85` I think // it's a poor user experience to expose it. 
NewExampleSpec("", `root.encoded = this.value.encode("hex")`, `{"value":"hello world"}`, `{"encoded":"68656c6c6f20776f726c64"}`, ), NewExampleSpec("", `root.encoded = content().encode("ascii85")`, `this is totally unstructured data`, "{\"encoded\":\"FD,B0+DGm>FDl80Ci\\\"A>F`)8BEckl6F`M&(+Cno&@/\"}", ), ).Param(ParamString("scheme", "The encoding scheme to use.")), func(args *ParsedParams) (simpleMethod, error) { schemeStr, err := args.FieldString("scheme") if err != nil { return nil, err } var schemeFn func([]byte) (string, error) switch schemeStr { case "base64": schemeFn = func(b []byte) (string, error) { var buf bytes.Buffer e := base64.NewEncoder(base64.StdEncoding, &buf) e.Write(b) e.Close() return buf.String(), nil } case "base64url": schemeFn = func(b []byte) (string, error) { var buf bytes.Buffer e := base64.NewEncoder(base64.URLEncoding, &buf) e.Write(b) e.Close() return buf.String(), nil } case "hex": schemeFn = func(b []byte) (string, error) { var buf bytes.Buffer e := hex.NewEncoder(&buf) if _, err := e.Write(b); err != nil { return "", err } return buf.String(), nil } case "ascii85": schemeFn = func(b []byte) (string, error) { var buf bytes.Buffer e := ascii85.NewEncoder(&buf) if _, err := e.Write(b); err != nil { return "", err } if err := e.Close(); err != nil { return "", err } return buf.String(), nil } case "z85": schemeFn = func(b []byte) (string, error) { // TODO: Update this to support misaligned input data similar to the // ascii85 encoder. enc := make([]byte, z85.EncodedLen(len(b))) if _, err := z85.Encode(enc, b); err != nil { return "", err } return string(enc), nil } default: return nil, fmt.Errorf("unrecognized encoding type: %v", schemeStr) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var res string var err error switch t := v.(type) { case string: res, err = schemeFn([]byte(t)) case []byte: res, err = schemeFn(t) default: err = NewTypeError(v, ValueString) } return res, err }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "decode", "", ).InCategory( MethodCategoryEncoding, "Decodes an encoded string target according to a chosen scheme and returns the result as a byte array. When mapping the result to a JSON field the value should be cast to a string using the method [`string`][methods.string], or encoded using the method [`encode`][methods.encode], otherwise it will be base64 encoded by default.\n\nAvailable schemes are: `base64`, `base64url`, `hex`, `ascii85`.", // NOTE: z85 has been removed from the list until we can support // misaligned data automatically. It'll still be supported for backwards // compatibility, but given it behaves differently to `ascii85` I think // it's a poor user experience to expose it. 
NewExampleSpec("", `root.decoded = this.value.decode("hex").string()`, `{"value":"68656c6c6f20776f726c64"}`, `{"decoded":"hello world"}`, ), NewExampleSpec("", `root = this.encoded.decode("ascii85")`, "{\"encoded\":\"FD,B0+DGm>FDl80Ci\\\"A>F`)8BEckl6F`M&(+Cno&@/\"}", `this is totally unstructured data`, ), ).Param(ParamString("scheme", "The decoding scheme to use.")), func(args *ParsedParams) (simpleMethod, error) { schemeStr, err := args.FieldString("scheme") if err != nil { return nil, err } var schemeFn func([]byte) ([]byte, error) switch schemeStr { case "base64": schemeFn = func(b []byte) ([]byte, error) { e := base64.NewDecoder(base64.StdEncoding, bytes.NewReader(b)) return ioutil.ReadAll(e) } case "base64url": schemeFn = func(b []byte) ([]byte, error) { e := base64.NewDecoder(base64.URLEncoding, bytes.NewReader(b)) return ioutil.ReadAll(e) } case "hex": schemeFn = func(b []byte) ([]byte, error) { e := hex.NewDecoder(bytes.NewReader(b)) return ioutil.ReadAll(e) } case "ascii85": schemeFn = func(b []byte) ([]byte, error) { e := ascii85.NewDecoder(bytes.NewReader(b)) return ioutil.ReadAll(e) } case "z85": schemeFn = func(b []byte) ([]byte, error) { // TODO: Update this to support misaligned input data similar to the // ascii85 decoder. dec := make([]byte, z85.DecodedLen(len(b))) if _, err := z85.Decode(dec, b); err != nil { return nil, err } return dec, nil } default: return nil, fmt.Errorf("unrecognized encoding type: %v", schemeStr) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var res []byte var err error switch t := v.(type) { case string: res, err = schemeFn([]byte(t)) case []byte: res, err = schemeFn(t) default: err = NewTypeError(v, ValueString) } return res, err }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "encrypt_aes", "", ).InCategory( MethodCategoryEncoding, "Encrypts a string or byte array target according to a chosen AES encryption method and returns a string result. The algorithms require a key and an initialization vector / nonce. Available schemes are: `ctr`, `ofb`, `cbc`.", NewExampleSpec("", `let key = "2b7e151628aed2a6abf7158809cf4f3c".decode("hex") let vector = "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff".decode("hex") root.encrypted = this.value.encrypt_aes("ctr", $key, $vector).encode("hex")`, `{"value":"hello world!"}`, `{"encrypted":"84e9b31ff7400bdf80be7254"}`, ), ). Param(ParamString("scheme", "The scheme to use for encryption, one of `ctr`, `ofb`, `cbc`.")). Param(ParamString("key", "A key to encrypt with.")). 
Param(ParamString("iv", "An initialization vector / nonce.")), func(args *ParsedParams) (simpleMethod, error) { schemeStr, err := args.FieldString("scheme") if err != nil { return nil, err } keyStr, err := args.FieldString("key") if err != nil { return nil, err } ivStr, err := args.FieldString("iv") if err != nil { return nil, err } key := []byte(keyStr) iv := []byte(ivStr) block, err := aes.NewCipher(key) if err != nil { return nil, err } var schemeFn func([]byte) (string, error) switch schemeStr { case "ctr": schemeFn = func(b []byte) (string, error) { ciphertext := make([]byte, len(b)) stream := cipher.NewCTR(block, iv) stream.XORKeyStream(ciphertext, b) return string(ciphertext), nil } case "ofb": schemeFn = func(b []byte) (string, error) { ciphertext := make([]byte, len(b)) stream := cipher.NewOFB(block, iv) stream.XORKeyStream(ciphertext, b) return string(ciphertext), nil } case "cbc": schemeFn = func(b []byte) (string, error) { if len(b)%aes.BlockSize != 0 { return "", fmt.Errorf("plaintext is not a multiple of the block size") } ciphertext := make([]byte, len(b)) stream := cipher.NewCBCEncrypter(block, iv) stream.CryptBlocks(ciphertext, b) return string(ciphertext), nil } default: return nil, fmt.Errorf("unrecognized encryption type: %v", schemeStr) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var res string var err error switch t := v.(type) { case string: res, err = schemeFn([]byte(t)) case []byte: res, err = schemeFn(t) default: err = NewTypeError(v, ValueString) } return res, err }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "decrypt_aes", "", ).InCategory( MethodCategoryEncoding, "Decrypts an encrypted string or byte array target according to a chosen AES encryption method and returns the result as a byte array. The algorithms require a key and an initialization vector / nonce. Available schemes are: `ctr`, `ofb`, `cbc`.", NewExampleSpec("", `let key = "2b7e151628aed2a6abf7158809cf4f3c".decode("hex") let vector = "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff".decode("hex") root.decrypted = this.value.decode("hex").decrypt_aes("ctr", $key, $vector).string()`, `{"value":"84e9b31ff7400bdf80be7254"}`, `{"decrypted":"hello world!"}`, ), ). Param(ParamString("scheme", "The scheme to use for decryption, one of `ctr`, `ofb`, `cbc`.")). Param(ParamString("key", "A key to decrypt with.")). 
Param(ParamString("iv", "An initialization vector / nonce.")), func(args *ParsedParams) (simpleMethod, error) { schemeStr, err := args.FieldString("scheme") if err != nil { return nil, err } keyStr, err := args.FieldString("key") if err != nil { return nil, err } ivStr, err := args.FieldString("iv") if err != nil { return nil, err } key := []byte(keyStr) iv := []byte(ivStr) block, err := aes.NewCipher(key) if err != nil { return nil, err } var schemeFn func([]byte) ([]byte, error) switch schemeStr { case "ctr": schemeFn = func(b []byte) ([]byte, error) { plaintext := make([]byte, len(b)) stream := cipher.NewCTR(block, iv) stream.XORKeyStream(plaintext, b) return plaintext, nil } case "ofb": schemeFn = func(b []byte) ([]byte, error) { plaintext := make([]byte, len(b)) stream := cipher.NewOFB(block, iv) stream.XORKeyStream(plaintext, b) return plaintext, nil } case "cbc": schemeFn = func(b []byte) ([]byte, error) { if len(b)%aes.BlockSize != 0 { return nil, fmt.Errorf("ciphertext is not a multiple of the block size") } stream := cipher.NewCBCDecrypter(block, iv) stream.CryptBlocks(b, b) return b, nil } default: return nil, fmt.Errorf("unrecognized decryption type: %v", schemeStr) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var res []byte var err error switch t := v.(type) { case string: res, err = schemeFn([]byte(t)) case []byte: res, err = schemeFn(t) default: err = NewTypeError(v, ValueString) } return res, err }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "escape_html", "", ).InCategory( MethodCategoryStrings, "Escapes a string so that special characters like `<` to become `&lt;`. It escapes only five such characters: `<`, `>`, `&`, `'` and `\"` so that it can be safely placed within an HTML entity.", NewExampleSpec("", `root.escaped = this.value.escape_html()`, `{"value":"foo & bar"}`, `{"escaped":"foo &amp; bar"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return stringMethod(func(s string) (interface{}, error) { return html.EscapeString(s), nil }), nil }, ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "index_of", "", ).InCategory( MethodCategoryStrings, "Returns the starting index of the argument substring in a string target, or `-1` if the target doesn't contain the argument.", NewExampleSpec("", `root.index = this.thing.index_of("bar")`, `{"thing":"foobar"}`, `{"index":3}`, ), NewExampleSpec("", `root.index = content().index_of("meow")`, `the cat meowed, the dog woofed`, `{"index":8}`, ), ), func(args ...interface{}) (simpleMethod, error) { substring := args[0].(string) return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return int64(strings.Index(t, substring)), nil case []byte: return int64(bytes.Index(t, []byte(substring))), nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "unescape_html", "", ).InCategory( MethodCategoryStrings, "Unescapes a string so that entities like `&lt;` become `<`. It unescapes a larger range of entities than `escape_html` escapes. 
For example, `&aacute;` unescapes to `á`, as does `&#225;` and `&#xE1;`.",
		NewExampleSpec("",
			`root.unescaped = this.value.unescape_html()`,
			`{"value":"foo &amp; bar"}`,
			`{"unescaped":"foo & bar"}`,
		),
	),
	func(*ParsedParams) (simpleMethod, error) {
		return stringMethod(func(s string) (interface{}, error) {
			return html.UnescapeString(s), nil
		}), nil
	},
)

//------------------------------------------------------------------------------

var _ = registerSimpleMethod(
	NewMethodSpec(
		"escape_url_query", "",
	).InCategory(
		MethodCategoryStrings,
		"Escapes a string so that it can be safely placed within a URL query.",
		NewExampleSpec("",
			`root.escaped = this.value.escape_url_query()`,
			`{"value":"foo & bar"}`,
			`{"escaped":"foo+%26+bar"}`,
		),
	),
	func(*ParsedParams) (simpleMethod, error) {
		return stringMethod(func(s string) (interface{}, error) {
			return url.QueryEscape(s), nil
		}), nil
	},
)

//------------------------------------------------------------------------------

var _ = registerSimpleMethod(
	NewMethodSpec(
		"unescape_url_query", "",
	).InCategory(
		MethodCategoryStrings,
		"Expands escape sequences from a URL query string.",
		NewExampleSpec("",
			`root.unescaped = this.value.unescape_url_query()`,
			`{"value":"foo+%26+bar"}`,
			`{"unescaped":"foo & bar"}`,
		),
	),
	func(*ParsedParams) (simpleMethod, error) {
		return stringMethod(func(s string) (interface{}, error) {
			return url.QueryUnescape(s)
		}), nil
	},
)

//------------------------------------------------------------------------------

var _ = registerSimpleMethod(
	NewMethodSpec(
		"filepath_join", "",
	).InCategory(
		MethodCategoryStrings,
		"Joins an array of path elements into a single file path. The separator depends on the operating system of the machine.",
		NewExampleSpec("",
			`root.path = this.path_elements.filepath_join()`,
			strings.ReplaceAll(`{"path_elements":["/foo/","bar.txt"]}`, "/", string(filepath.Separator)),
			strings.ReplaceAll(`{"path":"/foo/bar.txt"}`, "/", string(filepath.Separator)),
		),
	),
	func(*ParsedParams) (simpleMethod, error) {
		return func(v interface{}, ctx FunctionContext) (interface{}, error) {
			arr, ok := v.([]interface{})
			if !ok {
				return nil, NewTypeError(v, ValueArray)
			}
			strs := make([]string, 0, len(arr))
			for i, ele := range arr {
				str, err := IGetString(ele)
				if err != nil {
					return nil, fmt.Errorf("path element %v: %w", i, err)
				}
				strs = append(strs, str)
			}
			return filepath.Join(strs...), nil
		}, nil
	},
)

var _ = registerSimpleMethod(
	NewMethodSpec(
		"filepath_split", "",
	).InCategory(
		MethodCategoryStrings,
		"Splits a file path immediately following the final Separator, separating it into a directory and file name component returned as a two element array of strings. If there is no Separator in the path, the first element will be empty and the second will contain the path.
The separator depends on the operating system of the machine.", NewExampleSpec("", `root.path_sep = this.path.filepath_split()`, strings.ReplaceAll(`{"path":"/foo/bar.txt"}`, "/", string(filepath.Separator)), strings.ReplaceAll(`{"path_sep":["/foo/","bar.txt"]}`, "/", string(filepath.Separator)), `{"path":"baz.txt"}`, `{"path_sep":["","baz.txt"]}`, ), ), func(*ParsedParams) (simpleMethod, error) { return stringMethod(func(s string) (interface{}, error) { dir, file := filepath.Split(s) return []interface{}{dir, file}, nil }), nil }, ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "format", "", ).InCategory( MethodCategoryStrings, "Use a value string as a format specifier in order to produce a new string, using any number of provided arguments.", NewExampleSpec("", `root.foo = "%s(%v): %v".format(this.name, this.age, this.fingers)`, `{"name":"lance","age":37,"fingers":13}`, `{"foo":"lance(37): 13"}`, ), ), func(args ...interface{}) (simpleMethod, error) { return stringMethod(func(s string) (interface{}, error) { return fmt.Sprintf(s, args...), nil }), nil }, true, ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "has_prefix", "", ).InCategory( MethodCategoryStrings, "Checks whether a string has a prefix argument and returns a bool.", NewExampleSpec("", `root.t1 = this.v1.has_prefix("foo") root.t2 = this.v2.has_prefix("foo")`, `{"v1":"foobar","v2":"barfoo"}`, `{"t1":true,"t2":false}`, ), ), func(args ...interface{}) (simpleMethod, error) { prefix := args[0].(string) prefixB := []byte(prefix) return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return strings.HasPrefix(t, prefix), nil case []byte: return bytes.HasPrefix(t, prefixB), nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "has_suffix", "", ).InCategory( MethodCategoryStrings, "Checks whether a string has a suffix argument and returns a bool.", NewExampleSpec("", `root.t1 = this.v1.has_suffix("foo") root.t2 = this.v2.has_suffix("foo")`, `{"v1":"foobar","v2":"barfoo"}`, `{"t1":false,"t2":true}`, ), ), func(args ...interface{}) (simpleMethod, error) { prefix := args[0].(string) prefixB := []byte(prefix) return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return strings.HasSuffix(t, prefix), nil case []byte: return bytes.HasSuffix(t, prefixB), nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "hash", "", ).InCategory( MethodCategoryEncoding, ` Hashes a string or byte array according to a chosen algorithm and returns the result as a byte array. When mapping the result to a JSON field the value should be cast to a string using the method `+"[`string`][methods.string], or encoded using the method [`encode`][methods.encode]"+`, otherwise it will be base64 encoded by default. Available algorithms are: `+"`hmac_sha1`, `hmac_sha256`, `hmac_sha512`, `md5`, `sha1`, `sha256`, `sha512`, `xxhash64`"+`. 
The following algorithms require a key, which is specified as a second argument: `+"`hmac_sha1`, `hmac_sha256`, `hmac_sha512`"+`.`, NewExampleSpec("", `root.h1 = this.value.hash("sha1").encode("hex") root.h2 = this.value.hash("hmac_sha1","static-key").encode("hex")`, `{"value":"hello world"}`, `{"h1":"2aae6c35c94fcfb415dbe95f408b9ce91ee846ed","h2":"d87e5f068fa08fe90bb95bc7c8344cb809179d76"}`, ), ), func(args ...interface{}) (simpleMethod, error) { var key []byte if len(args) > 1 { key = []byte(args[1].(string)) } var hashFn func([]byte) ([]byte, error) switch args[0].(string) { case "hmac_sha1", "hmac-sha1": if len(key) == 0 { return nil, fmt.Errorf("hash algorithm %v requires a key argument", args[0].(string)) } hashFn = func(b []byte) ([]byte, error) { hasher := hmac.New(sha1.New, key) hasher.Write(b) return hasher.Sum(nil), nil } case "hmac_sha256", "hmac-sha256": if len(key) == 0 { return nil, fmt.Errorf("hash algorithm %v requires a key argument", args[0].(string)) } hashFn = func(b []byte) ([]byte, error) { hasher := hmac.New(sha256.New, key) hasher.Write(b) return hasher.Sum(nil), nil } case "hmac_sha512", "hmac-sha512": if len(key) == 0 { return nil, fmt.Errorf("hash algorithm %v requires a key argument", args[0].(string)) } hashFn = func(b []byte) ([]byte, error) { hasher := hmac.New(sha512.New, key) hasher.Write(b) return hasher.Sum(nil), nil } case "md5": hashFn = func(b []byte) ([]byte, error) { hasher := md5.New() hasher.Write(b) return hasher.Sum(nil), nil } case "sha1": hashFn = func(b []byte) ([]byte, error) { hasher := sha1.New() hasher.Write(b) return hasher.Sum(nil), nil } case "sha256": hashFn = func(b []byte) ([]byte, error) { hasher := sha256.New() hasher.Write(b) return hasher.Sum(nil), nil } case "sha512": hashFn = func(b []byte) ([]byte, error) { hasher := sha512.New() hasher.Write(b) return hasher.Sum(nil), nil } case "xxhash64": hashFn = func(b []byte) ([]byte, error) { h := xxhash.New64() h.Write(b) return []byte(strconv.FormatUint(h.Sum64(), 10)), nil } default: return nil, fmt.Errorf("unrecognized hash type: %v", args[0]) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var res []byte var err error switch t := v.(type) { case string: res, err = hashFn([]byte(t)) case []byte: res, err = hashFn(t) default: err = NewTypeError(v, ValueString) } return res, err }, nil }, true, oldParamsExpectAtLeastOneArg(), oldParamsExpectStringArg(0), oldParamsExpectStringArg(1), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "join", "", ).InCategory( MethodCategoryObjectAndArray, "Join an array of strings with an optional delimiter into a single string.", NewExampleSpec("", `root.joined_words = this.words.join() root.joined_numbers = this.numbers.map_each(this.string()).join(",")`, `{"words":["hello","world"],"numbers":[3,8,11]}`, `{"joined_numbers":"3,8,11","joined_words":"helloworld"}`, ), ), func(args ...interface{}) (simpleMethod, error) { var delim string if len(args) > 0 { delim = args[0].(string) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { slice, ok := v.([]interface{}) if !ok { return nil, NewTypeError(v, ValueArray) } var buf bytes.Buffer for i, sv := range slice { if i > 0 { buf.WriteString(delim) } switch t := sv.(type) { case string: buf.WriteString(t) case []byte: buf.Write(t) default: return nil, fmt.Errorf("failed to join element %v: %w", i, NewTypeError(sv, ValueString)) } } return buf.String(), nil }, nil }, true, 
oldParamsExpectOneOrZeroArgs(), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "uppercase", "", ).InCategory( MethodCategoryStrings, "Convert a string value into uppercase.", NewExampleSpec("", `root.foo = this.foo.uppercase()`, `{"foo":"hello world"}`, `{"foo":"HELLO WORLD"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return strings.ToUpper(t), nil case []byte: return bytes.ToUpper(t), nil default: return nil, NewTypeError(v, ValueString) } }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "lowercase", "", ).InCategory( MethodCategoryStrings, "Convert a string value into lowercase.", NewExampleSpec("", `root.foo = this.foo.lowercase()`, `{"foo":"HELLO WORLD"}`, `{"foo":"hello world"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return strings.ToLower(t), nil case []byte: return bytes.ToLower(t), nil default: return nil, NewTypeError(v, ValueString) } }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "parse_csv", "", ).InCategory( MethodCategoryParsing, "Attempts to parse a string into an array of objects by following the CSV format described in RFC 4180. The first line is assumed to be a header row, which determines the keys of values in each object.", NewExampleSpec("", `root.orders = this.orders.parse_csv()`, `{"orders":"foo,bar\nfoo 1,bar 1\nfoo 2,bar 2"}`, `{"orders":[{"bar":"bar 1","foo":"foo 1"},{"bar":"bar 2","foo":"foo 2"}]}`, ), ), parseCSVMethod, ) func parseCSVMethod(*ParsedParams) (simpleMethod, error) {
//------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "parse_json", "", ).InCategory( MethodCategoryParsing, "Attempts to parse a string as a JSON document and returns the result.", NewExampleSpec("", `root.doc = this.doc.parse_json()`, `{"doc":"{\"foo\":\"bar\"}"}`, `{"doc":{"foo":"bar"}}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { var jsonBytes []byte switch t := v.(type) { case string: jsonBytes = []byte(t) case []byte: jsonBytes = t default: return nil, NewTypeError(v, ValueString) } var jObj interface{} if err := json.Unmarshal(jsonBytes, &jObj); err != nil { return nil, fmt.Errorf("failed to parse value as JSON: %w", err) } return jObj, nil }, nil }, ) var _ = registerSimpleMethod( NewMethodSpec( "parse_xml", "", ).InCategory( MethodCategoryParsing, `Attempts to parse a string as an XML document and returns a structured result, where elements appear as keys of an object according to the following rules: - If an element contains attributes they are parsed by prefixing a hyphen, `+"`-`"+`, to the attribute label. - If the element is a simple element and has attributes, the element value is given the key `+"`#text`"+`. - XML comments, directives, and process instructions are ignored. - When elements are repeated the resulting JSON value is an array.`, NewExampleSpec("", `root.doc = this.doc.parse_xml()`, `{"doc":"<root><title>This is a title</title><content>This is some content</content></root>"}`, `{"doc":{"root":{"content":"This is some content","title":"This is a title"}}}`, ), ).Beta(), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { var xmlBytes []byte switch t := v.(type) { case string: xmlBytes = []byte(t) case []byte: xmlBytes = t default: return nil, NewTypeError(v, ValueString) } xmlObj, err := xml.ToMap(xmlBytes) if err != nil { return nil, fmt.Errorf("failed to parse value as XML: %w", err) } return xmlObj, nil }, nil }, ) var _ = registerSimpleMethod( NewMethodSpec( "parse_yaml", "", ).InCategory( MethodCategoryParsing, "Attempts to parse a string as a single YAML document and returns the result.", NewExampleSpec("", `root.doc = this.doc.parse_yaml()`, `{"doc":"foo: bar"}`, `{"doc":{"foo":"bar"}}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { var yamlBytes []byte switch t := v.(type) { case string: yamlBytes = []byte(t) case []byte: yamlBytes = t default: return nil, NewTypeError(v, ValueString) } var sObj interface{} if err := yaml.Unmarshal(yamlBytes, &sObj); err != nil { return nil, fmt.Errorf("failed to parse value as YAML: %w", err) } return sObj, nil }, nil }, ) var _ = registerSimpleMethod( NewMethodSpec( "format_yaml", "", ).InCategory( MethodCategoryParsing, "Serializes a target value into a YAML byte array.", NewExampleSpec("", `root = this.doc.format_yaml()`, `{"doc":{"foo":"bar"}}`, `foo: bar `, ), NewExampleSpec("Use the `.string()` method in order to coerce the result into a string.", `root.doc = this.doc.format_yaml().string()`, `{"doc":{"foo":"bar"}}`, `{"doc":"foo: bar\n"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { return yaml.Marshal(v) }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "parse_duration", 
"", ).InCategory( MethodCategoryTime, `Attempts to parse a string as a duration and returns an integer of nanoseconds. A duration string is a possibly signed sequence of decimal numbers, each with an optional fraction and a unit suffix, such as "300ms", "-1.5h" or "2h45m". Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".`, NewExampleSpec("", `root.delay_for_ns = this.delay_for.parse_duration()`, `{"delay_for":"50us"}`, `{"delay_for_ns":50000}`, ), NewExampleSpec("", `root.delay_for_s = this.delay_for.parse_duration() / 1000000000`, `{"delay_for":"2h"}`, `{"delay_for_s":7200}`, ), ), func(*ParsedParams) (simpleMethod, error) { return stringMethod(func(s string) (interface{}, error) { d, err := time.ParseDuration(s) if err != nil { return nil, err } return d.Nanoseconds(), nil }), nil }, ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewDeprecatedMethodSpec( "parse_timestamp_unix", "", ).InCategory( MethodCategoryTime, "Attempts to parse a string as a timestamp, following ISO 8601 format by default, and returns the unix epoch.", NewExampleSpec("", `root.doc.timestamp = this.doc.timestamp.parse_timestamp_unix()`, `{"doc":{"timestamp":"2020-08-14T11:45:26.371Z"}}`, `{"doc":{"timestamp":1597405526}}`, ), NewExampleSpec( "An optional string argument can be used in order to specify the expected format of the timestamp. The format is defined by showing how the reference time, defined to be Mon Jan 2 15:04:05 -0700 MST 2006, would be displayed if it were the value.", `root.doc.timestamp = this.doc.timestamp.parse_timestamp_unix("2006-Jan-02")`, `{"doc":{"timestamp":"2020-Aug-14"}}`, `{"doc":{"timestamp":1597363200}}`, ), ), func(args ...interface{}) (simpleMethod, error) { layout := time.RFC3339Nano if len(args) > 0 { layout = args[0].(string) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var str string switch t := v.(type) { case []byte: str = string(t) case string: str = t default: return nil, NewTypeError(v, ValueString) } ut, err := time.Parse(layout, str) if err != nil { return nil, err } return ut.Unix(), nil }, nil }, true, oldParamsExpectOneOrZeroArgs(), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "parse_timestamp", "", ).InCategory( MethodCategoryTime, "Attempts to parse a string as a timestamp following a specified format and outputs a string following ISO 8601, which can then be fed into `format_timestamp`. 
The input format is defined by showing how the reference time, defined to be Mon Jan 2 15:04:05 -0700 MST 2006, would be displayed if it were the value.", NewExampleSpec("", `root.doc.timestamp = this.doc.timestamp.parse_timestamp("2006-Jan-02")`, `{"doc":{"timestamp":"2020-Aug-14"}}`, `{"doc":{"timestamp":"2020-08-14T00:00:00Z"}}`, ), ).Beta(), func(args ...interface{}) (simpleMethod, error) { layout := args[0].(string) return func(v interface{}, ctx FunctionContext) (interface{}, error) { var str string switch t := v.(type) { case []byte: str = string(t) case string: str = t default: return nil, NewTypeError(v, ValueString) } ut, err := time.Parse(layout, str) if err != nil { return nil, err } return ut.Format(time.RFC3339Nano), nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "parse_timestamp_strptime", "", ).InCategory( MethodCategoryTime, "Attempts to parse a string as a timestamp following a specified strptime-compatible format and outputs a string following ISO 8601, which can then be fed into `format_timestamp`.", NewExampleSpec( "The format consists of zero or more conversion specifiers and ordinary characters (except `%`). All ordinary characters are copied to the output string without modification. Each conversion specification begins with a `%` character followed by the character that determines the behaviour of the specifier. Please refer to [man 3 strptime](https://linux.die.net/man/3/strptime) for the list of format specifiers.", `root.doc.timestamp = this.doc.timestamp.parse_timestamp_strptime("%Y-%b-%d")`, `{"doc":{"timestamp":"2020-Aug-14"}}`, `{"doc":{"timestamp":"2020-08-14T00:00:00Z"}}`, ), ).Beta(), func(args ...interface{}) (simpleMethod, error) { layout := args[0].(string) return func(v interface{}, ctx FunctionContext) (interface{}, error) { var str string switch t := v.(type) { case []byte: str = string(t) case string: str = t default: return nil, NewTypeError(v, ValueString) } ut, err := timefmt.Parse(str, layout) if err != nil { return nil, err } return ut.Format(time.RFC3339Nano), nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "reverse", "", ).InCategory( MethodCategoryStrings, "Returns the target string in reverse order.", NewExampleSpec("", `root.reversed = this.thing.reverse()`, `{"thing":"backwards"}`, `{"reversed":"sdrawkcab"}`, ), NewExampleSpec("", `root = content().reverse()`, `{"thing":"backwards"}`, `}"sdrawkcab":"gniht"{`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: runes := []rune(t) for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 { runes[i], runes[j] = runes[j], runes[i] } return string(runes), nil case []byte: result := make([]byte, len(t)) for i, b := range t { result[len(t)-i-1] = b } return result, nil } return nil, NewTypeError(v, ValueString) }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "format_timestamp", "", ).InCategory( MethodCategoryTime, "Attempts to format a timestamp value as a string according to a specified format, or ISO 8601 by default. 
Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in ISO 8601 format.",
		NewExampleSpec("",
			`root.something_at = (this.created_at + 300).format_timestamp()`,
			// `{"created_at":1597405526}`,
			// `{"something_at":"2020-08-14T11:50:26.371Z"}`,
		),
		NewExampleSpec(
			"An optional string argument can be used in order to specify the output format of the timestamp. The format is defined by showing how the reference time, defined to be Mon Jan 2 15:04:05 -0700 MST 2006, would be displayed if it were the value.",
			`root.something_at = (this.created_at + 300).format_timestamp("2006-Jan-02 15:04:05")`,
			// `{"created_at":1597405526}`,
			// `{"something_at":"2020-Aug-14 11:50:26"}`,
		),
		NewExampleSpec(
			"A second optional string argument can also be used in order to specify a timezone, otherwise the timezone of the input string is used, or in the case of unix timestamps the local timezone is used.",
			`root.something_at = this.created_at.format_timestamp(format: "2006-Jan-02 15:04:05", tz: "UTC")`,
			`{"created_at":1597405526}`,
			`{"something_at":"2020-Aug-14 11:45:26"}`,
			`{"created_at":"2020-08-14T11:50:26.371Z"}`,
			`{"something_at":"2020-Aug-14 11:50:26"}`,
		),
		NewExampleSpec(
			"And `format_timestamp` supports up to nanosecond precision with floating point timestamp values.",
			`root.something_at = this.created_at.format_timestamp("2006-Jan-02 15:04:05.999999", "UTC")`,
			`{"created_at":1597405526.123456}`,
			`{"something_at":"2020-Aug-14 11:45:26.123456"}`,
			`{"created_at":"2020-08-14T11:50:26.371Z"}`,
			`{"something_at":"2020-Aug-14 11:50:26.371"}`,
		),
	).Beta().
		Param(ParamString("format", "The output format to use.").Default(time.RFC3339Nano)).
		Param(ParamString("tz", "An optional timezone to use, otherwise the timezone of the input string is used, or in the case of unix timestamps the local timezone is used.").Optional()),
	func(args *ParsedParams) (simpleMethod, error) {
		layout, err := args.FieldString("format")
		if err != nil {
			return nil, err
		}
		var timezone *time.Location
		tzOpt, err := args.FieldOptionalString("tz")
		if err != nil {
			return nil, err
		}
		if tzOpt != nil {
			if timezone, err = time.LoadLocation(*tzOpt); err != nil {
				return nil, fmt.Errorf("failed to parse timezone location name: %w", err)
			}
		}
		return func(v interface{}, ctx FunctionContext) (interface{}, error) {
			target, err := IGetTimestamp(v)
			if err != nil {
				return nil, err
			}
			if timezone != nil {
				target = target.In(timezone)
			}
			return target.Format(layout), nil
		}, nil
	},
)

//------------------------------------------------------------------------------

var _ = registerOldParamsSimpleMethod(
	NewMethodSpec(
		"format_timestamp_strftime", "",
	).InCategory(
		MethodCategoryTime,
		"Attempts to format a timestamp value as a string according to a specified strftime-compatible format. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in ISO 8601 format.",
		NewExampleSpec(
			"The format consists of zero or more conversion specifiers and ordinary characters (except `%`). All ordinary characters are copied to the output string without modification. Each conversion specification begins with a `%` character followed by the character that determines the behaviour of the specifier.
Please refer to [man 3 strftime](https://linux.die.net/man/3/strftime) for the list of format specifiers.", `root.something_at = (this.created_at + 300).format_timestamp_strftime("%Y-%b-%d %H:%M:%S")`, // `{"created_at":1597405526}`, // `{"something_at":"2020-Aug-14 11:50:26"}`, ), NewExampleSpec( "A second optional string argument can also be used in order to specify a timezone, otherwise the timezone of the input string is used, or in the case of unix timestamps the local timezone is used.", `root.something_at = this.created_at.format_timestamp_strftime("%Y-%b-%d %H:%M:%S", "UTC")`, `{"created_at":1597405526}`, `{"something_at":"2020-Aug-14 11:45:26"}`, `{"created_at":"2020-08-14T11:50:26.371Z"}`, `{"something_at":"2020-Aug-14 11:50:26"}`, ), ).Beta(), func(args ...interface{}) (simpleMethod, error) { layout := args[0].(string) var timezone *time.Location if len(args) > 1 { var err error if timezone, err = time.LoadLocation(args[1].(string)); err != nil { return nil, fmt.Errorf("failed to parse timezone location name: %w", err) } } return func(v interface{}, ctx FunctionContext) (interface{}, error) { target, err := IGetTimestamp(v) if err != nil { return nil, err } if timezone != nil { target = target.In(timezone) } return timefmt.Format(target, layout), nil }, nil }, true, oldParamsExpectBetweenNAndMArgs(1, 2), oldParamsExpectStringArg(0), oldParamsExpectStringArg(1), ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "format_timestamp_unix", "", ).InCategory( MethodCategoryTime, "Attempts to format a timestamp value as a unix timestamp. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in ISO 8601 format. The [`parse_timestamp`](#parse_timestamp) method can be used in order to parse different timestamp formats.", NewExampleSpec("", `root.created_at_unix = this.created_at.format_timestamp_unix()`, `{"created_at":"2009-11-10T23:00:00Z"}`, `{"created_at_unix":1257894000}`, ), ).Beta(), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { target, err := IGetTimestamp(v) if err != nil { return nil, err } return target.Unix(), nil }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "format_timestamp_unix_nano", "", ).InCategory( MethodCategoryTime, "Attempts to format a timestamp value as a unix timestamp with nanosecond precision. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in ISO 8601 format. 
The [`parse_timestamp`](#parse_timestamp) method can be used in order to parse different timestamp formats.", NewExampleSpec("", `root.created_at_unix = this.created_at.format_timestamp_unix_nano()`, `{"created_at":"2009-11-10T23:00:00Z"}`, `{"created_at_unix":1257894000000000000}`, ), ).Beta(), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { target, err := IGetTimestamp(v) if err != nil { return nil, err } return target.UnixNano(), nil }, nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "quote", "", ).InCategory( MethodCategoryStrings, "Quotes a target string using escape sequences (`\\t`, `\\n`, `\\xFF`, `\\u0100`) for control characters and non-printable characters.", NewExampleSpec("", `root.quoted = this.thing.quote()`, `{"thing":"foo\nbar"}`, `{"quoted":"\"foo\\nbar\""}`, ), ), func(*ParsedParams) (simpleMethod, error) { return stringMethod(func(s string) (interface{}, error) { return strconv.Quote(s), nil }), nil }, ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "unquote", "", ).InCategory( MethodCategoryStrings, "Unquotes a target string, expanding any escape sequences (`\\t`, `\\n`, `\\xFF`, `\\u0100`) for control characters and non-printable characters.", NewExampleSpec("", `root.unquoted = this.thing.unquote()`, `{"thing":"\"foo\\nbar\""}`, `{"unquoted":"foo\nbar"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return stringMethod(func(s string) (interface{}, error) { return strconv.Unquote(s) }), nil }, ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "replace", "", ).InCategory( MethodCategoryStrings, "Replaces all occurrences of the first argument in a target string with the second argument.", NewExampleSpec("", `root.new_value = this.value.replace("foo","dog")`, `{"value":"The foo ate my homework"}`, `{"new_value":"The dog ate my homework"}`, ), ), func(args ...interface{}) (simpleMethod, error) { match := args[0].(string) matchB := []byte(match) with := args[1].(string) withB := []byte(with) return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return strings.ReplaceAll(t, match, with), nil case []byte: return bytes.ReplaceAll(t, matchB, withB), nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectNArgs(2), oldParamsExpectStringArg(0), oldParamsExpectStringArg(1), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "replace_many", "", ).InCategory( MethodCategoryStrings, "For each pair of strings in an argument array, replaces all occurrences of the first item of the pair with the second. 
This is a more compact way of chaining a series of `replace` methods.", NewExampleSpec("", `root.new_value = this.value.replace_many([ "<b>", "&lt;b&gt;", "</b>", "&lt;/b&gt;", "<i>", "&lt;i&gt;", "</i>", "&lt;/i&gt;", ])`, `{"value":"<i>Hello</i> <b>World</b>"}`, `{"new_value":"&lt;i&gt;Hello&lt;/i&gt; &lt;b&gt;World&lt;/b&gt;"}`, ), ), func(args ...interface{}) (simpleMethod, error) { items, ok := args[0].([]interface{}) if !ok { return nil, NewTypeError(args[0], ValueArray) } if len(items)%2 != 0 { return nil, fmt.Errorf("invalid arg, replacements should be in pairs and must therefore be even: %v", items) } var replacePairs [][2]string var replacePairsBytes [][2][]byte for i := 0; i < len(items); i += 2 { from, err := IGetString(items[i]) if err != nil { return nil, fmt.Errorf("invalid replacement value at index %v: %w", i, err) } to, err := IGetString(items[i+1]) if err != nil { return nil, fmt.Errorf("invalid replacement value at index %v: %w", i+1, err) } replacePairs = append(replacePairs, [2]string{from, to}) replacePairsBytes = append(replacePairsBytes, [2][]byte{[]byte(from), []byte(to)}) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: for _, pair := range replacePairs { t = strings.ReplaceAll(t, pair[0], pair[1]) } return t, nil case []byte: for _, pair := range replacePairsBytes { t = bytes.ReplaceAll(t, pair[0], pair[1]) } return t, nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectNArgs(1), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "re_find_all", "", ).InCategory( MethodCategoryRegexp, "Returns an array containing all successive matches of a regular expression in a string.", NewExampleSpec("", `root.matches = this.value.re_find_all("a.")`, `{"value":"paranormal"}`, `{"matches":["ar","an","al"]}`, ), ), func(args ...interface{}) (simpleMethod, error) { re, err := regexp.Compile(args[0].(string)) if err != nil { return nil, err } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var result []interface{} switch t := v.(type) { case string: matches := re.FindAllString(t, -1) result = make([]interface{}, 0, len(matches)) for _, str := range matches { result = append(result, str) } case []byte: matches := re.FindAll(t, -1) result = make([]interface{}, 0, len(matches)) for _, str := range matches { result = append(result, string(str)) } default: return nil, NewTypeError(v, ValueString) } return result, nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "re_find_all_submatch", "", ).InCategory( MethodCategoryRegexp, "Returns an array of arrays containing all successive matches of the regular expression in a string and the matches, if any, of its subexpressions.", NewExampleSpec("", `root.matches = this.value.re_find_all_submatch("a(x*)b")`, `{"value":"-axxb-ab-"}`, `{"matches":[["axxb","xx"],["ab",""]]}`, ), ), func(args ...interface{}) (simpleMethod, error) { re, err := regexp.Compile(args[0].(string)) if err != nil { return nil, err } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var result []interface{} switch t := v.(type) { case string: groupMatches := re.FindAllStringSubmatch(t, -1) result = make([]interface{}, 0, len(groupMatches)) for _, matches := range groupMatches { r := make([]interface{}, 
0, len(matches)) for _, str := range matches { r = append(r, str) } result = append(result, r) } case []byte: groupMatches := re.FindAllSubmatch(t, -1) result = make([]interface{}, 0, len(groupMatches)) for _, matches := range groupMatches { r := make([]interface{}, 0, len(matches)) for _, str := range matches { r = append(r, string(str)) } result = append(result, r) } default: return nil, NewTypeError(v, ValueString) } return result, nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "re_find_object", "", ).InCategory( MethodCategoryRegexp, "Returns an object containing the first match of the regular expression and the matches of its subexpressions. The key of each match value is the name of the group when specified, otherwise it is the index of the matching group, starting with the expression as a whole at 0.", NewExampleSpec("", `root.matches = this.value.re_find_object("a(?P<foo>x*)b")`, `{"value":"-axxb-ab-"}`, `{"matches":{"0":"axxb","foo":"xx"}}`, ), NewExampleSpec("", `root.matches = this.value.re_find_object("(?P<key>\\w+):\\s+(?P<value>\\w+)")`, `{"value":"option1: value1"}`, `{"matches":{"0":"option1: value1","key":"option1","value":"value1"}}`, ), ), func(args ...interface{}) (simpleMethod, error) { re, err := regexp.Compile(args[0].(string)) if err != nil { return nil, err } groups := re.SubexpNames() for i, k := range groups { if k == "" { groups[i] = fmt.Sprintf("%v", i) } } return func(v interface{}, ctx FunctionContext) (interface{}, error) { result := make(map[string]interface{}, len(groups)) switch t := v.(type) { case string: groupMatches := re.FindStringSubmatch(t) for i, match := range groupMatches { key := groups[i] result[key] = match } case []byte: groupMatches := re.FindSubmatch(t) for i, match := range groupMatches { key := groups[i] result[key] = match } default: return nil, NewTypeError(v, ValueString) } return result, nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "re_find_all_object", "", ).InCategory( MethodCategoryRegexp, "Returns an array of objects containing all matches of the regular expression and the matches of its subexpressions. 
The key of each match value is the name of the group when specified, otherwise it is the index of the matching group, starting with the expression as a whole at 0.", NewExampleSpec("", `root.matches = this.value.re_find_all_object("a(?P<foo>x*)b")`, `{"value":"-axxb-ab-"}`, `{"matches":[{"0":"axxb","foo":"xx"},{"0":"ab","foo":""}]}`, ), NewExampleSpec("", `root.matches = this.value.re_find_all_object("(?m)(?P<key>\\w+):\\s+(?P<value>\\w+)$")`, `{"value":"option1: value1\noption2: value2\noption3: value3"}`, `{"matches":[{"0":"option1: value1","key":"option1","value":"value1"},{"0":"option2: value2","key":"option2","value":"value2"},{"0":"option3: value3","key":"option3","value":"value3"}]}`, ), ), func(args ...interface{}) (simpleMethod, error) { re, err := regexp.Compile(args[0].(string)) if err != nil { return nil, err } groups := re.SubexpNames() for i, k := range groups { if k == "" { groups[i] = fmt.Sprintf("%v", i) } } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var result []interface{} switch t := v.(type) { case string: reMatches := re.FindAllStringSubmatch(t, -1) result = make([]interface{}, 0, len(reMatches)) for _, matches := range reMatches { obj := make(map[string]interface{}, len(groups)) for i, match := range matches { key := groups[i] obj[key] = match } result = append(result, obj) } case []byte: reMatches := re.FindAllSubmatch(t, -1) result = make([]interface{}, 0, len(reMatches)) for _, matches := range reMatches { obj := make(map[string]interface{}, len(groups)) for i, match := range matches { key := groups[i] obj[key] = match } result = append(result, obj) } default: return nil, NewTypeError(v, ValueString) } return result, nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "re_match", "", ).InCategory( MethodCategoryRegexp, "Checks whether a regular expression matches against any part of a string and returns a boolean.", NewExampleSpec("", `root.matches = this.value.re_match("[0-9]")`, `{"value":"there are 10 puppies"}`, `{"matches":true}`, `{"value":"there are ten puppies"}`, `{"matches":false}`, ), ), func(args ...interface{}) (simpleMethod, error) { re, err := regexp.Compile(args[0].(string)) if err != nil { return nil, err } return func(v interface{}, ctx FunctionContext) (interface{}, error) { var result bool switch t := v.(type) { case string: result = re.MatchString(t) case []byte: result = re.Match(t) default: return nil, NewTypeError(v, ValueString) } return result, nil }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "re_replace", "", ).InCategory( MethodCategoryRegexp, "Replaces all occurrences of the argument regular expression in a string with a value. Inside the value $ signs are interpreted as submatch expansions, e.g. 
`$1` represents the text of the first submatch.", NewExampleSpec("", `root.new_value = this.value.re_replace("ADD ([0-9]+)","+($1)")`, `{"value":"foo ADD 70"}`, `{"new_value":"foo +(70)"}`, ), ), func(args ...interface{}) (simpleMethod, error) { re, err := regexp.Compile(args[0].(string)) if err != nil { return nil, err } with := args[1].(string) withBytes := []byte(with) return func(v interface{}, ctx FunctionContext) (interface{}, error) { var result string switch t := v.(type) { case string: result = re.ReplaceAllString(t, with) case []byte: result = string(re.ReplaceAll(t, withBytes)) default: return nil, NewTypeError(v, ValueString) } return result, nil }, nil }, true, oldParamsExpectNArgs(2), oldParamsExpectStringArg(0), oldParamsExpectStringArg(1), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "split", "", ).InCategory( MethodCategoryStrings, "Split a string value into an array of strings by splitting it on a string separator.", NewExampleSpec("", `root.new_value = this.value.split(",")`, `{"value":"foo,bar,baz"}`, `{"new_value":["foo","bar","baz"]}`, ), ), func(args ...interface{}) (simpleMethod, error) { delim := args[0].(string) delimB := []byte(delim) return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: bits := strings.Split(t, delim) vals := make([]interface{}, 0, len(bits)) for _, b := range bits { vals = append(vals, b) } return vals, nil case []byte: bits := bytes.Split(t, delimB) vals := make([]interface{}, 0, len(bits)) for _, b := range bits { vals = append(vals, b) } return vals, nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectNArgs(1), oldParamsExpectStringArg(0), ) //------------------------------------------------------------------------------ var _ = registerSimpleMethod( NewMethodSpec( "string", "", ).InCategory( MethodCategoryCoercion, "Marshal a value into a string. 
If the value is already a string it is unchanged.", NewExampleSpec("", `root.nested_json = this.string()`, `{"foo":"bar"}`, `{"nested_json":"{\"foo\":\"bar\"}"}`, ), NewExampleSpec("", `root.id = this.id.string()`, `{"id":228930314431312345}`, `{"id":"228930314431312345"}`, ), ), func(*ParsedParams) (simpleMethod, error) { return func(v interface{}, ctx FunctionContext) (interface{}, error) { return IToString(v), nil }, nil }, ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "strip_html", "", ).InCategory( MethodCategoryStrings, "Attempts to remove all HTML tags from a target string.", NewExampleSpec("", `root.stripped = this.value.strip_html()`, `{"value":"<p>the plain <strong>old text</strong></p>"}`, `{"stripped":"the plain old text"}`, ), NewExampleSpec("It's also possible to provide an explicit list of element types to preserve in the output.", `root.stripped = this.value.strip_html(["article"])`, `{"value":"<article><p>the plain <strong>old text</strong></p></article>"}`, `{"stripped":"<article>the plain old text</article>"}`, ), ), func(args ...interface{}) (simpleMethod, error) { p := bluemonday.NewPolicy() if len(args) > 0 { tags, ok := args[0].([]interface{}) if !ok { return nil, NewTypeError(args[0], ValueArray) } tagStrs := make([]string, len(tags)) for i, ele := range tags { if tagStrs[i], ok = ele.(string); !ok { return nil, fmt.Errorf("invalid arg at index %v: %w", i, NewTypeError(ele, ValueString)) } } p = p.AllowElements(tagStrs...) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: return p.Sanitize(t), nil case []byte: return p.SanitizeBytes(t), nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectOneOrZeroArgs(), ) //------------------------------------------------------------------------------ var _ = registerOldParamsSimpleMethod( NewMethodSpec( "trim", "", ).InCategory( MethodCategoryStrings, "Remove all leading and trailing characters from a string that are contained within an argument cutset. If no arguments are provided then whitespace is removed.", NewExampleSpec("", `root.title = this.title.trim("!?") root.description = this.description.trim()`, `{"description":" something happened and its amazing! ","title":"!!!watch out!?"}`, `{"description":"something happened and its amazing!","title":"watch out"}`, ), ), func(args ...interface{}) (simpleMethod, error) { var cutset string if len(args) > 0 { cutset = args[0].(string) } return func(v interface{}, ctx FunctionContext) (interface{}, error) { switch t := v.(type) { case string: if cutset == "" { return strings.TrimSpace(t), nil } return strings.Trim(t, cutset), nil case []byte: if cutset == "" { return bytes.TrimSpace(t), nil } return bytes.Trim(t, cutset), nil } return nil, NewTypeError(v, ValueString) }, nil }, true, oldParamsExpectOneOrZeroArgs(), oldParamsExpectStringArg(0), )
return func(v interface{}, ctx FunctionContext) (interface{}, error) { var csvBytes []byte switch t := v.(type) { case string: csvBytes = []byte(t) case []byte: csvBytes = t default: return nil, NewTypeError(v, ValueString) } r := csv.NewReader(bytes.NewReader(csvBytes)) strRecords, err := r.ReadAll() if err != nil { return nil, err } if len(strRecords) == 0 { return nil, errors.New("zero records were parsed") } records := make([]interface{}, 0, len(strRecords)-1) headers := strRecords[0] if len(headers) == 0 { return nil, fmt.Errorf("no headers found on first row") } for j, strRecord := range strRecords[1:] { if len(headers) != len(strRecord) { return nil, fmt.Errorf("record on line %v: record mismatch with headers", j) } obj := make(map[string]interface{}, len(strRecord)) for i, r := range strRecord { obj[headers[i]] = r } records = append(records, obj) } return records, nil }, nil }
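// The parse_csv body above maps the first CSV row to object keys and turns
// every later row into one object. Below is a minimal, self-contained sketch
// of the same header-mapping semantics in plain Go, outside the Benthos
// registration machinery; `csvToRecords` is an illustrative name, not part of
// the library.

package main

import (
	"bytes"
	"encoding/csv"
	"fmt"
)

// csvToRecords parses CSV data, treats row one as the header, and returns one
// map per remaining row keyed by the header fields.
func csvToRecords(data []byte) ([]map[string]interface{}, error) {
	rows, err := csv.NewReader(bytes.NewReader(data)).ReadAll()
	if err != nil {
		return nil, err
	}
	if len(rows) == 0 {
		return nil, fmt.Errorf("zero records were parsed")
	}
	headers := rows[0]
	records := make([]map[string]interface{}, 0, len(rows)-1)
	for _, row := range rows[1:] {
		if len(row) != len(headers) {
			return nil, fmt.Errorf("record mismatch with headers")
		}
		obj := make(map[string]interface{}, len(row))
		for i, cell := range row {
			obj[headers[i]] = cell
		}
		records = append(records, obj)
	}
	return records, nil
}

func main() {
	recs, err := csvToRecords([]byte("name,age\nfoo,1\nbar,2\n"))
	fmt.Println(recs, err) // [map[age:1 name:foo] map[age:2 name:bar]] <nil>
}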
model_bt_configuration_update_call_2933_all_of.go
/* * Onshape REST API * * The Onshape REST API consumed by all clients. * * API version: 1.113 * Contact: [email protected] * Generated by: OpenAPI Generator (https://openapi-generator.tech) */ package onshape import ( "encoding/json" ) // BTConfigurationUpdateCall2933AllOf struct for BTConfigurationUpdateCall2933AllOf type BTConfigurationUpdateCall2933AllOf struct { BtType *string `json:"btType,omitempty"` ConfigurationParameters *[]BTMConfigurationParameter819 `json:"configurationParameters,omitempty"` CurrentConfiguration *[]BTMParameter1 `json:"currentConfiguration,omitempty"` } // NewBTConfigurationUpdateCall2933AllOf instantiates a new BTConfigurationUpdateCall2933AllOf object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewBTConfigurationUpdateCall2933AllOf() *BTConfigurationUpdateCall2933AllOf { this := BTConfigurationUpdateCall2933AllOf{} return &this } // NewBTConfigurationUpdateCall2933AllOfWithDefaults instantiates a new BTConfigurationUpdateCall2933AllOf object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func
() *BTConfigurationUpdateCall2933AllOf { this := BTConfigurationUpdateCall2933AllOf{} return &this } // GetBtType returns the BtType field value if set, zero value otherwise. func (o *BTConfigurationUpdateCall2933AllOf) GetBtType() string { if o == nil || o.BtType == nil { var ret string return ret } return *o.BtType } // GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTConfigurationUpdateCall2933AllOf) GetBtTypeOk() (*string, bool) { if o == nil || o.BtType == nil { return nil, false } return o.BtType, true } // HasBtType returns a boolean if a field has been set. func (o *BTConfigurationUpdateCall2933AllOf) HasBtType() bool { if o != nil && o.BtType != nil { return true } return false } // SetBtType gets a reference to the given string and assigns it to the BtType field. func (o *BTConfigurationUpdateCall2933AllOf) SetBtType(v string) { o.BtType = &v } // GetConfigurationParameters returns the ConfigurationParameters field value if set, zero value otherwise. func (o *BTConfigurationUpdateCall2933AllOf) GetConfigurationParameters() []BTMConfigurationParameter819 { if o == nil || o.ConfigurationParameters == nil { var ret []BTMConfigurationParameter819 return ret } return *o.ConfigurationParameters } // GetConfigurationParametersOk returns a tuple with the ConfigurationParameters field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTConfigurationUpdateCall2933AllOf) GetConfigurationParametersOk() (*[]BTMConfigurationParameter819, bool) { if o == nil || o.ConfigurationParameters == nil { return nil, false } return o.ConfigurationParameters, true } // HasConfigurationParameters returns a boolean if a field has been set. func (o *BTConfigurationUpdateCall2933AllOf) HasConfigurationParameters() bool { if o != nil && o.ConfigurationParameters != nil { return true } return false } // SetConfigurationParameters gets a reference to the given []BTMConfigurationParameter819 and assigns it to the ConfigurationParameters field. func (o *BTConfigurationUpdateCall2933AllOf) SetConfigurationParameters(v []BTMConfigurationParameter819) { o.ConfigurationParameters = &v } // GetCurrentConfiguration returns the CurrentConfiguration field value if set, zero value otherwise. func (o *BTConfigurationUpdateCall2933AllOf) GetCurrentConfiguration() []BTMParameter1 { if o == nil || o.CurrentConfiguration == nil { var ret []BTMParameter1 return ret } return *o.CurrentConfiguration } // GetCurrentConfigurationOk returns a tuple with the CurrentConfiguration field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTConfigurationUpdateCall2933AllOf) GetCurrentConfigurationOk() (*[]BTMParameter1, bool) { if o == nil || o.CurrentConfiguration == nil { return nil, false } return o.CurrentConfiguration, true } // HasCurrentConfiguration returns a boolean if a field has been set. func (o *BTConfigurationUpdateCall2933AllOf) HasCurrentConfiguration() bool { if o != nil && o.CurrentConfiguration != nil { return true } return false } // SetCurrentConfiguration gets a reference to the given []BTMParameter1 and assigns it to the CurrentConfiguration field. 
func (o *BTConfigurationUpdateCall2933AllOf) SetCurrentConfiguration(v []BTMParameter1) { o.CurrentConfiguration = &v } func (o BTConfigurationUpdateCall2933AllOf) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.BtType != nil { toSerialize["btType"] = o.BtType } if o.ConfigurationParameters != nil { toSerialize["configurationParameters"] = o.ConfigurationParameters } if o.CurrentConfiguration != nil { toSerialize["currentConfiguration"] = o.CurrentConfiguration } return json.Marshal(toSerialize) } type NullableBTConfigurationUpdateCall2933AllOf struct { value *BTConfigurationUpdateCall2933AllOf isSet bool } func (v NullableBTConfigurationUpdateCall2933AllOf) Get() *BTConfigurationUpdateCall2933AllOf { return v.value } func (v *NullableBTConfigurationUpdateCall2933AllOf) Set(val *BTConfigurationUpdateCall2933AllOf) { v.value = val v.isSet = true } func (v NullableBTConfigurationUpdateCall2933AllOf) IsSet() bool { return v.isSet } func (v *NullableBTConfigurationUpdateCall2933AllOf) Unset() { v.value = nil v.isSet = false } func NewNullableBTConfigurationUpdateCall2933AllOf(val *BTConfigurationUpdateCall2933AllOf) *NullableBTConfigurationUpdateCall2933AllOf { return &NullableBTConfigurationUpdateCall2933AllOf{value: val, isSet: true} } func (v NullableBTConfigurationUpdateCall2933AllOf) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableBTConfigurationUpdateCall2933AllOf) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
NewBTConfigurationUpdateCall2933AllOfWithDefaults
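// A brief usage sketch for the generated model above. Only accessors defined
// in this file are called; the import path and the btType value are assumed
// for illustration and may differ in the real generated module.

package main

import (
	"fmt"

	"github.com/onshape/go-client/onshape" // assumed import path for this generated package
)

func main() {
	// Construct with defaults, set one optional field, and marshal: pointer
	// fields left nil are omitted from the JSON output entirely.
	call := onshape.NewBTConfigurationUpdateCall2933AllOfWithDefaults()
	call.SetBtType("BTConfigurationUpdateCall-2933") // illustrative value
	out, err := call.MarshalJSON()
	fmt.Println(string(out), err) // {"btType":"BTConfigurationUpdateCall-2933"} <nil>
}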
context.py
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os import sys from collections import defaultdict from contextlib import contextmanager from twitter.common.collections import OrderedSet from pants.base.build_environment import get_buildroot, get_scm from pants.base.worker_pool import SubprocPool from pants.base.workunit import WorkUnitLabel from pants.build_graph.target import Target from pants.goal.products import Products from pants.goal.workspace import ScmWorkspace from pants.process.lock import OwnerPrintingInterProcessFileLock from pants.reporting.report import Report from pants.source.source_root import SourceRootConfig class Context(object): """Contains the context for a single run of pants. Task implementations can access configuration data from pants.ini and any flags they have exposed here as well as information about the targets involved in the run. Advanced uses of the context include adding new targets to it for upstream or downstream goals to operate on and mapping of products a goal creates to the targets the products are associated with. :API: public """ class Log(object): """A logger facade that logs into the pants reporting framework.""" def __init__(self, run_tracker): self._run_tracker = run_tracker def debug(self, *msg_elements): self._run_tracker.log(Report.DEBUG, *msg_elements) def info(self, *msg_elements): self._run_tracker.log(Report.INFO, *msg_elements) def warn(self, *msg_elements): self._run_tracker.log(Report.WARN, *msg_elements) def error(self, *msg_elements): self._run_tracker.log(Report.ERROR, *msg_elements) def fatal(self, *msg_elements): self._run_tracker.log(Report.FATAL, *msg_elements) # TODO: Figure out a more structured way to construct and use context than this big flat # repository of attributes? def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None, build_graph=None, build_file_parser=None, address_mapper=None, console_outstream=None, scm=None, workspace=None, invalidation_report=None, scheduler=None): self._options = options self.build_graph = build_graph self.build_file_parser = build_file_parser self.address_mapper = address_mapper self.run_tracker = run_tracker self._log = self.Log(run_tracker) self._target_base = target_base or Target self._products = Products() self._buildroot = get_buildroot() self._source_roots = SourceRootConfig.global_instance().get_source_roots() self._lock = OwnerPrintingInterProcessFileLock(os.path.join(self._buildroot, '.pants.workdir.file_lock')) self._java_sysprops = None # Computed lazily. self.requested_goals = requested_goals or [] self._console_outstream = console_outstream or sys.stdout self._scm = scm or get_scm() self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None) self._replace_targets(target_roots) self._invalidation_report = invalidation_report self._scheduler = scheduler @property def options(self): """Returns the new-style options. :API: public """ return self._options @property def log(self): """Returns the preferred logger for goals to use. :API: public """ return self._log @property def products(self): """Returns the Products manager for the current run. 
:API: public """ return self._products @property def source_roots(self): """Returns the :class:`pants.source.source_root.SourceRoots` instance for the current run. :API: public """ return self._source_roots @property def target_roots(self): """Returns the targets specified on the command line. This set is strictly a subset of all targets in play for the run as returned by self.targets(). Note that for a command line invocation that uses wildcard selectors : or ::, the targets globbed by the wildcards are considered to be target roots. :API: public """ return self._target_roots @property def console_outstream(self): """Returns the output stream to write console messages to. :API: public """ return self._console_outstream @property def
(self): """Returns the current workspace's scm, if any. :API: public """ return self._scm @property def workspace(self): """Returns the current workspace, if any.""" return self._workspace @property def invalidation_report(self): return self._invalidation_report def __str__(self): ident = Target.identify(self.targets()) return 'Context(id:{}, targets:{})'.format(ident, self.targets()) def set_resulting_graph_size_in_runtracker(self): """Sets the resulting graph size in the run tracker's daemon stats object.""" node_count = self._scheduler.graph_len() self.run_tracker.pantsd_stats.set_resulting_graph_size(node_count) return node_count def submit_background_work_chain(self, work_chain, parent_workunit_name=None): """ :API: public """ background_root_workunit = self.run_tracker.get_background_root_workunit() if parent_workunit_name: # We have to keep this workunit alive until all its child work is done, so # we manipulate the context manually instead of using it as a contextmanager. # This is slightly funky, but the with-context usage is so pervasive and # useful elsewhere that it's worth the funkiness in this one place. workunit_parent_ctx = self.run_tracker.new_workunit_under_parent( name=parent_workunit_name, labels=[WorkUnitLabel.MULTITOOL], parent=background_root_workunit) workunit_parent = workunit_parent_ctx.__enter__() done_hook = lambda: workunit_parent_ctx.__exit__(None, None, None) else: workunit_parent = background_root_workunit # Run directly under the root. done_hook = None self.run_tracker.background_worker_pool().submit_async_work_chain( work_chain, workunit_parent=workunit_parent, done_hook=done_hook) def background_worker_pool(self): """Returns the pool to which tasks can submit background work. :API: public """ return self.run_tracker.background_worker_pool() def subproc_map(self, f, items): """Map function `f` over `items` in subprocesses and return the result. :API: public :param f: A multiproc-friendly (importable) work function. :param items: A iterable of pickleable arguments to f. """ try: # Pool.map (and async_map().get() w/o timeout) can miss SIGINT. # See: http://stackoverflow.com/a/1408476, http://bugs.python.org/issue8844 # Instead, we map_async(...), wait *with a timeout* until ready, then .get() # NB: in 2.x, wait() with timeout wakes up often to check, burning CPU. Oh well. res = SubprocPool.foreground().map_async(f, items) while not res.ready(): res.wait(60) # Repeatedly wait for up to a minute. if not res.ready(): self.log.debug('subproc_map result still not ready...') return res.get() except KeyboardInterrupt: SubprocPool.shutdown(True) raise @contextmanager def new_workunit(self, name, labels=None, cmd='', log_config=None): """Create a new workunit under the calling thread's current workunit. :API: public """ with self.run_tracker.new_workunit(name=name, labels=labels, cmd=cmd, log_config=log_config) as workunit: yield workunit def acquire_lock(self): """ Acquire the global lock for the root directory associated with this context. When a goal requires serialization, it will call this to acquire the lock. :API: public """ if self.options.for_global_scope().lock: if not self._lock.acquired: self._lock.acquire() def release_lock(self): """Release the global lock if it's held. Returns True if the lock was held before this call. :API: public """ if not self._lock.acquired: return False else: self._lock.release() return True def is_unlocked(self): """Whether the global lock object is actively holding the lock. 
:API: public """ return not self._lock.acquired def _replace_targets(self, target_roots): # Replaces all targets in the context with the given roots and their transitive dependencies. # # If another task has already retrieved the current targets, mutable state may have been # initialized somewhere, making it now unsafe to replace targets. Thus callers of this method # must know what they're doing! # # TODO(John Sirois): This currently has only 1 use (outside ContextTest) in pantsbuild/pants and # only 1 remaining known use case in the Foursquare codebase that will be able to go away with # the post RoundEngine engine - kill the method at that time. self._target_roots = list(target_roots) def add_new_target(self, address, target_type, target_base=None, dependencies=None, derived_from=None, **kwargs): """Creates a new target, adds it to the context and returns it. This method ensures the target resolves files against the given target_base, creating the directory if needed and registering a source root. :API: public """ rel_target_base = target_base or address.spec_path abs_target_base = os.path.join(get_buildroot(), rel_target_base) if not os.path.exists(abs_target_base): os.makedirs(abs_target_base) # TODO: Adding source roots on the fly like this is yucky, but hopefully this # method will go away entirely under the new engine. It's primarily used for injecting # synthetic codegen targets, and that isn't how codegen will work in the future. if not self.source_roots.find_by_path(rel_target_base): # TODO: Set the lang and root category (source/test/thirdparty) based on the target type? self.source_roots.add_source_root(rel_target_base) if dependencies: dependencies = [dep.address for dep in dependencies] self.build_graph.inject_synthetic_target(address=address, target_type=target_type, dependencies=dependencies, derived_from=derived_from, **kwargs) new_target = self.build_graph.get_target(address) return new_target def targets(self, predicate=None, **kwargs): """Selects targets in-play in this run from the target roots and their transitive dependencies. Also includes any new synthetic targets created from the target roots or their transitive dependencies during the course of the run. See Target.closure_for_targets for remaining parameters. :API: public :param predicate: If specified, the predicate will be used to narrow the scope of targets returned. :param bool postorder: `True` to gather transitive dependencies with a postorder traversal; `False` or preorder by default. :returns: A list of matching targets. """ target_set = self._collect_targets(self.target_roots, **kwargs) synthetics = OrderedSet() for synthetic_address in self.build_graph.synthetic_addresses: if self.build_graph.get_concrete_derived_from(synthetic_address) in target_set: synthetics.add(self.build_graph.get_target(synthetic_address)) target_set.update(self._collect_targets(synthetics, **kwargs)) return filter(predicate, target_set) def _collect_targets(self, root_targets, **kwargs): return Target.closure_for_targets( target_roots=root_targets, **kwargs ) def dependents(self, on_predicate=None, from_predicate=None): """Returns a map from targets that satisfy the from_predicate to targets they depend on that satisfy the on_predicate. 
:API: public """ core = set(self.targets(on_predicate)) dependees = defaultdict(set) for target in self.targets(from_predicate): for dependency in target.dependencies: if dependency in core: dependees[target].add(dependency) return dependees def resolve(self, spec): """Returns an iterator over the target(s) the given address points to. :API: public """ return self.build_graph.resolve(spec) def scan(self, root=None): """Scans and parses all BUILD files found under ``root``. Only BUILD files found under ``root`` are parsed as roots in the graph, but any dependencies of targets parsed in the root tree's BUILD files will be followed and this may lead to BUILD files outside of ``root`` being parsed and included in the returned build graph. :API: public :param string root: The path to scan; by default, the build root. :returns: A new build graph encapsulating the targets found. """ build_graph = self.build_graph.clone_new() for address in self.address_mapper.scan_addresses(root): build_graph.inject_address_closure(address) return build_graph
scm
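# A short sketch of how a task body might use the Context API above. The
# `execute` function and its predicate are illustrative, not part of Pants;
# only Context methods documented in context.py are called.
from pants.base.workunit import WorkUnitLabel


def execute(context):
  # Narrow the in-play target closure with a predicate, per Context.targets().
  python_targets = context.targets(lambda tgt: tgt.has_sources('.py'))
  with context.new_workunit(name='process-python', labels=[WorkUnitLabel.TASK]):
    for tgt in python_targets:
      context.log.info('processing ', tgt.address.spec)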
loadCanvas.js
function loadCanvas(player){
    let canvas = document.getElementById("canvas");
    let ctx = canvas.getContext("2d");

    let background = new Image();
    let target = new Image();
    let sight = new Image();

    ctx.font = "24px serif";
    ctx.fillStyle = "yellow";

    let targetObj = {
        speed: 50,
        x: 0,
        y: 100,
        width: 300,
        height: 500
    };

    let mouse = {
        x: 0,
        y: 100
    };

    background.src = "imgs/west.jpg";
    target.src = "imgs/target.png";
    sight.src = "imgs/sight.png";

    let backgroundPromise = new Promise((resolve, reject) => {
        $(background).on('load', resolve);
    });
    let targetPromise = new Promise((resolve, reject) => {
        $(target).on('load', resolve);
    });
    let sightPromise = new Promise((resolve, reject) => {
        $(sight).on('load', resolve);
    });

    // Wait for all three images before the first draw. Note: this originally
    // passed the raw `sight` Image instead of `sightPromise`, so Promise.all
    // resolved without waiting for the sight image to load.
    Promise.all([backgroundPromise, targetPromise, sightPromise]).then(() => {
        drawCanvas(canvas.width / 2, canvas.height / 2);
    });

    function drawCanvas(mouseX, mouseY){
        ctx.drawImage(background, 0, 0, 800, 600);
        ctx.drawImage(target, targetObj.x, targetObj.y, targetObj.width, targetObj.height);
        ctx.fillText(`Player: ${player.name}`, 650, 25);
        ctx.fillText(`Money: ${player.money}`, 650, 50);
        ctx.fillText(`Bullets: ${player.bullets}`, 650, 75);
        if(player.bullets === 0){
            ctx.font = "48px serif";
            ctx.fillStyle = "red";
            ctx.fillText(`Reload!`, canvas.width/2 - 50, canvas.height/2);
            ctx.font = "24px serif";
            ctx.fillStyle = "yellow";
        }
        ctx.drawImage(sight, mouseX, mouseY, 50, 50);
    }

    $(canvas).mousemove(function(event) {
        mouse.x = event.clientX - 35;
        mouse.y = event.clientY - 20;
        drawCanvas(mouse.x, mouse.y);
    });

    // NB: client coordinates are compared directly against canvas-space
    // coordinates, which assumes the canvas sits at the page origin.
    $(canvas).click(function(event) {
        if(player.bullets > 0){
            if(event.clientX > targetObj.x && event.clientX < targetObj.x + targetObj.width
                && event.clientY > targetObj.y && event.clientY < targetObj.y + targetObj.height){
                player.money += 20;
            }
            player.bullets--;
        }
    });

    // Set a property on the canvas to allow for clearing the interval from outside
    canvas.intervalId = setInterval(moveTarget, 100);

    function
(){ if(targetObj.speed > 0 && targetObj.x + targetObj.width >= canvas.width){ targetObj.speed = 0 - targetObj.speed; } else if(targetObj.speed < 0 && targetObj.x <= 0){ targetObj.speed = 0 - targetObj.speed; } targetObj.x += targetObj.speed; drawCanvas(mouse.x,mouse.y) } return player; }
moveTarget
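// A small caller sketch for loadCanvas above. The player fields mirror what
// drawCanvas renders; `stopGame` is illustrative and relies on the intervalId
// property that loadCanvas stashes on the canvas element.
const player = loadCanvas({ name: 'Tex', money: 0, bullets: 6 });

// Stop the target animation from outside loadCanvas by clearing the interval
// stored on the canvas (see the comment near setInterval above).
function stopGame() {
  const canvas = document.getElementById('canvas');
  clearInterval(canvas.intervalId);
}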
conftest.py
import os import requests import pymysql import pytest from flask import url_for from solarforecastarbiter.datamodel import QualityFlagFilter as QFF from sfa_dash import create_app BASE_URL = 'http://localhost' resample_threshold = QFF.resample_threshold_percentage @pytest.fixture(scope='session') def auth_token(): token_req = requests.post( 'https://solarforecastarbiter.auth0.com/oauth/token', headers={'content-type': 'application/json'}, data=('{"grant_type": "password", ' '"username": "[email protected]",' '"password": "Thepassword123!", ' '"audience": "https://api.solarforecastarbiter.org", ' '"client_id": "c16EJo48lbTCQEhqSztGGlmxxxmZ4zX7"}')) if token_req.status_code != 200: pytest.skip('Cannot retrieve valid Auth0 token') else: token = token_req.json() return token @pytest.fixture() def expired_token(): stored = {'access_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJpc3MiOiJodHRwczovL3NvbGFyZm9yZWNhc3RhcmJpdGVyLmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw1YmUzNDNkZjcwMjU0MDYyMzc4MjBiODUiLCJhdWQiOlsiaHR0cHM6Ly9hcGkuc29sYXJmb3JlY2FzdGFyYml0ZXIub3JnIiwiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vdXNlcmluZm8iXSwiaWF0IjoxNTU1NDU0NzcwLCJleHAiOjE1NTU0NjU1NzAsImF6cCI6IlByRTM5QWR0R01QSTRnSzJoUnZXWjJhRFJhcmZwZzdBIiwic2NvcGUiOiJvcGVuaWQgcHJvZmlsZSBlbWFpbCBvZmZsaW5lX2FjY2VzcyJ9.lT1XPtLkYCVGUZjcAgWFCU6AJbKWtE077zw_KO4fhIaF0wo6TTpLTkZBmF9Sxmrwb5NfeR5XuJmkX3SPCjpzcZG9wdXIpPWRGhsOAAUdoSkoHKFzALoc46VPjA3A5SZxlGqNeh6RoKFlWRAp5EJN9Z-JcwT06JyJGrbx7ip4tCbAADqWuDY2tzkjKD3EHjHTO9OSJiCRxlNA22OCfMTF6B8-8RLUabZ414bypezw83S9g25mLLWtlGhQvzWGA8F7yhhVXbEsAPPC1QoyjevXzn8PBqL5dSDp6u1gL6T29PsbhZ0diZ1xt5jkm4iX-cryc7tqwq-5D5ZkC3wbhNpLuQ', 'refresh_token': 'QlLHR9wyFS5cokItX-ym7jWlCCuLO1fC3AtZLUeDVX-mI', 'id_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJuaWNrbmFtZSI6InRlc3RpbmciLCJuYW1lIjoidGVzdGluZ0Bzb2xhcmZvcmVjYXN0YXJiaXRlci5vcmciLCJwaWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvY2MxMTNkZjY5NmY4ZTlmMjA2Nzc5OTQzMzUxNzRhYjY_cz00ODAmcj1wZyZkPWh0dHBzJTNBJTJGJTJGY2RuLmF1dGgwLmNvbSUyRmF2YXRhcnMlMkZ0ZS5wbmciLCJ1cGRhdGVkX2F0IjoiMjAxOS0wNC0xNlQyMjo0NjoxMC42NTdaIiwiZW1haWwiOiJ0ZXN0aW5nQHNvbGFyZm9yZWNhc3RhcmJpdGVyLm9yZyIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiaXNzIjoiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vIiwic3ViIjoiYXV0aDB8NWJlMzQzZGY3MDI1NDA2MjM3ODIwYjg1IiwiYXVkIjoiUHJFMzlBZHRHTVBJNGdLMmhSdldaMmFEUmFyZnBnN0EiLCJpYXQiOjE1NTU0NTQ3NzAsImV4cCI6MTU1NTQ5MDc3MH0.axw45-ms_LVIS_WsUdcCryZeOwpZVAn95zbUm9WO23bpIja7QaR1h6_Emb9nUNJIk44vp-J-zwKIZd4j7bg5_vaVcJER4_rL8vlc6f5lJdZAU20yeisTT4q1YcwlvQhg7avWMUkZaiO3SgK0eJ3371Gm2gJgK2b21bnpzmUHQ0vS906GLGngaVzb3VEE_g4CgR4u6qmBQRwq3Z6DyRBq572Qhn3TXk_0Xvj43Q9TyYjV5ioou5Xe-3T5HHsCoUWqDp0BZ3bP9FlYFws9DffnFzf1yVtpwfk9shmAe8V6Fn9N0OjuS4LJP0Tc-I7adspJlYfB9BeTEci6MKn58OQCrw', 'scope': ['openid', 'profile', 'email', 'offline_access'], 'expires_in': 0, 'token_type': 'Bearer', 'expires_at': 1555465570.9597309} # NOQA return stored @pytest.fixture() def mocked_storage(mocker, auth_token, expired_token): def make_storage(authenticated=False): if authenticated: token = auth_token else: token = expired_token class fake_storage: def __init__(*args, **kwargs): pass def get(self, *args): return token def set(self, *args): pass def delete(self, *args): pass return fake_storage return make_storage @pytest.fixture() def mocked_unauth_storage(mocker, mocked_storage): mocker.patch('sfa_dash.session_storage', new=mocked_storage()) 
@pytest.fixture() def mocked_auth_storage(mocker, mocked_storage): mocker.patch('sfa_dash.session_storage', new=mocked_storage(True)) @pytest.fixture() def app_unauth(mocked_unauth_storage): os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1' return create_app('sfa_dash.config.TestConfig') @pytest.fixture() def app(mocked_auth_storage): os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1' return create_app('sfa_dash.config.TestConfig') @pytest.fixture() def client(app): yield app.test_client() no_arg_routes_list = [ '/sites/', '/observations/', '/forecasts/single/', '/forecasts/cdf/', '/reports/', '/aggregates/', '/sites/create', '/reports/deterministic/create', '/reports/event/create', '/aggregates/create', ] @pytest.fixture(params=no_arg_routes_list) def no_arg_route(request): return request.param admin_routes_list = [ '/admin/permissions/create/cdf_forecast_group', '/admin/permissions/create/observation', '/admin/permissions/create/forecast', '/admin/permissions/create/report', '/admin/permissions/create/site', '/admin/roles/create', '/admin/permissions/', '/admin/roles/', '/admin/users/', ] @pytest.fixture(params=admin_routes_list) def admin_route(request): return request.param admin_multiarg_route_list = [ '/admin/permissions/{permission_id}/remove/{object_id}', '/admin/roles/{role_id}/remove/{permission_id}', '/admin/users/{user_id}/remove/{role_id}', ] @pytest.fixture(params=admin_multiarg_route_list) def admin_multiarg_route(request): def fn(object_id, permission_id, user_id, role_id): return request.param.format( object_id=object_id, permission_id=permission_id, user_id=user_id, role_id=role_id) return fn user_id_route_list = [ '/admin/users/{user_id}', '/admin/users/{user_id}/add/', ] @pytest.fixture(params=user_id_route_list) def user_id_route(request): def
(user_id): return request.param.format(user_id=user_id) return fn role_id_route_list = [ '/admin/roles/{role_id}', '/admin/roles/{role_id}/delete', '/admin/roles/{role_id}/add/', '/admin/roles/{role_id}/grant/', ] @pytest.fixture(params=role_id_route_list) def role_id_route(request): def fn(role_id): return request.param.format(role_id=role_id) return fn permission_id_route_list = [ '/admin/permissions/{permission_id}', '/admin/permissions/{permission_id}/delete', '/admin/permissions/{permission_id}/add', ] @pytest.fixture(params=permission_id_route_list) def permission_id_route(request): def fn(permission_id): return request.param.format(permission_id=permission_id) return fn report_id_route_list = [ '/reports/{report_id}', '/reports/{report_id}/delete', ] @pytest.fixture(params=report_id_route_list) def report_id_route(request): def fn(report_id): return request.param.format(report_id=report_id) return fn site_id_route_list = [ '/sites/{site_id}/', '/sites/{site_id}/delete', '/sites/{site_id}/forecasts/single/create', '/sites/{site_id}/forecasts/cdf/create', '/sites/{site_id}/observations/create', '/sites/{site_id}/observations/create', ] @pytest.fixture(params=site_id_route_list) def site_id_route(request): def fn(site_id): return request.param.format(site_id=site_id) return fn observation_id_route_list = [ '/observations/{observation_id}', '/observations/{observation_id}/delete', ] @pytest.fixture(params=observation_id_route_list) def observation_id_route(request): def fn(observation_id): return request.param.format(observation_id=observation_id) return fn forecast_id_route_list = [ '/forecasts/single/{forecast_id}', '/forecasts/single/{forecast_id}/delete', ] @pytest.fixture(params=forecast_id_route_list) def forecast_id_route(request): def fn(forecast_id): return request.param.format(forecast_id=forecast_id) return fn cdf_forecast_id_route_list = [ '/forecasts/cdf/{forecast_id}', '/forecasts/cdf/{forecast_id}/delete', ] @pytest.fixture(params=cdf_forecast_id_route_list) def cdf_forecast_id_route(request): def fn(forecast_id): return request.param.format(forecast_id=forecast_id) return fn cdf_forecast_single_id_routes_list = [ '/forecasts/cdf/single/{forecast_id}', ] @pytest.fixture(params=cdf_forecast_single_id_routes_list) def cdf_forecast_single_id_route(request): def fn(forecast_id): return request.param.format(forecast_id=forecast_id) return fn aggregate_id_route_list = [ '/aggregates/{aggregate_id}', '/aggregates/{aggregate_id}/delete', '/aggregates/{aggregate_id}/add', '/aggregates/{aggregate_id}/forecasts/single/create', '/aggregates/{aggregate_id}/forecasts/cdf/create', ] @pytest.fixture(params=aggregate_id_route_list) def aggregate_id_route(request): def fn(aggregate_id): return request.param.format(aggregate_id=aggregate_id) return fn clone_route_list = [ '/sites/{site_id}/clone', '/observations/{observation_id}/clone', '/forecasts/single/{forecast_id}/clone', ] @pytest.fixture(params=clone_route_list) def clone_route(request): def fn(uuids): # NOTE: expects a dict of all possible ids to use for formatting return request.param.format(**uuids) return fn @pytest.fixture() def missing_id(): return '7d2c3208-5243-11e9-8647-d663bd873d93' @pytest.fixture() def observation_id(): return '123e4567-e89b-12d3-a456-426655440000' @pytest.fixture() def cdf_forecast_group_id(): return 'ef51e87c-50b9-11e9-8647-d663bd873d93' @pytest.fixture() def cdf_forecast_id(): return '633f9396-50bb-11e9-8647-d663bd873d93' @pytest.fixture() def forecast_id(): return 
'11c20780-76ae-4b11-bef1-7a75bdc784e3'


@pytest.fixture()
def site_id():
    return '123e4567-e89b-12d3-a456-426655440001'


@pytest.fixture()
def site_id_plant():
    return '123e4567-e89b-12d3-a456-426655440002'


@pytest.fixture()
def test_orgid():
    return 'b76ab62e-4fe1-11e9-9e44-64006a511e6f'


@pytest.fixture()
def user_id():
    return '0c90950a-7cca-11e9-a81f-54bf64606445'


@pytest.fixture()
def aggregate_id():
    return '458ffc27-df0b-11e9-b622-62adb5fd6af0'


@pytest.fixture()
def report_id():
    return '9f290dd4-42b8-11ea-abdf-f4939feddd82'


@pytest.fixture
def all_metadata_ids(
        observation_id, forecast_id, cdf_forecast_group_id, cdf_forecast_id,
        site_id, site_id_plant, aggregate_id, report_id):
    return {
        'observation_id': observation_id,
        'forecast_id': forecast_id,
        'cdf_forecast_group_id': cdf_forecast_group_id,
        'cdf_forecast_id': cdf_forecast_id,
        'site_id': site_id,
        'site_id_plant': site_id_plant,
        'aggregate_id': aggregate_id,
        'report_id': report_id,
    }


@pytest.fixture()
def test_url(app):
    def fn(view):
        with app.test_request_context():
            return url_for(view, _external=True)
    return fn


@pytest.fixture(scope='session')
def connection():
    connection = pymysql.connect(
        host=os.getenv('MYSQL_HOST', '127.0.0.1'),
        port=int(os.getenv('MYSQL_PORT', '3306')),
        user='root',
        password='testpassword',
        database='arbiter_data',
        binary_prefix=True)
    # with no connection.commit(), no data should stay in db
    return connection


@pytest.fixture()
def cursor(connection):
    connection.rollback()
    return connection.cursor()


@pytest.fixture()
def dictcursor(connection):
    connection.rollback()
    return connection.cursor(cursor=pymysql.cursors.DictCursor)


@pytest.fixture()
def role_id(cursor):
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) from arbiter_data.roles '
        'WHERE name = "Test user role"')
    role_id = cursor.fetchone()[0]
    return role_id


@pytest.fixture()
def permission_id(cursor, role_id):
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) FROM arbiter_data.permissions '
        'WHERE id IN (SELECT permission_id FROM '
        'arbiter_data.role_permission_mapping WHERE role_id '
        '= UUID_TO_BIN(%s, 1) ) LIMIT 1', role_id)
    permission_id = cursor.fetchone()[0]
    return permission_id


@pytest.fixture()
def permission_object_type(cursor, permission_id):
    cursor.execute(
        'SELECT object_type FROM arbiter_data.permissions '
        'WHERE id = UUID_TO_BIN(%s, 1)', permission_id)
    return cursor.fetchone()[0]


@pytest.fixture()
def valid_permission_object_id(
        observation_id, forecast_id, cdf_forecast_group_id, aggregate_id,
        site_id, role_id, user_id, permission_id, report_id,
        permission_object_type):
    ot = permission_object_type
    if ot == 'forecasts':
        return forecast_id
    if ot == 'observations':
        return observation_id
    if ot == 'cdf_forecasts':
        return cdf_forecast_group_id
    if ot == 'aggregates':
        return aggregate_id
    if ot == 'sites':
        return site_id
    if ot == 'reports':
        return report_id
    if ot == 'users':
        return user_id
    if ot == 'permissions':
        return permission_id
    if ot == 'roles':
        return role_id


@pytest.fixture()
def site():
    return {
        'created_at': '2019-03-01T11:44:38+00:00',
        'elevation': 595.0,
        'extra_parameters': '{"network_api_abbreviation": "AS","network": "University of Oregon SRML","network_api_id": "94040"}',  # noqa
        'latitude': 42.19,
        'longitude': -122.7,
        'modeling_parameters': {'ac_capacity': None,
                                'ac_loss_factor': None,
                                'axis_azimuth': None,
                                'axis_tilt': None,
                                'backtrack': None,
                                'dc_capacity': None,
                                'dc_loss_factor': None,
                                'ground_coverage_ratio': None,
                                'max_rotation_angle': None,
                                'surface_azimuth': None,
                                'surface_tilt': None,
                                'temperature_coefficient': None,
                                'tracking_type': None},
'modified_at': '2019-03-01T11:44:38+00:00', 'name': 'Weather Station', 'provider': 'Organization 1', 'site_id': '123e4567-e89b-12d3-a456-426655440001', 'timezone': 'Etc/GMT+8'} @pytest.fixture() def site_with_modeling_params(): return { 'created_at': '2019-03-01T11:44:46+00:00', 'elevation': 786.0, 'extra_parameters': '', 'latitude': 43.73403, 'longitude': -96.62328, 'modeling_parameters': { 'ac_capacity': 0.015, 'ac_loss_factor': 0.0, 'axis_azimuth': None, 'axis_tilt': None, 'backtrack': None, 'dc_capacity': 0.015, 'dc_loss_factor': 0.0, 'ground_coverage_ratio': None, 'max_rotation_angle': None, 'surface_azimuth': 180.0, 'surface_tilt': 45.0, 'temperature_coefficient': -0.2, 'tracking_type': 'fixed'}, 'modified_at': '2019-03-01T11:44:46+00:00', 'name': 'Power Plant 1', 'provider': 'Organization 1', 'site_id': '123e4567-e89b-12d3-a456-426655440002', 'timezone': 'Etc/GMT+6'} @pytest.fixture() def observation(): return { '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'}, # noqa 'created_at': '2019-03-01T12:01:39+00:00', 'extra_parameters': '{"instrument": "Ascension Technology Rotating Shadowband Pyranometer", "network": "UO SRML"}', # noqa 'interval_label': 'beginning', 'interval_length': 5, 'interval_value_type': 'interval_mean', 'modified_at': '2019-03-01T12:01:39+00:00', 'name': 'GHI Instrument 1', 'observation_id': '123e4567-e89b-12d3-a456-426655440000', 'provider': 'Organization 1', 'site_id': '123e4567-e89b-12d3-a456-426655440001', 'uncertainty': 0.1, 'variable': 'ghi'} @pytest.fixture() def forecast(): return { '_links': {'aggregate': None, 'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'}, # noqa 'aggregate_id': None, 'created_at': '2019-03-01T11:55:37+00:00', 'extra_parameters': '', 'forecast_id': '11c20780-76ae-4b11-bef1-7a75bdc784e3', 'interval_label': 'beginning', 'interval_length': 5, 'interval_value_type': 'interval_mean', 'issue_time_of_day': '06:00', 'lead_time_to_start': 60, 'modified_at': '2019-03-01T11:55:37+00:00', 'name': 'DA GHI', 'provider': 'Organization 1', 'run_length': 1440, 'site_id': '123e4567-e89b-12d3-a456-426655440001', 'variable': 'ghi'} @pytest.fixture() def cdf_forecast(): return { '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'}, # noqa 'aggregate_id': None, 'axis': 'y', 'constant_values': [{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa 'values': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values'}, # noqa 'constant_value': 5.0, 'forecast_id': '633f9396-50bb-11e9-8647-d663bd873d93'}, {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa 'values': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values'}, # noqa 'constant_value': 20.0, 'forecast_id': '633f9864-50bb-11e9-8647-d663bd873d93'}, {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa 'values': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values'}, # noqa 'constant_value': 50.0, 'forecast_id': '633f9b2a-50bb-11e9-8647-d663bd873d93'}, {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa 'values': 
'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values'}, # noqa 'constant_value': 80.0, 'forecast_id': '633f9d96-50bb-11e9-8647-d663bd873d93'}, {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa 'values': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values'}, # noqa 'constant_value': 95.0, 'forecast_id': '633fa548-50bb-11e9-8647-d663bd873d93'}], 'created_at': '2019-03-02T14:55:37+00:00', 'extra_parameters': '', 'forecast_id': 'ef51e87c-50b9-11e9-8647-d663bd873d93', 'interval_label': 'beginning', 'interval_length': 5, 'interval_value_type': 'interval_mean', 'issue_time_of_day': '06:00', 'lead_time_to_start': 60, 'modified_at': '2019-03-02T14:55:37+00:00', 'name': 'DA GHI', 'provider': 'Organization 1', 'run_length': 1440, 'site_id': '123e4567-e89b-12d3-a456-426655440001', 'variable': 'ghi'} @pytest.fixture() def aggregate(): return { 'aggregate_id': '458ffc27-df0b-11e9-b622-62adb5fd6af0', 'aggregate_type': 'mean', 'created_at': '2019-09-24T12:00:00+00:00', 'description': 'ghi agg', 'extra_parameters': 'extra', 'interval_label': 'ending', 'interval_length': 60, 'interval_value_type': 'interval_mean', 'modified_at': '2019-09-24T12:00:00+00:00', 'name': 'Test Aggregate ghi', 'observations': [ {'_links': {'observation': 'http://localhost:5000/observations/123e4567-e89b-12d3-a456-426655440000/metadata'}, # noqa 'created_at': '2019-09-25T00:00:00+00:00', 'effective_from': '2019-01-01T00:00:00+00:00', 'effective_until': None, 'observation_deleted_at': None, 'observation_id': '123e4567-e89b-12d3-a456-426655440000'}, {'_links': {'observation': 'http://localhost:5000/observations/e0da0dea-9482-4073-84de-f1b12c304d23/metadata'}, # noqa 'created_at': '2019-09-25T00:00:00+00:00', 'effective_from': '2019-01-01T00:00:00+00:00', 'effective_until': None, 'observation_deleted_at': None, 'observation_id': 'e0da0dea-9482-4073-84de-f1b12c304d23'}, {'_links': {'observation': 'http://localhost:5000/observations/b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2/metadata'}, # noqa 'created_at': '2019-09-25T00:00:00+00:00', 'effective_from': '2019-01-01T00:00:00+00:00', 'effective_until': None, 'observation_deleted_at': None, 'observation_id': 'b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2'}], 'provider': 'Organization 1', 'timezone': 'America/Denver', 'variable': 'ghi'} @pytest.fixture() def report(): return { 'created_at': '2020-01-22T13:48:00+00:00', 'modified_at': '2020-01-22T13:50:00+00:00', 'provider': 'Organization 1', 'raw_report': { 'data_checksum': None, 'generated_at': '2019-07-01T12:00:00+00:00', 'messages': [ {'function': 'fcn', 'level': 'error', 'message': 'FAILED', 'step': 'dunno'}], 'metrics': [], 'plots': None, 'processed_forecasts_observations': [], 'timezone': 'Etc/GMT+8', 'versions': []}, 'report_id': '9f290dd4-42b8-11ea-abdf-f4939feddd82', 'report_parameters': { 'categories': ['total', 'date'], 'end': '2019-06-01T06:59:00+00:00', 'filters': [{'quality_flags': ['USER FLAGGED'], 'discard_before_resample': True, 'resample_threshold_percentage': resample_threshold, }, {'quality_flags': ['STALE VALUES'], 'discard_before_resample': True, 'resample_threshold_percentage': resample_threshold, }], 'metrics': ['mae', 'rmse'], 'name': 'NREL MIDC OASIS GHI Forecast Analysis', 'object_pairs': [ {'forecast': '11c20780-76ae-4b11-bef1-7a75bdc784e3', 'observation': '123e4567-e89b-12d3-a456-426655440000', 'reference_forecast': None, 'uncertainty': None, 'forecast_type': 'forecast', }], 
'start': '2019-04-01T07:00:00+00:00', 'costs': [{ 'name': 'example cost', 'type': 'constant', 'parameters': { "cost": 1.1, "aggregation": "sum", "net": False, }, }], }, 'status': 'failed', 'values': [ {'id': 'a2b6ed14-42d0-11ea-aa3c-f4939feddd82', 'object_id': '123e4567-e89b-12d3-a456-426655440000', 'processed_values': 'superencodedvalues'}] }
fn
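The fixtures above only build URLs; they say nothing about how a test consumes them. A minimal usage sketch follows, assuming a Flask test `client` fixture and an unauthenticated 302/401 response, neither of which appears in the excerpt, so treat both as illustrative.

# Hypothetical test showing how the parametrized route fixtures compose with
# the id fixtures; `client` (a Flask test client fixture) and the expected
# status codes are assumptions, not part of the original conftest.
def test_site_routes_reject_anonymous_users(client, site_id_route, site_id):
    # site_id_route fills in one '/sites/{site_id}/...' template per parametrization
    url = site_id_route(site_id)
    resp = client.get(url)
    # an unauthenticated request is assumed to redirect to login or be denied
    assert resp.status_code in (302, 401)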
app.py
import asyncio
import logging
from typing import Any, Dict

import sentry_sdk
from fastapi import BackgroundTasks, FastAPI
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

import elaspic_rest_api
from elaspic_rest_api import config
from elaspic_rest_api import jobsubmitter as js
from elaspic_rest_api.types import DataIn

logger = logging.getLogger(__name__)

description = """\
This page lists `ELASPIC` REST API endpoints that are available for evaluating
the effect of mutations on protein stability and protein interaction affinity.

Please see the source code repository for more information:
<https://gitlab.com/elaspic/elaspic-rest-api/>.
"""

app = FastAPI(
    title="ELASPIC REST API",
    description=description,
    version=elaspic_rest_api.__version__,
    root_path=config.ROOT_PATH,
)

js_data: Dict[str, Any] = {}


@app.post("/", status_code=200)
async def
(data_in: DataIn, background_tasks: BackgroundTasks):
    if data_in.api_token == config.API_TOKEN:
        background_tasks.add_task(js.submit_job, data_in, js_data["ds"])
        return {"status": "submitted"}
    else:
        return {"status": "restricted"}


@app.get("/status", status_code=200)
async def get_pre_qsub_queue(api_token: str):
    queues_to_monitor = [
        "pre_qsub_queue",
        "qsub_queue",
        "validation_queue",
        "elaspic2_pending_queue",
        "elaspic2_running_queue",
    ]
    ds: js.DataStructures = js_data["ds"]
    if api_token == config.API_TOKEN:
        result = {
            **{name: list(getattr(ds, name)._queue) for name in queues_to_monitor},
            "monitored_jobs": [
                (tuple(key), list(values)) for key, values in ds.monitored_jobs.items()
            ],
        }
    else:
        result = {}
    return result


@app.get("/_ah/warmup", include_in_schema=False)
def warmup():
    return {}


@app.on_event("startup")
async def on_startup() -> None:
    js_data["ds"] = js.DataStructures()
    js_data["js_task"] = asyncio.create_task(
        js.start_jobsubmitter(js_data["ds"]), name="jobsubmitter"
    )
    await asyncio.sleep(0.1)
    js_task = js_data["js_task"]
    if js_task.done() and (error := js_task.exception()):
        js_task.print_stack()
        # asyncio.Task exposes its name through get_name(), not a .name attribute.
        logger.error("Task %s finished with an error: %s", js_task.get_name(), error)


@app.on_event("shutdown")
async def on_shutdown() -> None:
    js_task = js_data["js_task"]
    js_task.cancel()
    if js_task.done() and (error := js_task.exception()):
        js_task.print_stack()
        logger.error("Task %s finished with an error: %s", js_task.get_name(), error)
    await js.finalize_lingering_jobs(js_data["ds"])
    try:
        await js_task
    except asyncio.CancelledError:
        # Expected: the task was cancelled above.
        pass


if config.SENTRY_DSN:
    sentry_sdk.init(config.SENTRY_DSN, traces_sample_rate=1.0)
    app = SentryAsgiMiddleware(app)  # type: ignore
submit_job
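A hedged client-side sketch for the two endpoints above; `httpx`, the base URL, and the request payload shape beyond `api_token` are assumptions (the app only shows that `DataIn` carries an `api_token` field, so a real payload may require more).

# Illustrative client calls; BASE_URL and the payload beyond api_token are
# assumptions, not part of the service definition above.
import httpx

BASE_URL = "http://localhost:8000"  # assumed deployment address

# POST / schedules js.submit_job in the background and returns immediately:
# {"status": "submitted"} on a valid token, {"status": "restricted"} otherwise.
print(httpx.post(f"{BASE_URL}/", json={"api_token": "<token>"}).json())

# GET /status dumps the in-memory queues; an invalid token yields {}.
print(httpx.get(f"{BASE_URL}/status", params={"api_token": "<token>"}).json())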
txn_executor.rs
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 //! Processor for a single transaction. use crate::{ code_cache::module_cache::{ModuleCache, VMModuleCache}, counters::*, data_cache::{RemoteCache, TransactionDataCache}, execution_stack::ExecutionStack, gas_meter::GasMeter, identifier::{create_access_path, resource_storage_key}, loaded_data::{ function::{FunctionRef, FunctionReference}, loaded_module::LoadedModule, }, }; use solana_libra_bytecode_verifier::{VerifiedModule, VerifiedScript}; use solana_libra_types::{ access_path::AccessPath, account_address::AccountAddress, account_config, byte_array::ByteArray, contract_event::ContractEvent, event::EventKey, identifier::{IdentStr, Identifier}, language_storage::ModuleId, transaction::{ TransactionArgument, TransactionOutput, TransactionStatus, MAX_TRANSACTION_SIZE_IN_BYTES, }, vm_error::{StatusCode, StatusType, VMStatus}, write_set::WriteSet, }; use solana_libra_vm::{ access::ModuleAccess, errors::*, file_format::{Bytecode, CodeOffset, CompiledScript, StructDefinitionIndex}, gas_schedule::{AbstractMemorySize, GasAlgebra, GasUnits}, transaction_metadata::TransactionMetadata, vm_string::VMString, }; use solana_libra_vm_cache_map::Arena; use solana_libra_vm_runtime_types::{ native_functions::dispatch::{dispatch_native_function, NativeReturnStatus}, value::{ReferenceValue, Struct, Value}, }; use std::{collections::VecDeque, convert::TryFrom}; // Metadata needed for resolving the account module. lazy_static! { /// The ModuleId for the Account module pub static ref ACCOUNT_MODULE: ModuleId = ModuleId::new(account_config::core_code_address(), Identifier::new("LibraAccount").unwrap()); /// The ModuleId for the Account module pub static ref BLOCK_MODULE: ModuleId = ModuleId::new(account_config::core_code_address(), Identifier::new("Block").unwrap()); /// The ModuleId for the LibraCoin module pub static ref COIN_MODULE: ModuleId = ModuleId::new(account_config::core_code_address(), Identifier::new("LibraCoin").unwrap()); /// The ModuleId for the Event pub static ref EVENT_MODULE: ModuleId = ModuleId::new(account_config::core_code_address(), Identifier::new("Event").unwrap()); /// The ModuleId for the validator set pub static ref VALIDATOR_SET_MODULE: ModuleId = ModuleId::new(account_config::core_code_address(), Identifier::new("ValidatorSet").unwrap()); } // Names for special functions. lazy_static! { static ref PROLOGUE_NAME: Identifier = Identifier::new("prologue").unwrap(); static ref EPILOGUE_NAME: Identifier = Identifier::new("epilogue").unwrap(); static ref CREATE_ACCOUNT_NAME: Identifier = Identifier::new("make").unwrap(); static ref ACCOUNT_STRUCT_NAME: Identifier = Identifier::new("T").unwrap(); static ref EMIT_EVENT_NAME: Identifier = Identifier::new("write_to_event_store").unwrap(); } fn make_access_path( module: &impl ModuleAccess, idx: StructDefinitionIndex, address: AccountAddress, ) -> AccessPath { let struct_tag = resource_storage_key(module, idx); create_access_path(&address, struct_tag) } /// A struct that executes one single transaction. /// 'alloc is the lifetime for the code cache, which is the argument type P here. Hence the P should /// live as long as alloc. /// 'txn is the lifetime of one single transaction. /// `execution_stack` contains the call stack and value stack of current execution. /// `txn_data` contains the information of this transaction, such as sender, sequence number, etc. /// `event_data` is the vector that stores all events emitted during execution. 
/// `data_view` is the scratchpad for the local writes emitted by this transaction. pub struct TransactionExecutor<'alloc, 'txn, P> where 'alloc: 'txn, P: ModuleCache<'alloc>, { #[cfg(any(test, feature = "instruction_synthesis"))] pub execution_stack: ExecutionStack<'alloc, 'txn, P>, #[cfg(not(any(test, feature = "instruction_synthesis")))] execution_stack: ExecutionStack<'alloc, 'txn, P>, gas_meter: GasMeter, txn_data: TransactionMetadata, event_data: Vec<ContractEvent>, data_view: TransactionDataCache<'txn>, } impl<'alloc, 'txn, P> TransactionExecutor<'alloc, 'txn, P> where 'alloc: 'txn, P: ModuleCache<'alloc>, { /// Create a new `TransactionExecutor` to execute a single transaction. `module_cache` is the /// cache that stores the modules previously read from the blockchain. `data_cache` is the cache /// that holds read-only connection to the state store as well as the changes made by previous /// transactions within the same block. pub fn new( module_cache: P, data_cache: &'txn dyn RemoteCache, txn_data: TransactionMetadata, ) -> Self { TransactionExecutor { execution_stack: ExecutionStack::new(module_cache), gas_meter: GasMeter::new(txn_data.max_gas_amount()), txn_data, event_data: Vec::new(), data_view: TransactionDataCache::new(data_cache), } } /// Returns the module cache for this executor. pub fn module_cache(&self) -> &P { &self.execution_stack.module_cache } /// Perform a binary operation to two values at the top of the stack. fn binop<F, T>(&mut self, f: F) -> VMResult<()> where Option<T>: From<Value>, F: FnOnce(T, T) -> Option<Value>, { let rhs = self.execution_stack.pop_as::<T>()?; let lhs = self.execution_stack.pop_as::<T>()?; let result = f(lhs, rhs); if let Some(v) = result { self.execution_stack.push(v)?; Ok(()) } else { Err(vm_error( self.execution_stack.location()?, StatusCode::ARITHMETIC_ERROR, )) } } fn binop_int<F, T>(&mut self, f: F) -> VMResult<()> where Option<T>: From<Value>, F: FnOnce(T, T) -> Option<u64>, { self.binop(|lhs, rhs| f(lhs, rhs).map(Value::u64)) } fn binop_bool<F, T>(&mut self, f: F) -> VMResult<()> where Option<T>: From<Value>, F: FnOnce(T, T) -> bool, { self.binop(|lhs, rhs| Some(Value::bool(f(lhs, rhs)))) } /// This function will execute the code sequence starting from the beginning_offset, and return /// Ok(offset) when the instruction sequence hit a branch, either by calling into a new /// function, branches, function return, etc. The return value will be the pc for the next /// instruction to be executed. #[allow(clippy::cognitive_complexity)] pub fn execute_block( &mut self, code: &[Bytecode], beginning_offset: CodeOffset, ) -> VMResult<CodeOffset> { let mut pc = beginning_offset; for instruction in &code[beginning_offset as usize..] { // FIXME: Once we add in memory ops, we will need to pass in the current memory size to // this function. self.gas_meter.calculate_and_consume( &instruction, &self.execution_stack, AbstractMemorySize::new(1), )?; match instruction { Bytecode::Pop => { self.execution_stack.pop()?; } Bytecode::Ret => { self.execution_stack.pop_call()?; if self.execution_stack.is_call_stack_empty() { return Ok(0); } else { return Ok(self.execution_stack.top_frame()?.get_pc() + 1); } } Bytecode::BrTrue(offset) => { if self.execution_stack.pop_as::<bool>()? 
{ return Ok(*offset); } } Bytecode::BrFalse(offset) => { let stack_top = self.execution_stack.pop_as::<bool>()?; if !stack_top { return Ok(*offset); } } Bytecode::Branch(offset) => return Ok(*offset), Bytecode::LdConst(int_const) => { self.execution_stack.push(Value::u64(*int_const))?; } Bytecode::LdAddr(idx) => { let top_frame = self.execution_stack.top_frame()?; let addr_ref = top_frame.module().address_at(*idx); self.execution_stack.push(Value::address(*addr_ref))?; } Bytecode::LdStr(idx) => { let top_frame = self.execution_stack.top_frame()?; let string_ref = top_frame.module().user_string_at(*idx); self.execution_stack .push(Value::string(string_ref.into()))?; } Bytecode::LdByteArray(idx) => { let top_frame = self.execution_stack.top_frame()?; let byte_array = top_frame.module().byte_array_at(*idx); self.execution_stack .push(Value::byte_array(byte_array.clone()))?; } Bytecode::LdTrue => { self.execution_stack.push(Value::bool(true))?; } Bytecode::LdFalse => { self.execution_stack.push(Value::bool(false))?; } Bytecode::CopyLoc(idx) => { let value = self.execution_stack.top_frame()?.copy_loc(*idx)?; self.execution_stack.push(value)?; } Bytecode::MoveLoc(idx) => { let value = self.execution_stack.top_frame_mut()?.move_loc(*idx)?; self.execution_stack.push(value)?; } Bytecode::StLoc(idx) => { let value = self.execution_stack.pop()?; self.execution_stack .top_frame_mut()? .store_loc(*idx, value)?; } Bytecode::Call(idx, _) => { let self_module = &self.execution_stack.top_frame()?.module(); let callee_function_ref = self .execution_stack .module_cache .resolve_function_ref(self_module, *idx)? .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR))?; if callee_function_ref.is_native() { let module = callee_function_ref.module(); let module_id = module.self_id(); let function_name = callee_function_ref.name(); let native_function = match dispatch_native_function(&module_id, function_name) { None => return Err(VMStatus::new(StatusCode::LINKER_ERROR)), Some(native_function) => native_function, }; if module_id == *EVENT_MODULE && function_name == EMIT_EVENT_NAME.as_ident_str() { let msg = self .execution_stack .pop()? .simple_serialize() .ok_or_else(|| VMStatus::new(StatusCode::DATA_FORMAT_ERROR))?; let count = self.execution_stack.pop_as::<u64>()?; let key = self.execution_stack.pop_as::<ByteArray>()?; let guid = EventKey::try_from(key.as_bytes()) .map_err(|_| VMStatus::new(StatusCode::EVENT_KEY_MISMATCH))?; // TODO: // 1. Rename the AccessPath here to a new type that represents such // globally unique id for event streams. // 2. Charge gas for the msg emitted. self.event_data.push(ContractEvent::new(guid, count, msg)) } else { let mut arguments = VecDeque::new(); let expected_args = native_function.num_args(); if callee_function_ref.arg_count() != expected_args { // Should not be possible due to bytecode verifier but this // assertion is here to make sure // the view the type checker had lines up with the // execution of the native function return Err(VMStatus::new(StatusCode::LINKER_ERROR)); } for _ in 0..expected_args { arguments.push_front(self.execution_stack.pop()?); } let (cost, return_values) = match (native_function.dispatch)(arguments) { NativeReturnStatus::InvalidArguments => { // TODO: better error return Err(VMStatus::new(StatusCode::LINKER_ERROR)); } NativeReturnStatus::Aborted { cost, error_code } => { self.gas_meter .consume_gas(GasUnits::new(cost), &self.execution_stack)?; return Err(vm_error( self.execution_stack.location()?,
} NativeReturnStatus::Success { cost, return_values, } => (cost, return_values), }; self.gas_meter .consume_gas(GasUnits::new(cost), &self.execution_stack)?; for value in return_values { self.execution_stack.push(value)?; } } // Call stack is not reconstructed for a native call, so we just // proceed on to next instruction. } else { self.execution_stack.top_frame_mut()?.save_pc(pc); self.execution_stack.push_call(callee_function_ref)?; // Call stack is reconstructed, the next instruction to execute will be the // first instruction of the callee function. Thus we should break here to // restart the instruction sequence from there. return Ok(0); } } Bytecode::MutBorrowLoc(idx) | Bytecode::ImmBorrowLoc(idx) => { let local_ref = self.execution_stack.top_frame_mut()?.borrow_loc(*idx)?; self.execution_stack.push(local_ref)?; } Bytecode::ImmBorrowField(fd_idx) | Bytecode::MutBorrowField(fd_idx) => { let field_offset = self .execution_stack .top_frame()? .module() .get_field_offset(*fd_idx)?; let reference = self.execution_stack.pop_as::<ReferenceValue>()?; let field_ref = reference.borrow_field(field_offset as usize)?; self.execution_stack.push(field_ref)?; } Bytecode::Pack(sd_idx, _) => { let self_module = self.execution_stack.top_frame()?.module(); let struct_def = self_module.struct_def_at(*sd_idx); let field_count = struct_def.declared_field_count()?; let args = self.execution_stack.popn(field_count)?; self.execution_stack .push(Value::struct_(Struct::new(args)))?; } Bytecode::Unpack(sd_idx, _) => { let self_module = self.execution_stack.top_frame()?.module(); let struct_def = self_module.struct_def_at(*sd_idx); let field_count = struct_def.declared_field_count()?; let struct_ = self.execution_stack.pop_as::<Struct>()?; for idx in 0..field_count { self.execution_stack .push(struct_.get_field_value(idx as usize)?)?; } } Bytecode::ReadRef => { let reference = self.execution_stack.pop_as::<ReferenceValue>()?; let value = reference.read_ref()?; self.execution_stack.push(value)?; } Bytecode::WriteRef => { let reference = self.execution_stack.pop_as::<ReferenceValue>()?; let value = self.execution_stack.pop()?; reference.write_ref(value); } // Arithmetic Operations Bytecode::Add => self.binop_int(u64::checked_add)?, Bytecode::Sub => self.binop_int(u64::checked_sub)?, Bytecode::Mul => self.binop_int(u64::checked_mul)?, Bytecode::Mod => self.binop_int(u64::checked_rem)?, Bytecode::Div => self.binop_int(u64::checked_div)?, Bytecode::BitOr => self.binop_int(|l: u64, r| Some(l | r))?, Bytecode::BitAnd => self.binop_int(|l: u64, r| Some(l & r))?, Bytecode::Xor => self.binop_int(|l: u64, r| Some(l ^ r))?, Bytecode::Or => self.binop_bool(|l, r| l || r)?, Bytecode::And => self.binop_bool(|l, r| l && r)?, Bytecode::Lt => self.binop_bool(|l: u64, r| l < r)?, Bytecode::Gt => self.binop_bool(|l: u64, r| l > r)?, Bytecode::Le => self.binop_bool(|l: u64, r| l <= r)?, Bytecode::Ge => self.binop_bool(|l: u64, r| l >= r)?, Bytecode::Abort => { let error_code = self.execution_stack.pop_as::<u64>()?; return Err( vm_error(self.execution_stack.location()?, StatusCode::ABORTED) .with_sub_status(error_code), ); } // TODO: Should we emit different eq for different primitive type values? // How should equality between references be defined? Should we just panic // on reference values? 
Bytecode::Eq => { let lhs = self.execution_stack.pop()?; let rhs = self.execution_stack.pop()?; self.execution_stack.push(Value::bool(lhs.equals(&rhs)?))?; } Bytecode::Neq => { let lhs = self.execution_stack.pop()?; let rhs = self.execution_stack.pop()?; self.execution_stack .push(Value::bool(lhs.not_equals(&rhs)?))?; } Bytecode::GetTxnGasUnitPrice => { self.execution_stack .push(Value::u64(self.txn_data.gas_unit_price().get()))?; } Bytecode::GetTxnMaxGasUnits => { self.execution_stack .push(Value::u64(self.txn_data.max_gas_amount().get()))?; } Bytecode::GetTxnSequenceNumber => { self.execution_stack .push(Value::u64(self.txn_data.sequence_number()))?; } Bytecode::GetTxnSenderAddress => { self.execution_stack .push(Value::address(self.txn_data.sender()))?; } Bytecode::GetTxnPublicKey => { self.execution_stack.push(Value::byte_array(ByteArray::new( self.txn_data.public_key().to_bytes().to_vec(), )))?; } Bytecode::MutBorrowGlobal(idx, _) | Bytecode::ImmBorrowGlobal(idx, _) => { let address = self.execution_stack.pop_as::<AccountAddress>()?; let curr_module = self.execution_stack.top_frame()?.module(); let ap = make_access_path(curr_module, *idx, address); if let Some(struct_def) = self.execution_stack.module_cache.resolve_struct_def( curr_module, *idx, &self.gas_meter, )? { let global_ref = self.data_view.borrow_global(&ap, struct_def)?; self.gas_meter.calculate_and_consume( &instruction, &self.execution_stack, global_ref.size(), )?; self.execution_stack.push(Value::global_ref(global_ref))?; } else { return Err(VMStatus::new(StatusCode::LINKER_ERROR)); } } Bytecode::Exists(idx, _) => { let address = self.execution_stack.pop_as::<AccountAddress>()?; let curr_module = self.execution_stack.top_frame()?.module(); let ap = make_access_path(curr_module, *idx, address); if let Some(struct_def) = self.execution_stack.module_cache.resolve_struct_def( curr_module, *idx, &self.gas_meter, )? { let (exists, mem_size) = self.data_view.resource_exists(&ap, struct_def)?; self.gas_meter.calculate_and_consume( &instruction, &self.execution_stack, mem_size, )?; self.execution_stack.push(Value::bool(exists))?; } else { return Err(VMStatus::new(StatusCode::LINKER_ERROR)); } } Bytecode::MoveFrom(idx, _) => { let address = self.execution_stack.pop_as::<AccountAddress>()?; let curr_module = self.execution_stack.top_frame()?.module(); let ap = make_access_path(curr_module, *idx, address); if let Some(struct_def) = self.execution_stack.module_cache.resolve_struct_def( curr_module, *idx, &self.gas_meter, )? { let resource = self.data_view.move_resource_from(&ap, struct_def)?; self.gas_meter.calculate_and_consume( &instruction, &self.execution_stack, resource.size(), )?; self.execution_stack.push(resource)?; } else { return Err(VMStatus::new(StatusCode::LINKER_ERROR)); } } Bytecode::MoveToSender(idx, _) => { let curr_module = self.execution_stack.top_frame()?.module(); let ap = make_access_path(curr_module, *idx, self.txn_data.sender()); if let Some(struct_def) = self.execution_stack.module_cache.resolve_struct_def( curr_module, *idx, &self.gas_meter, )? 
{ let resource = self.execution_stack.pop_as::<Struct>()?; self.gas_meter.calculate_and_consume( &instruction, &self.execution_stack, resource.size(), )?; self.data_view.move_resource_to(&ap, struct_def, resource)?; } else { return Err(VMStatus::new(StatusCode::LINKER_ERROR)); } } Bytecode::CreateAccount => { let addr = self.execution_stack.pop_as::<AccountAddress>()?; self.create_account(addr)?; } Bytecode::FreezeRef => { // FreezeRef should just be a null op as we don't distinguish between mut and // immut ref at runtime. } Bytecode::Not => { let top = self.execution_stack.pop_as::<bool>()?; self.execution_stack.push(Value::bool(!top))?; } Bytecode::GetGasRemaining => { self.execution_stack .push(Value::u64(self.gas_meter.remaining_gas().get()))?; } } pc += 1; } if cfg!(test) || cfg!(feature = "instruction_synthesis") { // In order to test the behavior of an instruction stream, hitting end of the code // should report no error so that we can check the locals. Ok(code.len() as CodeOffset) } else { Err(VMStatus::new(StatusCode::PC_OVERFLOW)) } } /// Convert the transaction arguments into move values and push them to the top of the stack. pub(crate) fn setup_main_args(&mut self, args: Vec<TransactionArgument>) { for arg in args.into_iter() { let push_result = self.execution_stack.push(match arg { TransactionArgument::U64(i) => Value::u64(i), TransactionArgument::Address(a) => Value::address(a), TransactionArgument::ByteArray(b) => Value::byte_array(b), TransactionArgument::String(s) => Value::string(VMString::new(s)), }); assume!(push_result.is_ok()); push_result.expect("Stack should be empty at beginning of function"); } } /// Create an account on the blockchain by calling into `CREATE_ACCOUNT_NAME` function stored /// in the `ACCOUNT_MODULE` on chain. pub fn create_account(&mut self, addr: AccountAddress) -> VMResult<()> { let account_module = self .execution_stack .module_cache .get_loaded_module(&ACCOUNT_MODULE)? .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR))?; // TODO: Currently the event counter will cause the gas cost for create account be flexible. // We either need to fix the gas stability test cases in tests or we need to come up // with some better ideas for the event counter creation. self.gas_meter.disable_metering(); // Address will be used as the initial authentication key. self.execute_function( &ACCOUNT_MODULE, &CREATE_ACCOUNT_NAME, vec![Value::byte_array(ByteArray::new(addr.to_vec()))], )?; self.gas_meter.enable_metering(); let account_resource = self.execution_stack.pop_as::<Struct>()?; let account_struct_id = account_module .struct_defs_table .get(&*ACCOUNT_STRUCT_NAME) .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR))?; let account_struct_def = self .execution_stack .module_cache .resolve_struct_def(account_module, *account_struct_id, &self.gas_meter)? .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR))?; // TODO: Adding the freshly created account's expiration date to the TransactionOutput here. let account_path = make_access_path(account_module, *account_struct_id, addr); self.data_view .move_resource_to(&account_path, account_struct_def, account_resource) } /// Run the prologue of a transaction by calling into `PROLOGUE_NAME` function stored /// in the `ACCOUNT_MODULE` on chain. pub(crate) fn run_prologue(&mut self) -> VMResult<()> { record_stats! 
{time_hist | TXN_PROLOGUE_TIME_TAKEN | { self.gas_meter.disable_metering(); let result = self.execute_function(&ACCOUNT_MODULE, &PROLOGUE_NAME, vec![]); self.gas_meter.enable_metering(); result } } } /// Run the epilogue of a transaction by calling into `EPILOGUE_NAME` function stored /// in the `ACCOUNT_MODULE` on chain. fn run_epilogue(&mut self) -> VMResult<()> { record_stats! {time_hist | TXN_EPILOGUE_TIME_TAKEN | { self.gas_meter.disable_metering(); let result = self.execute_function(&ACCOUNT_MODULE, &EPILOGUE_NAME, vec![]); self.gas_meter.enable_metering(); result } } } /// Generate the TransactionOutput on failure. There can be two possibilities: /// 1. The transaction encounters some runtime error, such as out of gas, arithmetic overflow, /// etc. In this scenario, we are going to keep this transaction and charge proper gas to the /// sender. 2. The transaction encounters a VM invariant violation, which indicates that some /// property that should have been guaranteed has failed. Such a transaction should be discarded /// for sanity, but this implies a bug in the VM that we should take care of. pub(crate) fn failed_transaction_cleanup(&mut self, result: VMResult<()>) -> TransactionOutput { // Discard all the local writes, restart execution from a clean state. self.clear(); match self.run_epilogue() { Ok(_) => match self.make_write_set(vec![], result) { Ok(trans_out) => trans_out, Err(err) => error_output(err), }, // Running epilogue shouldn't fail here as we've already checked for enough balance in // the prologue Err(err) => error_output(err), } } /// Clear all the writes local to this transaction. fn clear(&mut self) { self.data_view.clear(); self.event_data.clear(); } /// Generate the TransactionOutput for a successful transaction pub(crate) fn transaction_cleanup( &mut self, to_be_published_modules: Vec<(ModuleId, Vec<u8>)>, ) -> TransactionOutput { // First run the epilogue match self.run_epilogue() { // If epilogue runs successfully, try to emit the writeset. Ok(_) => match self.make_write_set(to_be_published_modules, Ok(())) { // This step could fail if the program has dangling global reference Ok(trans_out) => trans_out, // In case of failure, run the cleanup code. Err(err) => self.failed_transaction_cleanup(Err(err)), }, // If the sender depleted its balance and can't pay for the gas, run the cleanup code. Err(err) => match err.status_type() { StatusType::InvariantViolation => error_output(err), _ => self.failed_transaction_cleanup(Err(err)), }, } } /// Entrypoint into the interpreter. All external calls need to be routed through this /// function. pub(crate) fn interpeter_entrypoint(&mut self, func: FunctionRef<'txn>) -> VMResult<()> { // We charge an intrinsic amount of gas based upon the size of the transaction submitted // (in raw bytes). let txn_size = self.txn_data.transaction_size; // The callers of this function verify the transaction before executing it. Transaction // verification ensures the following condition. assume!(txn_size.get() <= (MAX_TRANSACTION_SIZE_IN_BYTES as u64)); // We count the intrinsic cost of the transaction here, since that needs to also cover the // setup of the function. let starting_gas = self.gas_meter.remaining_gas().get(); self.gas_meter .charge_transaction_gas(txn_size, &self.execution_stack)?; let ret = self.execute_function_impl(func); record_stats!(observe | TXN_EXECUTION_GAS_USAGE | starting_gas); ret } /// Execute a function given a FunctionRef.
fn execute_function_impl(&mut self, func: FunctionRef<'txn>) -> VMResult<()> { let beginning_height = self.execution_stack.call_stack_height(); self.execution_stack.push_call(func)?; // We always start execution from the first instruction. let mut pc = 0; // Execute code until the stack goes back to its original height. At that time we will know // this function has terminated. while self.execution_stack.call_stack_height() != beginning_height { let code = self.execution_stack.top_frame()?.code_definition(); // Get the pc for the next instruction to be executed. pc = self.execute_block(code, pc)?; if self.execution_stack.call_stack_height() == beginning_height { return Ok(()); } } Ok(()) } /// Execute a function. /// `module` is an identifier for the name the module is stored in. `function_name` is the name /// of the function. If such function is found, the VM will execute this function with arguments /// `args`. The return value will be placed on the top of the value stack and abort if an error /// occurs. pub fn execute_function( &mut self, module: &ModuleId, function_name: &IdentStr, args: Vec<Value>, ) -> VMResult<()> { let loaded_module = match self .execution_stack .module_cache .get_loaded_module(module)? { Some(module) => module, None => return Err(VMStatus::new(StatusCode::LINKER_ERROR)), }; let func_idx = loaded_module .function_defs_table .get(function_name) .ok_or_else(|| VMStatus::new(StatusCode::LINKER_ERROR))?; let func = FunctionRef::new(loaded_module, *func_idx); for arg in args.into_iter() { self.execution_stack.push(arg)?; } self.execute_function_impl(func) } /// Execute a function with the sender set to `sender`, restoring the original sender afterward. /// This should only be used in the logic for generating the genesis block. #[allow(non_snake_case)] pub fn execute_function_with_sender_FOR_GENESIS_ONLY( &mut self, address: AccountAddress, module: &ModuleId, function_name: &IdentStr, args: Vec<Value>, ) -> VMResult<()> { let old_sender = self.txn_data.sender(); self.txn_data.sender = address; let res = self.execute_function(module, function_name, args); self.txn_data.sender = old_sender; res } /// Get the value on the top of the value stack. pub fn pop_stack(&mut self) -> VMResult<Value> { self.execution_stack.pop() } /// Produce a write set at the end of a transaction. This will clear all the local states in /// the TransactionProcessor and turn them into a writeset. pub fn make_write_set( &mut self, to_be_published_modules: Vec<(ModuleId, Vec<u8>)>, result: VMResult<()>, ) -> VMResult<TransactionOutput> { // This should only be used for bookkeeping. The gas is already deducted from the sender's // account in the account module's epilogue. let gas_used: u64 = self .txn_data .max_gas_amount .sub(self.gas_meter.remaining_gas()) .mul(self.txn_data.gas_unit_price) .get(); let write_set = self.data_view.make_write_set(to_be_published_modules)?; record_stats!(observe | TXN_TOTAL_GAS_USAGE | gas_used); Ok(TransactionOutput::new( write_set, self.event_data.clone(), gas_used, match result { Ok(()) => TransactionStatus::from(VMStatus::new(StatusCode::EXECUTED)), Err(err) => TransactionStatus::from(err), }, )) } } #[inline] fn error_output(err: VMStatus) -> TransactionOutput { // Since this transaction will be discarded, no writeset will be included. TransactionOutput::new( WriteSet::default(), vec![], 0, TransactionStatus::Discard(err), ) } /// A helper function for executing a single script. 
Will be deprecated once we have a better /// testing framework for executing arbitrary script. pub fn execute_function( caller_script: VerifiedScript, modules: Vec<VerifiedModule>, _args: Vec<TransactionArgument>, data_cache: &dyn RemoteCache, ) -> VMResult<()> { let allocator = Arena::new(); let module_cache = VMModuleCache::new(&allocator); let main_module = caller_script.into_module(); let loaded_main = LoadedModule::new(main_module); let entry_func = FunctionRef::new(&loaded_main, CompiledScript::MAIN_INDEX); let txn_metadata = TransactionMetadata::default(); for m in modules { module_cache.cache_module(m); } let mut vm = TransactionExecutor { execution_stack: ExecutionStack::new(&module_cache), gas_meter: GasMeter::new(txn_metadata.max_gas_amount()), txn_data: txn_metadata, event_data: Vec::new(), data_view: TransactionDataCache::new(data_cache), }; vm.execute_function_impl(entry_func) } #[cfg(feature = "instruction_synthesis")] impl<'alloc, 'txn, P> TransactionExecutor<'alloc, 'txn, P> where 'alloc: 'txn, P: ModuleCache<'alloc>, { /// Clear all the writes local to this transaction. pub fn clear_writes(&mut self) { self.data_view.clear(); self.event_data.clear(); } /// During cost synthesis, turn off gas metering so that we don't run out of gas. pub fn turn_off_gas_metering(&mut self) { self.gas_meter.disable_metering(); } }
StatusCode::NATIVE_FUNCTION_ERROR, ) .with_sub_status(error_code));
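In the executor above, Bytecode::Add/Sub/Mul/Div/Mod are wired to u64 checked arithmetic through binop_int, and a None result becomes ARITHMETIC_ERROR. A small Python sketch of those semantics follows; the helper name and assertions are illustrative, not part of the Rust source.

# Python sketch of the u64 checked-arithmetic semantics used by binop_int:
# an operation either yields an in-range u64 or signals an arithmetic error.
U64_MAX = 2**64 - 1

def checked_u64(op, lhs, rhs):
    # Mirrors u64::checked_add / checked_sub / checked_mul / checked_div /
    # checked_rem: return None on overflow, underflow, or division by zero.
    try:
        result = op(lhs, rhs)
    except ZeroDivisionError:
        return None  # Div/Mod by zero -> ARITHMETIC_ERROR in the VM
    return result if 0 <= result <= U64_MAX else None

assert checked_u64(lambda a, b: a + b, U64_MAX, 1) is None  # overflow
assert checked_u64(lambda a, b: a - b, 0, 1) is None        # underflow
assert checked_u64(lambda a, b: a * b, 2, 3) == 6           # in range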
test.go
// Copyright 2010 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Test cases for cgo. // Both the import "C" prologue and the main file are sorted by issue number. // This file contains C definitions (not just declarations) // and so it must NOT contain any //export directives on Go functions. // See testx.go for exports. package cgotest /* #include <complex.h> #include <math.h> #include <stdarg.h> #include <stdbool.h> #include <stddef.h> #include <stdint.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include <sys/stat.h> #include <errno.h> #cgo LDFLAGS: -lm #ifndef WIN32 #include <pthread.h> #include <signal.h> #endif // alignment tests typedef unsigned char Uint8; typedef unsigned short Uint16; typedef enum { MOD1 = 0x0000, MODX = 0x8000 } SDLMod; typedef enum { A1 = 1, B1 = 322, SDLK_LAST } SDLKey; typedef struct SDL_keysym { Uint8 scancode; SDLKey sym; SDLMod mod; Uint16 unicode; } SDL_keysym; typedef struct SDL_KeyboardEvent { Uint8 typ; Uint8 which; Uint8 state; SDL_keysym keysym; } SDL_KeyboardEvent; void makeEvent(SDL_KeyboardEvent *event) { unsigned char *p; int i; p = (unsigned char*)event; for (i=0; i<sizeof *event; i++) { p[i] = i; } } int same(SDL_KeyboardEvent* e, Uint8 typ, Uint8 which, Uint8 state, Uint8 scan, SDLKey sym, SDLMod mod, Uint16 uni) { return e->typ == typ && e->which == which && e->state == state && e->keysym.scancode == scan && e->keysym.sym == sym && e->keysym.mod == mod && e->keysym.unicode == uni; } void cTest(SDL_KeyboardEvent *event) { printf("C: %#x %#x %#x %#x %#x %#x %#x\n", event->typ, event->which, event->state, event->keysym.scancode, event->keysym.sym, event->keysym.mod, event->keysym.unicode); fflush(stdout); } // api const char *greeting = "hello, world"; // basic test cases #define SHIFT(x, y) ((x)<<(y)) #define KILO SHIFT(1, 10) #define UINT32VAL 0xc008427bU enum E { Enum1 = 1, Enum2 = 2, }; typedef unsigned char cgo_uuid_t[20]; void uuid_generate(cgo_uuid_t x) { x[0] = 0; } struct S { int x; }; const char *cstr = "abcefghijklmnopqrstuvwxyzABCEFGHIJKLMNOPQRSTUVWXYZ1234567890"; extern enum E myConstFunc(struct S* const ctx, int const id, struct S **const filter); enum E myConstFunc(struct S *const ctx, int const id, struct S **const filter) { return 0; } int add(int x, int y) { return x+y; }; // complex alignment struct { float x; _Complex float y; } cplxAlign = { 3.14, 2.17 }; // constants and pointer checking #define CheckConstVal 0 typedef struct { int *p; } CheckConstStruct; static void CheckConstFunc(CheckConstStruct *p, int e) {} // duplicate symbol int base_symbol = 0; #define alias_one base_symbol #define alias_two base_symbol // function pointer variables typedef int (*intFunc) (); int bridge_int_func(intFunc f) { return f(); } int fortytwo() { return 42; } // issue 1222 typedef union { long align; } xxpthread_mutex_t; struct ibv_async_event { union { int x; } element; }; struct ibv_context { xxpthread_mutex_t mutex; }; // issue 1635 // Mac OS X's gcc will generate scattered relocation 2/1 for // this function on Darwin/386, and 8l couldn't handle it. // this example is in issue 1635 void scatter() { void *p = scatter; printf("scatter = %p\n", p); } // Adding this explicit extern declaration makes this a test for // https://gcc.gnu.org/PR68072 aka https://golang.org/issue/13344 . // It used to cause a cgo error when building with GCC 6. 
extern int hola; // this example is in issue 3253 int hola = 0; int testHola() { return hola; } // issue 3250 #ifdef WIN32 void testSendSIG() {} #else static void *thread(void *p) { const int M = 100; int i; (void)p; for (i = 0; i < M; i++) { pthread_kill(pthread_self(), SIGCHLD); usleep(rand() % 20 + 5); } return NULL; } void testSendSIG() { const int N = 20; int i; pthread_t tid[N]; for (i = 0; i < N; i++) { usleep(rand() % 200 + 100); pthread_create(&tid[i], 0, thread, NULL); } for (i = 0; i < N; i++) pthread_join(tid[i], 0); } #endif // issue 3261 // libgcc on ARM might be compiled as thumb code, but our 5l // can't handle that, so we have to disable this test on arm. #ifdef __ARMEL__ int vabs(int x) { puts("testLibgcc is disabled on ARM because 5l cannot handle thumb library."); return (x < 0) ? -x : x; } #elif defined(__arm64__) && defined(__clang__) int vabs(int x) { puts("testLibgcc is disabled on ARM64 with clang due to lack of libgcc."); return (x < 0) ? -x : x; } #else int __absvsi2(int); // dummy prototype for libgcc function // we shouldn't name the function abs, as gcc might use // the builtin one. int vabs(int x) { return __absvsi2(x); } #endif // issue 3729 // access errno from void C function const char _expA = 0x42; const float _expB = 3.14159; const short _expC = 0x55aa; const int _expD = 0xdeadbeef; #ifdef WIN32 void g(void) {} void g2(int x, char a, float b, short c, int d) {} #else void g(void) { errno = E2BIG; } // try to pass some non-trivial arguments to function g2 void g2(int x, char a, float b, short c, int d) { if (a == _expA && b == _expB && c == _expC && d == _expD) errno = x; else errno = -1; } #endif // issue 3945 // Test that cgo reserves enough stack space during cgo call. // See https://golang.org/issue/3945 for details. void say() { printf("%s from C\n", "hello"); } // issue 4054 part 1 - other half in testx.go typedef enum { A = 0, B, C, D, E, F, G, H, II, J, } issue4054a; // issue 4339 // We've historically permitted #include <>, so test it here. Issue 29333. #include <issue4339.h> // issue 4417 // cmd/cgo: bool alignment/padding issue. // bool alignment is wrong and causing wrong arguments when calling functions. static int c_bool(bool a, bool b, int c, bool d, bool e) { return c; } // issue 4857 #cgo CFLAGS: -Werror const struct { int a; } *issue4857() { return (void *)0; } // issue 5224 // Test that the #cgo CFLAGS directive works, // with and without platform filters. #cgo CFLAGS: -DCOMMON_VALUE=123 #cgo windows CFLAGS: -DIS_WINDOWS=1 #cgo !windows CFLAGS: -DIS_WINDOWS=0 int common = COMMON_VALUE; int is_windows = IS_WINDOWS; // issue 5227 // linker incorrectly treats common symbols and // leaves them undefined. typedef struct { int Count; } Fontinfo; Fontinfo SansTypeface; extern void init(); Fontinfo loadfont() { Fontinfo f = {0}; return f; } void init() { SansTypeface = loadfont(); } // issue 5242 // Cgo incorrectly computed the alignment of structs // with no Go accessible fields as 0, and then panicked on // modulo-by-zero computations. 
typedef struct { } foo; typedef struct { int x : 1; } bar; int issue5242(foo f, bar b) { return 5242; } // issue 5337 // Verify that we can withstand SIGPROF received on foreign threads #ifdef WIN32 void test5337() {} #else static void *thread1(void *p) { (void)p; pthread_kill(pthread_self(), SIGPROF); return NULL; } void test5337() { pthread_t tid; pthread_create(&tid, 0, thread1, NULL); pthread_join(tid, 0); } #endif // issue 5603 const long long issue5603exp = 0x12345678; long long issue5603foo0() { return issue5603exp; } long long issue5603foo1(void *p) { return issue5603exp; } long long issue5603foo2(void *p, void *q) { return issue5603exp; } long long issue5603foo3(void *p, void *q, void *r) { return issue5603exp; } long long issue5603foo4(void *p, void *q, void *r, void *s) { return issue5603exp; } // issue 5740 int test5740a(void), test5740b(void); // issue 5986 static void output5986() { int current_row = 0, row_count = 0; double sum_squares = 0; double d; do { if (current_row == 10) { current_row = 0; } ++row_count; } while (current_row++ != 1); d = sqrt(sum_squares / row_count); printf("sqrt is: %g\n", d); } // issue 6128 // Test handling of #defined names in clang. // NOTE: Must use hex, or else a shortcut for decimals // in cgo avoids trying to pass this to clang. #define X 0x1 // issue 6472 typedef struct { struct { int x; } y[16]; } z; // issue 6612 // Test new scheme for deciding whether C.name is an expression, type, constant. // Clang silences some warnings when the name is a #defined macro, so test those too // (even though we now use errors exclusively, not warnings). void myfunc(void) {} int myvar = 5; const char *mytext = "abcdef"; typedef int mytype; enum { myenum = 1234, }; #define myfunc_def myfunc #define myvar_def myvar #define mytext_def mytext #define mytype_def mytype #define myenum_def myenum #define myint_def 12345 #define myfloat_def 1.5 #define mystring_def "hello" // issue 6907 char* Issue6907CopyString(_GoString_ s) { size_t n; const char *p; char *r; n = _GoStringLen(s); p = _GoStringPtr(s); r = malloc(n + 1); memmove(r, p, n); r[n] = '\0'; return r; } // issue 7560 typedef struct { char x; long y; } __attribute__((__packed__)) misaligned; int offset7560(void) { return (uintptr_t)&((misaligned*)0)->y; } // issue 7786 // No runtime test, just make sure that typedef and struct/union/class are interchangeable at compile time. struct test7786; typedef struct test7786 typedef_test7786; void f7786(struct test7786 *ctx) {} void g7786(typedef_test7786 *ctx) {} typedef struct body7786 typedef_body7786; struct body7786 { int x; }; void b7786(struct body7786 *ctx) {} void c7786(typedef_body7786 *ctx) {} typedef union union7786 typedef_union7786; void u7786(union union7786 *ctx) {} void v7786(typedef_union7786 *ctx) {} // issue 8092 // Test that linker defined symbols (e.g., text, data) don't // conflict with C symbols. char text[] = "text"; char data[] = "data"; char *ctext(void) { return text; } char *cdata(void) { return data; } // issue 8428 // Cgo inconsistently translated zero size arrays. struct issue8428one { char b; char rest[]; }; struct issue8428two { void *p; char b; char rest[0]; char pad; }; struct issue8428three { char w[1][2][3][0]; char x[2][3][0][1]; char y[3][0][1][2]; char z[0][1][2][3]; }; // issue 8331 part 1 - part 2 in testx.go // A typedef of an unnamed struct is the same struct when // #include'd twice. No runtime test; just make sure it compiles. #include "issue8331.h" // issue 8368 and 8441 // Recursive struct definitions didn't work. 
// No runtime test; just make sure it compiles. typedef struct one one; typedef struct two two; struct one { two *x; }; struct two { one *x; }; // issue 8811 extern int issue8811Initialized; extern void issue8811Init(); void issue8811Execute() { if(!issue8811Initialized) issue8811Init(); } // issue 9557 struct issue9557_t { int a; } test9557bar = { 42 }; struct issue9557_t *issue9557foo = &test9557bar; // issue 10303 // Pointers passed to C were not marked as escaping (bug in cgo). typedef int *intptr; void setintstar(int *x) { *x = 1; } void setintptr(intptr x) { *x = 1; } void setvoidptr(void *x) { *(int*)x = 1; } typedef struct Struct Struct; struct Struct { int *P; }; void setstruct(Struct s) { *s.P = 1; } // issue 11925 // Structs with zero-length trailing fields are now padded by the Go compiler. struct a11925 { int i; char a[0]; char b[0]; }; struct b11925 { int i; char a[0]; char b[]; }; // issue 12030 void issue12030conv(char *buf, double x) { sprintf(buf, "d=%g", x); } // issue 14838 int check_cbytes(char *b, size_t l) { int i; for (i = 0; i < l; i++) { if (b[i] != i) { return 0; } } return 1; } // issue 17065 // Test that C symbols larger than a page play nicely with the race detector. int ii[65537]; // issue 17537 // The void* cast introduced by cgo to avoid problems // with const/volatile qualifiers breaks C preprocessor macros that // emulate functions. typedef struct { int i; } S17537; int I17537(S17537 *p); #define I17537(p) ((p)->i) // Calling this function used to fail without the cast. const int F17537(const char **p) { return **p; } // issue 17723 // API compatibility checks typedef char *cstring_pointer; static void cstring_pointer_fun(cstring_pointer dummy) { } const char *api_hello = "hello!"; // Calling this function used to trigger an error from the C compiler // (issue 18298). void F18298(const void *const *p) { } // Test that conversions between typedefs work as they used to. typedef const void *T18298_1; struct S18298 { int i; }; typedef const struct S18298 *T18298_2; void G18298(T18298_1 t) { } // issue 18126 // cgo check of void function returning errno. void Issue18126C(void **p) {} // issue 18720 #define HELLO "hello" #define WORLD "world" #define HELLO_WORLD HELLO "\000" WORLD struct foo { char c; }; #define SIZE_OF(x) sizeof(x) #define SIZE_OF_FOO SIZE_OF(struct foo) #define VAR1 VAR #define VAR var int var = 5; #define ADDR &var #define CALL fn() int fn(void) { return ++var; } // issue 20129 int issue20129 = 0; typedef void issue20129Void; issue20129Void issue20129Foo() { issue20129 = 1; } typedef issue20129Void issue20129Void2; issue20129Void2 issue20129Bar() { issue20129 = 2; } // issue 20369 #define XUINT64_MAX 18446744073709551615ULL // issue 21668 // Fail to guess the kind of the constant "x". // No runtime test; just make sure it compiles. const int x21668 = 42; // issue 21708 #define CAST_TO_INT64 (int64_t)(-1) // issue 21809 // Compile C `typedef` to go type aliases. typedef long MySigned_t; // tests alias-to-alias typedef MySigned_t MySigned2_t; long takes_long(long x) { return x * x; } MySigned_t takes_typedef(MySigned_t x) { return x * x; } // issue 22906 // It's going to be hard to include a whole real JVM to test this. // So we'll simulate a really easy JVM using just the parts we need. // This is the relevant part of jni.h. 
struct _jobject; typedef struct _jobject *jobject; typedef jobject jclass; typedef jobject jthrowable; typedef jobject jstring; typedef jobject jarray; typedef jarray jbooleanArray; typedef jarray jbyteArray; typedef jarray jcharArray; typedef jarray jshortArray; typedef jarray jintArray; typedef jarray jlongArray; typedef jarray jfloatArray; typedef jarray jdoubleArray; typedef jarray jobjectArray; typedef jobject jweak; // Note: jvalue is already a non-pointer type due to it being a C union. // issue 22958 typedef struct { unsigned long long f8 : 8; unsigned long long f16 : 16; unsigned long long f24 : 24; unsigned long long f32 : 32; unsigned long long f40 : 40; unsigned long long f48 : 48; unsigned long long f56 : 56; unsigned long long f64 : 64; } issue22958Type; // issue 23356 int a(void) { return 5; }; int r(void) { return 3; }; // issue 23720 typedef int *issue23720A; typedef const int *issue23720B; void issue23720F(issue23720B a) {} // issue 24206 #if defined(__linux__) && defined(__x86_64__) #include <sys/mman.h> // Returns string with null byte at the last valid address char* dangerousString1() { int pageSize = 4096; char *data = mmap(0, 2 * pageSize, PROT_READ|PROT_WRITE, MAP_ANON|MAP_PRIVATE, 0, 0); mprotect(data + pageSize,pageSize,PROT_NONE); int start = pageSize - 123 - 1; // last 123 bytes of first page + 1 null byte int i = start; for (; i < pageSize; i++) { data[i] = 'x'; } data[pageSize -1 ] = 0; return data+start; } char* dangerousString2() { int pageSize = 4096; char *data = mmap(0, 3 * pageSize, PROT_READ|PROT_WRITE, MAP_ANON|MAP_PRIVATE, 0, 0); mprotect(data + 2 * pageSize,pageSize,PROT_NONE); int start = pageSize - 123 - 1; // last 123 bytes of first page + 1 null byte int i = start; for (; i < 2 * pageSize; i++) { data[i] = 'x'; } data[2*pageSize -1 ] = 0; return data+start; } #else char *dangerousString1() { return NULL; } char *dangerousString2() { return NULL; } #endif // issue 26066 const unsigned long long int issue26066 = (const unsigned long long) -1; // issue 26517 // Introduce two pointer types which are distinct, but have the same // base type. Make sure that both of those pointer types get resolved // correctly. Before the fix for 26517 if one of these pointer types // was resolved before the other one was processed, the second one // would never be resolved. // Before this issue was fixed this test failed on Windows, // where va_list expands to a named char* type. typedef va_list TypeOne; typedef char *TypeTwo; // issue 28540 static void twoargs1(void *p, int n) {} static void *twoargs2() { return 0; } static int twoargs3(void * p) { return 0; } // issue 28545 // Failed to add type conversion for negative constant. static void issue28545F(char **p, int n, complex double a) {} // issue 28772 part 1 - part 2 in testx.go // Failed to add type conversion for Go constant set to C constant. // No runtime test; just make sure it compiles. #define issue28772Constant 1 // issue 28896 // cgo was incorrectly adding padding after a packed struct. 
typedef struct { void *f1; uint32_t f2; } __attribute__((__packed__)) innerPacked; typedef struct { innerPacked g1; uint64_t g2; } outerPacked; typedef struct { void *f1; uint32_t f2; } innerUnpacked; typedef struct { innerUnpacked g1; uint64_t g2; } outerUnpacked; size_t offset(int x) { switch (x) { case 0: return offsetof(innerPacked, f2); case 1: return offsetof(outerPacked, g2); case 2: return offsetof(innerUnpacked, f2); case 3: return offsetof(outerUnpacked, g2); default: abort(); } } // issue 29748 typedef struct { char **p; } S29748; static int f29748(S29748 *p) { return 0; } // issue 29781 // Error with newline inserted into constant expression. // Compilation test only, nothing to run. static void issue29781F(char **p, int n) {} #define ISSUE29781C 0 // issue 31093 static uint16_t issue31093F(uint16_t v) { return v; } // issue 32579 typedef struct S32579 { int data[1]; } S32579; */ import "C" import ( "context" "fmt" "math" "math/rand" "os" "os/signal" "reflect" "runtime" "sync" "syscall" "testing" "time" "unsafe" ) // alignment func testAlign(t *testing.T) { var evt C.SDL_KeyboardEvent C.makeEvent(&evt) if C.same(&evt, evt.typ, evt.which, evt.state, evt.keysym.scancode, evt.keysym.sym, evt.keysym.mod, evt.keysym.unicode) == 0 { t.Error("*** bad alignment") C.cTest(&evt) t.Errorf("Go: %#x %#x %#x %#x %#x %#x %#x\n", evt.typ, evt.which, evt.state, evt.keysym.scancode, evt.keysym.sym, evt.keysym.mod, evt.keysym.unicode) t.Error(evt) } } // api const greeting = "hello, world" type testPair struct { Name string Got, Want interface{} } var testPairs = []testPair{ {"GoString", C.GoString(C.greeting), greeting}, {"GoStringN", C.GoStringN(C.greeting, 5), greeting[:5]}, {"GoBytes", C.GoBytes(unsafe.Pointer(C.greeting), 5), []byte(greeting[:5])}, } func testHelpers(t *testing.T) { for _, pair := range testPairs { if !reflect.DeepEqual(pair.Got, pair.Want) { t.Errorf("%s: got %#v, want %#v", pair.Name, pair.Got, pair.Want) } } } // basic test cases const EINVAL = C.EINVAL /* test #define */ var KILO = C.KILO func uuidgen() { var uuid C.cgo_uuid_t C.uuid_generate(&uuid[0]) } func Strtol(s string, base int) (int, error) { p := C.CString(s) n, err := C.strtol(p, nil, C.int(base)) C.free(unsafe.Pointer(p)) return int(n), err } func Atol(s string) int { p := C.CString(s) n := C.atol(p) C.free(unsafe.Pointer(p)) return int(n) } func testConst(t *testing.T) { C.myConstFunc(nil, 0, nil) } func testEnum(t *testing.T) { if C.Enum1 != 1 || C.Enum2 != 2 { t.Error("bad enum", C.Enum1, C.Enum2) } } func testAtol(t *testing.T)
func testErrno(t *testing.T) { p := C.CString("no-such-file") m := C.CString("r") f, err := C.fopen(p, m) C.free(unsafe.Pointer(p)) C.free(unsafe.Pointer(m)) if err == nil { C.fclose(f) t.Fatalf("C.fopen: should fail") } if err != syscall.ENOENT { t.Fatalf("C.fopen: unexpected error: %v", err) } } func testMultipleAssign(t *testing.T) { p := C.CString("234") n, m := C.strtol(p, nil, 345), C.strtol(p, nil, 10) if runtime.GOOS == "openbsd" { // Bug in OpenBSD strtol(3) - base > 36 succeeds. if (n != 0 && n != 239089) || m != 234 { t.Fatal("Strtol x2: ", n, m) } } else if n != 0 || m != 234 { t.Fatal("Strtol x2: ", n, m) } C.free(unsafe.Pointer(p)) } var ( cuint = (C.uint)(0) culong C.ulong cchar C.char ) type Context struct { ctx *C.struct_ibv_context } func benchCgoCall(b *testing.B) { const x = C.int(2) const y = C.int(3) for i := 0; i < b.N; i++ { C.add(x, y) } } var sinkString string func benchGoString(b *testing.B) { for i := 0; i < b.N; i++ { sinkString = C.GoString(C.cstr) } const want = "abcefghijklmnopqrstuvwxyzABCEFGHIJKLMNOPQRSTUVWXYZ1234567890" if sinkString != want { b.Fatalf("%q != %q", sinkString, want) } } // Static (build-time) test that syntax traversal visits all operands of s[i:j:k]. func sliceOperands(array [2000]int) { _ = array[C.KILO:C.KILO:C.KILO] // no type error } // set in cgo_thread_lock.go init var testThreadLockFunc = func(*testing.T) {} // complex alignment func TestComplexAlign(t *testing.T) { if C.cplxAlign.x != 3.14 { t.Errorf("got %v, expected 3.14", C.cplxAlign.x) } if C.cplxAlign.y != 2.17 { t.Errorf("got %v, expected 2.17", C.cplxAlign.y) } } // constants and pointer checking func testCheckConst(t *testing.T) { // The test is that this compiles successfully. p := C.malloc(C.size_t(unsafe.Sizeof(C.int(0)))) defer C.free(p) C.CheckConstFunc(&C.CheckConstStruct{(*C.int)(p)}, C.CheckConstVal) } // duplicate symbol func duplicateSymbols() { fmt.Printf("%v %v %v\n", C.base_symbol, C.alias_one, C.alias_two) } // environment // This is really an os package test but here for convenience. func testSetEnv(t *testing.T) { if runtime.GOOS == "windows" { // Go uses SetEnvironmentVariable on windows. However, // C runtime takes a *copy* at process startup of the // OS environment, and stores it in environ/envp. // It is this copy that getenv/putenv manipulate. 
t.Logf("skipping test") return } const key = "CGO_OS_TEST_KEY" const val = "CGO_OS_TEST_VALUE" os.Setenv(key, val) keyc := C.CString(key) defer C.free(unsafe.Pointer(keyc)) v := C.getenv(keyc) if uintptr(unsafe.Pointer(v)) == 0 { t.Fatal("getenv returned NULL") } vs := C.GoString(v) if vs != val { t.Fatalf("getenv() = %q; want %q", vs, val) } } // function pointer variables func callBridge(f C.intFunc) int { return int(C.bridge_int_func(f)) } func callCBridge(f C.intFunc) C.int { return C.bridge_int_func(f) } func testFpVar(t *testing.T) { const expected = 42 f := C.intFunc(C.fortytwo) res1 := C.bridge_int_func(f) if r1 := int(res1); r1 != expected { t.Errorf("got %d, want %d", r1, expected) } res2 := callCBridge(f) if r2 := int(res2); r2 != expected { t.Errorf("got %d, want %d", r2, expected) } r3 := callBridge(f) if r3 != expected { t.Errorf("got %d, want %d", r3, expected) } } // issue 1222 type AsyncEvent struct { event C.struct_ibv_async_event } // issue 1635 func test1635(t *testing.T) { C.scatter() if v := C.hola; v != 0 { t.Fatalf("C.hola is %d, should be 0", v) } if v := C.testHola(); v != 0 { t.Fatalf("C.testHola() is %d, should be 0", v) } } // issue 2470 func testUnsignedInt(t *testing.T) { a := (int64)(C.UINT32VAL) b := (int64)(0xc008427b) if a != b { t.Errorf("Incorrect unsigned int - got %x, want %x", a, b) } } // issue 3250 func test3250(t *testing.T) { if runtime.GOOS == "windows" { t.Skip("not applicable on windows") } t.Skip("skipped, see golang.org/issue/5885") var ( thres = 1 sig = syscall_dot_SIGCHLD ) type result struct { n int sig os.Signal } var ( sigCh = make(chan os.Signal, 10) waitStart = make(chan struct{}) waitDone = make(chan result) ) signal.Notify(sigCh, sig) go func() { n := 0 alarm := time.After(time.Second * 3) for { select { case <-waitStart: waitStart = nil case v := <-sigCh: n++ if v != sig || n > thres { waitDone <- result{n, v} return } case <-alarm: waitDone <- result{n, sig} return } } }() waitStart <- struct{}{} C.testSendSIG() r := <-waitDone if r.sig != sig { t.Fatalf("received signal %v, but want %v", r.sig, sig) } t.Logf("got %d signals\n", r.n) if r.n <= thres { t.Fatalf("expected more than %d", thres) } } // issue 3261 func testLibgcc(t *testing.T) { var table = []struct { in, out C.int }{ {0, 0}, {1, 1}, {-42, 42}, {1000300, 1000300}, {1 - 1<<31, 1<<31 - 1}, } for _, v := range table { if o := C.vabs(v.in); o != v.out { t.Fatalf("abs(%d) got %d, should be %d", v.in, o, v.out) return } } } // issue 3729 func test3729(t *testing.T) { if runtime.GOOS == "windows" { t.Skip("skipping on windows") } _, e := C.g() if e != syscall.E2BIG { t.Errorf("got %q, expect %q", e, syscall.E2BIG) } _, e = C.g2(C.EINVAL, C._expA, C._expB, C._expC, C._expD) if e != syscall.EINVAL { t.Errorf("got %q, expect %q", e, syscall.EINVAL) } } // issue 3945 func testPrintf(t *testing.T) { C.say() } // issue 4054 var issue4054a = []int{C.A, C.B, C.C, C.D, C.E, C.F, C.G, C.H, C.I, C.J} // issue 4339 func test4339(t *testing.T) { C.handle4339(&C.exported4339) } // issue 4417 func testBoolAlign(t *testing.T) { b := C.c_bool(true, true, 10, true, false) if b != 10 { t.Fatalf("found %d expected 10\n", b) } b = C.c_bool(true, true, 5, true, true) if b != 5 { t.Fatalf("found %d expected 5\n", b) } b = C.c_bool(true, true, 3, true, false) if b != 3 { t.Fatalf("found %d expected 3\n", b) } b = C.c_bool(false, false, 1, true, false) if b != 1 { t.Fatalf("found %d expected 1\n", b) } b = C.c_bool(false, true, 200, true, false) if b != 200 { t.Fatalf("found %d expected 200\n", b) } 
} // issue 4857 func test4857() { _ = C.issue4857() } // issue 5224 func testCflags(t *testing.T) { is_windows := C.is_windows == 1 if is_windows != (runtime.GOOS == "windows") { t.Errorf("is_windows: %v, runtime.GOOS: %s", is_windows, runtime.GOOS) } if C.common != 123 { t.Errorf("common: %v (expected 123)", C.common) } } // issue 5227 func test5227(t *testing.T) { C.init() } func selectfont() C.Fontinfo { return C.SansTypeface } // issue 5242 func test5242(t *testing.T) { if got := C.issue5242(C.foo{}, C.bar{}); got != 5242 { t.Errorf("got %v", got) } } func test5603(t *testing.T) { var x [5]int64 exp := int64(C.issue5603exp) x[0] = int64(C.issue5603foo0()) x[1] = int64(C.issue5603foo1(nil)) x[2] = int64(C.issue5603foo2(nil, nil)) x[3] = int64(C.issue5603foo3(nil, nil, nil)) x[4] = int64(C.issue5603foo4(nil, nil, nil, nil)) for i, v := range x { if v != exp { t.Errorf("issue5603foo%d() returns %v, expected %v", i, v, exp) } } } // issue 5337 func test5337(t *testing.T) { C.test5337() } // issue 5740 func test5740(t *testing.T) { if v := C.test5740a() + C.test5740b(); v != 5 { t.Errorf("expected 5, got %v", v) } } // issue 5986 func test5986(t *testing.T) { C.output5986() } // issue 6128 func test6128() { // nothing to run, just make sure this compiles. _ = C.X } // issue 6390 func test6390(t *testing.T) { p1 := C.malloc(1024) if p1 == nil { t.Fatalf("C.malloc(1024) returned nil") } p2 := C.malloc(0) if p2 == nil { t.Fatalf("C.malloc(0) returned nil") } C.free(p1) C.free(p2) } func test6472() { // nothing to run, just make sure this compiles s := new(C.z) println(s.y[0].x) } // issue 6506 func test6506() { // nothing to run, just make sure this compiles var x C.size_t C.calloc(x, x) C.malloc(x) C.realloc(nil, x) C.memcpy(nil, nil, x) C.memcmp(nil, nil, x) C.memmove(nil, nil, x) C.strncpy(nil, nil, x) C.strncmp(nil, nil, x) C.strncat(nil, nil, x) x = C.strxfrm(nil, nil, x) C.memchr(nil, 0, x) x = C.strcspn(nil, nil) x = C.strspn(nil, nil) C.memset(nil, 0, x) x = C.strlen(nil) _ = x } // issue 6612 func testNaming(t *testing.T) { C.myfunc() C.myfunc_def() if v := C.myvar; v != 5 { t.Errorf("C.myvar = %d, want 5", v) } if v := C.myvar_def; v != 5 { t.Errorf("C.myvar_def = %d, want 5", v) } if s := C.GoString(C.mytext); s != "abcdef" { t.Errorf("C.mytext = %q, want %q", s, "abcdef") } if s := C.GoString(C.mytext_def); s != "abcdef" { t.Errorf("C.mytext_def = %q, want %q", s, "abcdef") } if c := C.myenum; c != 1234 { t.Errorf("C.myenum = %v, want 1234", c) } if c := C.myenum_def; c != 1234 { t.Errorf("C.myenum_def = %v, want 1234", c) } { const c = C.myenum if c != 1234 { t.Errorf("C.myenum as const = %v, want 1234", c) } } { const c = C.myenum_def if c != 1234 { t.Errorf("C.myenum as const = %v, want 1234", c) } } if c := C.myint_def; c != 12345 { t.Errorf("C.myint_def = %v, want 12345", c) } { const c = C.myint_def if c != 12345 { t.Errorf("C.myint as const = %v, want 12345", c) } } if c := C.myfloat_def; c != 1.5 { t.Errorf("C.myint_def = %v, want 1.5", c) } { const c = C.myfloat_def if c != 1.5 { t.Errorf("C.myint as const = %v, want 1.5", c) } } if s := C.mystring_def; s != "hello" { t.Errorf("C.mystring_def = %q, want %q", s, "hello") } } // issue 6907 func test6907(t *testing.T) { want := "yarn" if got := C.GoString(C.Issue6907CopyString(want)); got != want { t.Errorf("C.GoString(C.Issue6907CopyString(%q)) == %q, want %q", want, got, want) } } // issue 7560 func test7560(t *testing.T) { // some mingw don't implement __packed__ correctly. 
if C.offset7560() != 1 { t.Skip("C compiler did not pack struct") } // C.misaligned should have x but then a padding field to get to the end of the struct. // There should not be a field named 'y'. var v C.misaligned rt := reflect.TypeOf(&v).Elem() if rt.NumField() != 2 || rt.Field(0).Name != "x" || rt.Field(1).Name != "_" { t.Errorf("unexpected fields in C.misaligned:\n") for i := 0; i < rt.NumField(); i++ { t.Logf("%+v\n", rt.Field(i)) } } } // issue 7786 func f() { var x1 *C.typedef_test7786 var x2 *C.struct_test7786 x1 = x2 x2 = x1 C.f7786(x1) C.f7786(x2) C.g7786(x1) C.g7786(x2) var b1 *C.typedef_body7786 var b2 *C.struct_body7786 b1 = b2 b2 = b1 C.b7786(b1) C.b7786(b2) C.c7786(b1) C.c7786(b2) var u1 *C.typedef_union7786 var u2 *C.union_union7786 u1 = u2 u2 = u1 C.u7786(u1) C.u7786(u2) C.v7786(u1) C.v7786(u2) } // issue 8092 func test8092(t *testing.T) { tests := []struct { s string a, b *C.char }{ {"text", &C.text[0], C.ctext()}, {"data", &C.data[0], C.cdata()}, } for _, test := range tests { if test.a != test.b { t.Errorf("%s: pointer mismatch: %v != %v", test.s, test.a, test.b) } if got := C.GoString(test.a); got != test.s { t.Errorf("%s: points at %#v, want %#v", test.s, got, test.s) } } } // issues 8368 and 8441 func issue8368(one *C.struct_one, two *C.struct_two) { } func issue8441(one *C.one, two *C.two) { issue8441(two.x, one.x) } // issue 8428 var _ = C.struct_issue8428one{ b: C.char(0), // The trailing rest field is not available in cgo. // See issue 11925. // rest: [0]C.char{}, } var _ = C.struct_issue8428two{ p: unsafe.Pointer(nil), b: C.char(0), rest: [0]C.char{}, } var _ = C.struct_issue8428three{ w: [1][2][3][0]C.char{}, x: [2][3][0][1]C.char{}, y: [3][0][1][2]C.char{}, z: [0][1][2][3]C.char{}, } // issue 8811 func test8811(t *testing.T) { C.issue8811Execute() } // issue 9557 func test9557(t *testing.T) { // implicitly dereference a Go variable foo := C.issue9557foo if v := foo.a; v != 42 { t.Fatalf("foo.a expected 42, but got %d", v) } // explicitly dereference a C variable if v := (*C.issue9557foo).a; v != 42 { t.Fatalf("(*C.issue9557foo).a expected 42, but is %d", v) } // implicitly dereference a C variable if v := C.issue9557foo.a; v != 42 { t.Fatalf("C.issue9557foo.a expected 42, but is %d", v) } } // issue 8331 part 1 func issue8331a() C.issue8331 { return issue8331Var } // issue 10303 func test10303(t *testing.T, n int) { if runtime.Compiler == "gccgo" { t.Skip("gccgo permits C pointers on the stack") } // Run at a few different stack depths just to avoid an unlucky pass // due to variables ending up on different pages. 
if n > 0 { test10303(t, n-1) } if t.Failed() { return } var x, y, z, v, si C.int var s C.Struct C.setintstar(&x) C.setintptr(&y) C.setvoidptr(unsafe.Pointer(&v)) s.P = &si C.setstruct(s) if uintptr(unsafe.Pointer(&x))&^0xfff == uintptr(unsafe.Pointer(&z))&^0xfff { t.Error("C int* argument on stack") } if uintptr(unsafe.Pointer(&y))&^0xfff == uintptr(unsafe.Pointer(&z))&^0xfff { t.Error("C intptr argument on stack") } if uintptr(unsafe.Pointer(&v))&^0xfff == uintptr(unsafe.Pointer(&z))&^0xfff { t.Error("C void* argument on stack") } if uintptr(unsafe.Pointer(&si))&^0xfff == uintptr(unsafe.Pointer(&z))&^0xfff { t.Error("C struct field pointer on stack") } } // issue 11925 func test11925(t *testing.T) { if C.sizeof_struct_a11925 != unsafe.Sizeof(C.struct_a11925{}) { t.Errorf("size of a changed: C %d, Go %d", C.sizeof_struct_a11925, unsafe.Sizeof(C.struct_a11925{})) } if C.sizeof_struct_b11925 != unsafe.Sizeof(C.struct_b11925{}) { t.Errorf("size of b changed: C %d, Go %d", C.sizeof_struct_b11925, unsafe.Sizeof(C.struct_b11925{})) } } // issue 12030 func test12030(t *testing.T) { buf := (*C.char)(C.malloc(256)) defer C.free(unsafe.Pointer(buf)) for _, f := range []float64{1.0, 2.0, 3.14} { C.issue12030conv(buf, C.double(f)) got := C.GoString(buf) if want := fmt.Sprintf("d=%g", f); got != want { t.Fatalf("C.sprintf failed for %g: %q != %q", f, got, want) } } } // issue 13402 var _ C.complexfloat var _ C.complexdouble // issue 13930 // Test that cgo's multiple-value special form for // C function calls works in variable declaration statements. var _, _ = C.abs(0) // issue 14838 func test14838(t *testing.T) { data := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9} cData := C.CBytes(data) defer C.free(cData) if C.check_cbytes((*C.char)(cData), C.size_t(len(data))) == 0 { t.Fatalf("mismatched data: expected %v, got %v", data, (*(*[10]byte)(unsafe.Pointer(cData)))[:]) } } // issue 17065 var sink C.int func test17065(t *testing.T) { if runtime.GOOS == "darwin" { t.Skip("broken on darwin; issue 17065") } for i := range C.ii { sink = C.ii[i] } } // issue 17537 func test17537(t *testing.T) { v := C.S17537{i: 17537} if got, want := C.I17537(&v), C.int(17537); got != want { t.Errorf("got %d, want %d", got, want) } p := (*C.char)(C.malloc(1)) *p = 17 if got, want := C.F17537(&p), C.int(17); got != want { t.Errorf("got %d, want %d", got, want) } C.F18298(nil) var v18298 C.T18298_2 C.G18298(C.T18298_1(v18298)) } // issue 17723 func testAPI() { var cs *C.char cs = C.CString("hello") defer C.free(unsafe.Pointer(cs)) var s string s = C.GoString((*C.char)(C.api_hello)) s = C.GoStringN((*C.char)(C.api_hello), C.int(6)) var b []byte b = C.GoBytes(unsafe.Pointer(C.api_hello), C.int(6)) _, _ = s, b C.cstring_pointer_fun(nil) } // issue 18126 func test18126(t *testing.T) { p := C.malloc(1) _, err := C.Issue18126C(&p) C.free(p) _ = err } // issue 18720 func test18720(t *testing.T) { if got, want := C.HELLO_WORLD, "hello\000world"; got != want { t.Errorf("C.HELLO_WORLD == %q, expected %q", got, want) } if got, want := C.VAR1, C.int(5); got != want { t.Errorf("C.VAR1 == %v, expected %v", got, want) } if got, want := *C.ADDR, C.int(5); got != want { t.Errorf("*C.ADDR == %v, expected %v", got, want) } if got, want := C.CALL, C.int(6); got != want { t.Errorf("C.CALL == %v, expected %v", got, want) } if got, want := C.CALL, C.int(7); got != want { t.Errorf("C.CALL == %v, expected %v", got, want) } // Issue 20125. 
if got, want := C.SIZE_OF_FOO, 1; got != want { t.Errorf("C.SIZE_OF_FOO == %v, expected %v", got, want) } } // issue 20129 func test20129(t *testing.T) { if C.issue20129 != 0 { t.Fatal("test is broken") } C.issue20129Foo() if C.issue20129 != 1 { t.Errorf("got %v but expected %v", C.issue20129, 1) } C.issue20129Bar() if C.issue20129 != 2 { t.Errorf("got %v but expected %v", C.issue20129, 2) } } // issue 20369 func test20369(t *testing.T) { if C.XUINT64_MAX != math.MaxUint64 { t.Fatalf("got %v, want %v", uint64(C.XUINT64_MAX), uint64(math.MaxUint64)) } } // issue 21668 var issue21668_X = C.x21668 // issue 21708 func test21708(t *testing.T) { if got, want := C.CAST_TO_INT64, -1; got != want { t.Errorf("C.CAST_TO_INT64 == %v, expected %v", got, want) } } // issue 21809 func test21809(t *testing.T) { longVar := C.long(3) typedefVar := C.MySigned_t(4) typedefTypedefVar := C.MySigned2_t(5) // all three should be considered identical to `long` if ret := C.takes_long(longVar); ret != 9 { t.Errorf("got %v but expected %v", ret, 9) } if ret := C.takes_long(typedefVar); ret != 16 { t.Errorf("got %v but expected %v", ret, 16) } if ret := C.takes_long(typedefTypedefVar); ret != 25 { t.Errorf("got %v but expected %v", ret, 25) } // They should also be identical to the typedef'd type if ret := C.takes_typedef(longVar); ret != 9 { t.Errorf("got %v but expected %v", ret, 9) } if ret := C.takes_typedef(typedefVar); ret != 16 { t.Errorf("got %v but expected %v", ret, 16) } if ret := C.takes_typedef(typedefTypedefVar); ret != 25 { t.Errorf("got %v but expected %v", ret, 25) } } // issue 22906 func test22906(t *testing.T) { var x1 C.jobject = 0 // Note: 0, not nil. That makes sure we use uintptr for these types. _ = x1 var x2 C.jclass = 0 _ = x2 var x3 C.jthrowable = 0 _ = x3 var x4 C.jstring = 0 _ = x4 var x5 C.jarray = 0 _ = x5 var x6 C.jbooleanArray = 0 _ = x6 var x7 C.jbyteArray = 0 _ = x7 var x8 C.jcharArray = 0 _ = x8 var x9 C.jshortArray = 0 _ = x9 var x10 C.jintArray = 0 _ = x10 var x11 C.jlongArray = 0 _ = x11 var x12 C.jfloatArray = 0 _ = x12 var x13 C.jdoubleArray = 0 _ = x13 var x14 C.jobjectArray = 0 _ = x14 var x15 C.jweak = 0 _ = x15 } // issue 22958 // Nothing to run, just make sure this compiles. var Vissue22958 C.issue22958Type func test23356(t *testing.T) { if got, want := C.a(), C.int(5); got != want { t.Errorf("C.a() == %v, expected %v", got, want) } if got, want := C.r(), C.int(3); got != want { t.Errorf("C.r() == %v, expected %v", got, want) } } // issue 23720 func Issue23720F() { var x C.issue23720A C.issue23720F(x) } // issue 24206 func test24206(t *testing.T) { if runtime.GOOS != "linux" || runtime.GOARCH != "amd64" { t.Skipf("skipping on %s/%s", runtime.GOOS, runtime.GOARCH) } if l := len(C.GoString(C.dangerousString1())); l != 123 { t.Errorf("Incorrect string length - got %d, want 123", l) } if l := len(C.GoString(C.dangerousString2())); l != 4096+123 { t.Errorf("Incorrect string length - got %d, want %d", l, 4096+123) } } // issue 25143 func issue25143sum(ns ...C.int) C.int { total := C.int(0) for _, n := range ns { total += n } return total } func test25143(t *testing.T) { if got, want := issue25143sum(1, 2, 3), C.int(6); got != want { t.Errorf("issue25143sum(1, 2, 3) == %v, expected %v", got, want) } } // issue 26066 // Wrong type of constant with GCC 8 and newer. 
func test26066(t *testing.T) { var i = int64(C.issue26066) if i != -1 { t.Errorf("got %d, want -1", i) } } // issue 26517 var a C.TypeOne var b C.TypeTwo // issue 27660 // Stress the interaction between the race detector and cgo in an // attempt to reproduce the memory corruption described in #27660. // The bug was very timing sensitive; at the time of writing this // test would only trigger the bug about once out of every five runs. func test27660(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() ints := make([]int, 100) locks := make([]sync.Mutex, 100) // Slowly create threads so that ThreadSanitizer is forced to // frequently resize its SyncClocks. for i := 0; i < 100; i++ { go func() { for ctx.Err() == nil { // Sleep in C for long enough that it is likely that the runtime // will retake this goroutine's currently wired P. C.usleep(1000 /* 1ms */) runtime.Gosched() // avoid starvation (see #28701) } }() go func() { // Trigger lots of synchronization and memory reads/writes to // increase the likelihood that the race described in #27660 // results in corruption of ThreadSanitizer's internal state // and thus an assertion failure or segfault. for ctx.Err() == nil { j := rand.Intn(100) locks[j].Lock() ints[j]++ locks[j].Unlock() } }() time.Sleep(time.Millisecond) } } // issue 28540 func twoargsF() { v := []string{} C.twoargs1(C.twoargs2(), C.twoargs3(unsafe.Pointer(&v))) } // issue 28545 func issue28545G(p **C.char) { C.issue28545F(p, -1, (0)) C.issue28545F(p, 2+3, complex(1, 1)) C.issue28545F(p, issue28772Constant, issue28772Constant2) } // issue 28772 part 1 - part 2 in testx.go const issue28772Constant = C.issue28772Constant // issue 28896 func offset(i int) uintptr { var pi C.innerPacked var po C.outerPacked var ui C.innerUnpacked var uo C.outerUnpacked switch i { case 0: return unsafe.Offsetof(pi.f2) case 1: return unsafe.Offsetof(po.g2) case 2: return unsafe.Offsetof(ui.f2) case 3: return unsafe.Offsetof(uo.g2) default: panic("can't happen") } } func test28896(t *testing.T) { for i := 0; i < 4; i++ { c := uintptr(C.offset(C.int(i))) g := offset(i) if c != g { t.Errorf("%d: C: %d != Go %d", i, c, g) } } } // issue 29383 // cgo's /*line*/ comments failed when inserted after '/', // because the result looked like a "//" comment. // No runtime test; just make sure it compiles. func Issue29383(n, size uint) int { if ^C.size_t(0)/C.size_t(n) < C.size_t(size) { return 0 } return 0 } // issue 29748 // Error handling a struct initializer that requires pointer checking. // Compilation test only, nothing to run. var Vissue29748 = C.f29748(&C.S29748{ nil, }) func Fissue299748() { C.f29748(&C.S29748{ nil, }) } // issue 29781 var issue29781X struct{ X int } func issue29781F(...int) int { return 0 } func issue29781G() { var p *C.char C.issue29781F(&p, C.ISSUE29781C+1) C.issue29781F(nil, (C.int)( 0)) C.issue29781F(&p, (C.int)(0)) C.issue29781F(&p, (C.int)( 0)) C.issue29781F(&p, (C.int)(issue29781X. 
X)) } // issue 30065 func test30065(t *testing.T) { var a [256]byte b := []byte("a") C.memcpy(unsafe.Pointer(&a), unsafe.Pointer(&b[0]), 1) if a[0] != 'a' { t.Errorf("&a failed: got %c, want %c", a[0], 'a') } b = []byte("b") C.memcpy(unsafe.Pointer(&a[0]), unsafe.Pointer(&b[0]), 1) if a[0] != 'b' { t.Errorf("&a[0] failed: got %c, want %c", a[0], 'b') } d := make([]byte, 256) b = []byte("c") C.memcpy(unsafe.Pointer(&d[0]), unsafe.Pointer(&b[0]), 1) if d[0] != 'c' { t.Errorf("&d[0] failed: got %c, want %c", d[0], 'c') } } // issue 31093 // No runtime test; just make sure it compiles. func Issue31093() { C.issue31093F(C.ushort(0)) } // issue 32579 func test32579(t *testing.T) { var s [1]C.struct_S32579 C.memset(unsafe.Pointer(&s[0].data[0]), 1, 1) if s[0].data[0] != 1 { t.Errorf("&s[0].data[0] failed: got %d, want %d", s[0].data[0], 1) } }
 {
	l := Atol("123")
	if l != 123 {
		t.Error("Atol 123: ", l)
	}
}
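For readability only: the middle above is the body for the `func testAtol(t *testing.T)` signature left dangling at the end of this record's prefix. Assembled purely for illustration (Atol is the cgo helper defined earlier in the same file), it reads:

func testAtol(t *testing.T) {
	// Atol wraps C.atol via cgo; see its definition earlier in this file.
	l := Atol("123")
	if l != 123 {
		t.Error("Atol 123: ", l)
	}
}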
resource.go
package retryresource import ( "context" "fmt" "time" "github.com/cenk/backoff" "github.com/giantswarm/microerror" "github.com/giantswarm/micrologger" "github.com/giantswarm/operatorkit/framework" ) const ( // Name is the identifier of the resource. Name = "retry" ) // Config represents the configuration used to create a new retry resource. type Config struct { // Dependencies. BackOff backoff.BackOff Logger micrologger.Logger Resource framework.Resource } // DefaultConfig provides a default configuration to create a new retry resource // by best effort. func DefaultConfig() Config { var err error var newLogger micrologger.Logger { config := micrologger.DefaultConfig() newLogger, err = micrologger.New(config) if err != nil { panic(err) } } return Config{ // Dependencies. BackOff: backoff.NewExponentialBackOff(), Logger: newLogger, Resource: nil, } } // New creates a new configured retry resource. func New(config Config) (*Resource, error) { // Dependencies. if config.BackOff == nil { return nil, microerror.Maskf(invalidConfigError, "config.BackOff must not be empty") } if config.Logger == nil { return nil, microerror.Maskf(invalidConfigError, "config.Logger must not be empty") } if config.Resource == nil { return nil, microerror.Maskf(invalidConfigError, "config.Resource must not be empty") } newResource := &Resource{ // Dependencies. backOff: config.BackOff, logger: config.Logger.With( "underlyingResource", config.Resource.Underlying().Name(), ), resource: config.Resource, } return newResource, nil } type Resource struct { // Dependencies. backOff backoff.BackOff logger micrologger.Logger resource framework.Resource } func (r *Resource) GetCurrentState(ctx context.Context, obj interface{}) (interface{}, error) { var err error var v interface{} o := func() error { v, err = r.resource.GetCurrentState(ctx, obj) if err != nil { return microerror.Mask(err) } return nil } n := func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'GetCurrentState' due to error (%s)", err.Error())) } err = backoff.RetryNotify(o, r.backOff, n) if err != nil { return nil, microerror.Mask(err) } return v, nil } func (r *Resource) GetDesiredState(ctx context.Context, obj interface{}) (interface{}, error) { var err error var v interface{} o := func() error { v, err = r.resource.GetDesiredState(ctx, obj) if err != nil { return microerror.Mask(err) } return nil } n := func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'GetDesiredState' due to error (%s)", err.Error())) } err = backoff.RetryNotify(o, r.backOff, n) if err != nil { return nil, microerror.Mask(err) } return v, nil } func (r *Resource) NewUpdatePatch(ctx context.Context, obj, currentState, desiredState interface{}) (*framework.Patch, error) { var err error var v *framework.Patch o := func() error { v, err = r.resource.NewUpdatePatch(ctx, obj, currentState, desiredState) if err != nil { return microerror.Mask(err) } return nil } n := func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'NewUpdatePatch' due to error (%s)", err.Error())) } err = backoff.RetryNotify(o, r.backOff, n) if err != nil { return nil, microerror.Mask(err) } return v, nil } func (r *Resource) NewDeletePatch(ctx context.Context, obj, currentState, desiredState interface{}) (*framework.Patch, error) { var err error var v *framework.Patch o := func() error { v, err = r.resource.NewDeletePatch(ctx, obj, currentState, desiredState) if err != nil { return microerror.Mask(err) } return nil } n := 
func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'NewDeletePatch' due to error (%s)", err.Error())) } err = backoff.RetryNotify(o, r.backOff, n) if err != nil { return nil, microerror.Mask(err) } return v, nil } func (r *Resource) Name() string { return Name } func (r *Resource) ApplyCreateChange(ctx context.Context, obj, createState interface{}) error { o := func() error { err := r.resource.ApplyCreateChange(ctx, obj, createState) if err != nil { return microerror.Mask(err) } return nil } n := func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'ApplyCreatePatch' due to error (%s)", err.Error())) } err := backoff.RetryNotify(o, r.backOff, n) if err != nil { return microerror.Mask(err) } return nil } func (r *Resource) ApplyDeleteChange(ctx context.Context, obj, deleteState interface{}) error { o := func() error { err := r.resource.ApplyDeleteChange(ctx, obj, deleteState) if err != nil { return microerror.Mask(err) } return nil } n := func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'ApplyDeletePatch' due to error (%s)", err.Error())) } err := backoff.RetryNotify(o, r.backOff, n) if err != nil { return microerror.Mask(err) } return nil } func (r *Resource) ApplyUpdateChange(ctx context.Context, obj, updateState interface{}) error { o := func() error { err := r.resource.ApplyUpdateChange(ctx, obj, updateState) if err != nil { return microerror.Mask(err) } return nil } n := func(err error, dur time.Duration) { r.logger.Log("warning", fmt.Sprintf("retrying 'ApplyUpdatePatch' due to error (%s)", err.Error())) } err := backoff.RetryNotify(o, r.backOff, n) if err != nil
	return nil
}

func (r *Resource) Underlying() framework.Resource {
	return r.resource.Underlying()
}
 {
		return microerror.Mask(err)
	}
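For readability only: joined with the `if err != nil` that ends this record's prefix and the `return nil` that opens its suffix, the middle above completes ApplyUpdateChange as follows (assembled here for illustration):

	err := backoff.RetryNotify(o, r.backOff, n)
	if err != nil {
		return microerror.Mask(err)
	}
	return nil
}

A minimal usage sketch of the wrapper under stated assumptions: myResource stands in for some concrete framework.Resource implementation, and the package is imported under its declared package name retryresource; neither name appears in this record.

	// Hypothetical wiring; myResource is assumed, not defined in this record.
	c := retryresource.DefaultConfig() // exponential backoff + default logger
	c.Resource = myResource
	r, err := retryresource.New(c)
	if err != nil {
		// invalidConfigError: a required dependency was left nil.
	}
	_ = r // every framework.Resource method on r now retries with backoff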
builtin_cast.rs
// Copyright 2017 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. use std::borrow::Cow; use std::convert::TryInto; use std::{i64, str, u64}; use cop_datatype::prelude::*; use cop_datatype::{self, FieldTypeFlag, FieldTypeTp}; use super::{Error, EvalContext, Result, ScalarFunc}; use coprocessor::codec::convert::{self, convert_float_to_int, convert_float_to_uint}; use coprocessor::codec::mysql::decimal::RoundMode; use coprocessor::codec::mysql::{charset, Decimal, Duration, Json, Res, Time, TimeType}; use coprocessor::codec::{mysql, Datum}; impl ScalarFunc { pub fn cast_int_as_int(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<i64>> { self.children[0].eval_int(ctx, row) } pub fn cast_real_as_int(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<i64>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); if self.field_type.flag().contains(FieldTypeFlag::UNSIGNED) { let uval = convert_float_to_uint(val, u64::MAX, FieldTypeTp::Double)?; Ok(Some(uval as i64)) } else { let res = convert_float_to_int(val, i64::MIN, i64::MAX, FieldTypeTp::Double)?; Ok(Some(res)) } } pub fn cast_decimal_as_int(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<i64>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); let val = val.into_owned().round(0, RoundMode::HalfEven).unwrap(); let (overflow, res) = if self.field_type.flag().contains(FieldTypeFlag::UNSIGNED) { let uint = val.as_u64(); (uint.is_overflow(), uint.unwrap() as i64) } else { let val = val.as_i64(); (val.is_overflow(), val.unwrap()) }; if overflow { if !ctx.cfg.overflow_as_warning { return Err(Error::overflow("CastDecimalAsInt", &format!("{}", val))); } ctx.warnings .append_warning(Error::truncated_wrong_val("DECIMAL", &format!("{}", val))); } Ok(Some(res)) } pub fn cast_str_as_int(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<i64>> { if self.children[0].field_type().is_hybrid() { return self.children[0].eval_int(ctx, row); } let val = try_opt!(self.children[0].eval_string(ctx, row)); let is_negative = match val.iter().skip_while(|x| x.is_ascii_whitespace()).next() { Some(&b'-') => true, _ => false, }; let res = if is_negative { convert::bytes_to_int(ctx, &val).map(|v| { ctx.warnings .append_warning(Error::cast_neg_int_as_unsigned()); v }) } else { convert::bytes_to_uint(ctx, &val).map(|urs| { if !self.field_type.flag().contains(FieldTypeFlag::UNSIGNED) && urs > (i64::MAX as u64) { ctx.warnings .append_warning(Error::cast_as_signed_overflow()); } urs as i64 }) }; match res { Ok(v) => Ok(Some(v)), Err(e) => if e.is_overflow() { ctx.overflow_from_cast_str_as_int(&val, e, is_negative) .map(Some) } else { Err(e) }, } } pub fn cast_time_as_int(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<i64>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let dec = val.to_decimal()?; let dec = dec .round(mysql::DEFAULT_FSP as i8, RoundMode::HalfEven) .unwrap(); let res = dec.as_i64().unwrap(); Ok(Some(res)) } pub fn cast_duration_as_int( &self, ctx: &mut EvalContext, row: &[Datum], ) -> Result<Option<i64>> { let val = 
try_opt!(self.children[0].eval_duration(ctx, row)); let dec = val.to_decimal()?; let dec = dec .round(mysql::DEFAULT_FSP as i8, RoundMode::HalfEven) .unwrap(); let res = dec.as_i64().unwrap(); Ok(Some(res)) } pub fn cast_json_as_int(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<i64>> { let val = try_opt!(self.children[0].eval_json(ctx, row)); let res = val.cast_to_int(); Ok(Some(res)) } pub fn cast_int_as_real(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<f64>> { let val = try_opt!(self.children[0].eval_int(ctx, row)); if !self.children[0] .field_type() .flag() .contains(FieldTypeFlag::UNSIGNED) { Ok(Some(self.produce_float_with_specified_tp(ctx, val as f64)?)) } else { let uval = val as u64; Ok(Some( self.produce_float_with_specified_tp(ctx, uval as f64)?, )) } } pub fn cast_real_as_real(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<f64>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); Ok(Some(self.produce_float_with_specified_tp(ctx, val)?)) } pub fn cast_decimal_as_real( &self, ctx: &mut EvalContext, row: &[Datum], ) -> Result<Option<f64>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); let res = val.as_f64()?; Ok(Some(self.produce_float_with_specified_tp(ctx, res)?)) } pub fn cast_str_as_real(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<f64>> { if self.children[0].field_type().is_hybrid() { return self.children[0].eval_real(ctx, row); } let val = try_opt!(self.children[0].eval_string(ctx, row)); let res = convert::bytes_to_f64(ctx, &val)?; Ok(Some(self.produce_float_with_specified_tp(ctx, res)?)) } pub fn cast_time_as_real(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<f64>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let val = val.to_decimal()?; let res = val.as_f64()?; Ok(Some(self.produce_float_with_specified_tp(ctx, res)?)) } pub fn cast_duration_as_real( &self, ctx: &mut EvalContext, row: &[Datum], ) -> Result<Option<f64>> { let val = try_opt!(self.children[0].eval_duration(ctx, row)); let val = val.to_decimal()?; let res = val.as_f64()?; Ok(Some(self.produce_float_with_specified_tp(ctx, res)?)) } pub fn cast_json_as_real(&self, ctx: &mut EvalContext, row: &[Datum]) -> Result<Option<f64>> { let val = try_opt!(self.children[0].eval_json(ctx, row)); let val = val.cast_to_real(ctx)?; Ok(Some(self.produce_float_with_specified_tp(ctx, val)?)) } pub fn cast_int_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &[Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let val = try_opt!(self.children[0].eval_int(ctx, row)); let field_type = &self.children[0].field_type(); let res = if !field_type.flag().contains(FieldTypeFlag::UNSIGNED) { Cow::Owned(Decimal::from(val)) } else { let uval = val as u64; Cow::Owned(Decimal::from(uval)) }; self.produce_dec_with_specified_tp(ctx, res).map(Some) } pub fn cast_real_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); let res = Decimal::from_f64(val)?; self.produce_dec_with_specified_tp(ctx, Cow::Owned(res)) .map(Some) } pub fn cast_decimal_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); self.produce_dec_with_specified_tp(ctx, val).map(Some) } pub fn cast_str_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let 
dec = if self.children[0].field_type().is_hybrid() { try_opt!(self.children[0].eval_decimal(ctx, row)) } else { let val = try_opt!(self.children[0].eval_string(ctx, row)); match Decimal::from_bytes(&val)? { Res::Ok(d) => Cow::Owned(d), Res::Truncated(d) | Res::Overflow(d) => { ctx.handle_truncate(true)?; Cow::Owned(d) } } }; self.produce_dec_with_specified_tp(ctx, dec).map(Some) } pub fn cast_time_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let dec = val.to_decimal()?; self.produce_dec_with_specified_tp(ctx, Cow::Owned(dec)) .map(Some) } pub fn cast_duration_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let val = try_opt!(self.children[0].eval_duration(ctx, row)); let dec = val.to_decimal()?; self.produce_dec_with_specified_tp(ctx, Cow::Owned(dec)) .map(Some) }
pub fn cast_json_as_decimal<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Decimal>>> { let val = try_opt!(self.children[0].eval_json(ctx, row)); let val = val.cast_to_real(ctx)?; let dec = Decimal::from_f64(val)?; self.produce_dec_with_specified_tp(ctx, Cow::Owned(dec)) .map(Some) } pub fn cast_int_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_int(ctx, row)); let s = if self.children[0] .field_type() .flag() .contains(FieldTypeFlag::UNSIGNED) { let uval = val as u64; format!("{}", uval) } else { format!("{}", val) }; self.produce_str_with_specified_tp(ctx, Cow::Owned(s.into_bytes())) .map(Some) } pub fn cast_real_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); let s = format!("{}", val); self.produce_str_with_specified_tp(ctx, Cow::Owned(s.into_bytes())) .map(Some) } pub fn cast_decimal_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); let s = val.to_string(); self.produce_str_with_specified_tp(ctx, Cow::Owned(s.into_bytes())) .map(Some) } pub fn cast_str_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_string(ctx, row)); self.produce_str_with_specified_tp(ctx, val).map(Some) } pub fn cast_time_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let s = format!("{}", val); self.produce_str_with_specified_tp(ctx, Cow::Owned(s.into_bytes())) .map(Some) } pub fn cast_duration_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_duration(ctx, row)); let s = format!("{}", val); self.produce_str_with_specified_tp(ctx, Cow::Owned(s.into_bytes())) .map(Some) } pub fn cast_json_as_str<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, [u8]>>> { let val = try_opt!(self.children[0].eval_json(ctx, row)); let s = val.to_string(); self.produce_str_with_specified_tp(ctx, Cow::Owned(s.into_bytes())) .map(Some) } pub fn cast_int_as_time<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_int(ctx, row)); let s = format!("{}", val); Ok(Some(self.produce_time_with_str(ctx, &s)?)) } pub fn cast_real_as_time<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); let s = format!("{}", val); Ok(Some(self.produce_time_with_str(ctx, &s)?)) } pub fn cast_decimal_as_time<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); let s = val.to_string(); Ok(Some(self.produce_time_with_float_str(ctx, &s)?)) } pub fn cast_str_as_time<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_string_and_decode(ctx, row)); Ok(Some(self.produce_time_with_str(ctx, &val)?)) } pub fn cast_time_as_time<'a, 'b: 'a>( &'b self, ctx: 
&mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let mut val = val.into_owned(); val.round_frac(self.field_type.decimal() as i8)?; // TODO: tidb only update tp when tp is Date val.set_time_type(self.field_type.tp().try_into()?)?; Ok(Some(Cow::Owned(val))) } pub fn cast_duration_as_time<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_duration(ctx, row)); let mut val = Time::from_duration(ctx.cfg.tz, self.field_type.tp().try_into()?, val.as_ref())?; val.round_frac(self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(val))) } pub fn cast_json_as_time<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Time>>> { let val = try_opt!(self.children[0].eval_json(ctx, row)); let s = val.unquote()?; Ok(Some(self.produce_time_with_str(ctx, &s)?)) } pub fn cast_int_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_int(ctx, row)); let s = format!("{}", val); // TODO: port NumberToDuration from tidb. match Duration::parse(s.as_bytes(), self.field_type.decimal() as i8) { Ok(dur) => Ok(Some(Cow::Owned(dur))), Err(e) => if e.is_overflow() { ctx.handle_overflow(e)?; Ok(None) } else { Err(e) }, } } pub fn cast_real_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); let s = format!("{}", val); let dur = Duration::parse(s.as_bytes(), self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(dur))) } pub fn cast_decimal_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); let s = val.to_string(); let dur = Duration::parse(s.as_bytes(), self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(dur))) } pub fn cast_str_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_string(ctx, row)); let dur = Duration::parse(val.as_ref(), self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(dur))) } pub fn cast_time_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let mut res = val.to_duration()?; res.round_frac(self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(res))) } pub fn cast_duration_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_duration(ctx, row)); let mut res = val.into_owned(); res.round_frac(self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(res))) } pub fn cast_json_as_duration<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Duration>>> { let val = try_opt!(self.children[0].eval_json(ctx, row)); let s = val.unquote()?; // TODO: tidb would handle truncate here let d = Duration::parse(s.as_bytes(), self.field_type.decimal() as i8)?; Ok(Some(Cow::Owned(d))) } pub fn cast_int_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let val = try_opt!(self.children[0].eval_int(ctx, row)); let flag = 
self.children[0].field_type().flag(); let j = if flag.contains(FieldTypeFlag::IS_BOOLEAN) { Json::Boolean(val != 0) } else if flag.contains(FieldTypeFlag::UNSIGNED) { Json::U64(val as u64) } else { Json::I64(val) }; Ok(Some(Cow::Owned(j))) } pub fn cast_real_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let val = try_opt!(self.children[0].eval_real(ctx, row)); let j = Json::Double(val); Ok(Some(Cow::Owned(j))) } pub fn cast_decimal_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let val = try_opt!(self.children[0].eval_decimal(ctx, row)); let val = val.as_f64()?; let j = Json::Double(val); Ok(Some(Cow::Owned(j))) } pub fn cast_str_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let val = try_opt!(self.children[0].eval_string_and_decode(ctx, row)); if self .field_type .flag() .contains(FieldTypeFlag::PARSE_TO_JSON) { let j: Json = val.parse()?; Ok(Some(Cow::Owned(j))) } else { Ok(Some(Cow::Owned(Json::String(val.into_owned())))) } } pub fn cast_time_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let val = try_opt!(self.children[0].eval_time(ctx, row)); let mut val = val.into_owned(); if val.get_time_type() == TimeType::DateTime || val.get_time_type() == TimeType::Timestamp { val.set_fsp(mysql::MAX_FSP as u8); } let s = format!("{}", val); Ok(Some(Cow::Owned(Json::String(s)))) } pub fn cast_duration_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { let val = try_opt!(self.children[0].eval_duration(ctx, row)); let mut val = val.into_owned(); val.fsp = mysql::MAX_FSP as u8; let s = format!("{}", val); Ok(Some(Cow::Owned(Json::String(s)))) } pub fn cast_json_as_json<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, row: &'a [Datum], ) -> Result<Option<Cow<'a, Json>>> { self.children[0].eval_json(ctx, row) } fn produce_dec_with_specified_tp<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, val: Cow<'a, Decimal>, ) -> Result<Cow<'a, Decimal>> { let flen = self.field_type.flen(); let decimal = self.field_type.decimal(); if flen == cop_datatype::UNSPECIFIED_LENGTH || decimal == cop_datatype::UNSPECIFIED_LENGTH { return Ok(val); } let res = val.into_owned().convert_to(ctx, flen as u8, decimal as u8)?; Ok(Cow::Owned(res)) } /// `produce_str_with_specified_tp`(`ProduceStrWithSpecifiedTp` in tidb) produces /// a new string according to `flen` and `chs`. fn produce_str_with_specified_tp<'a, 'b: 'a>( &'b self, ctx: &mut EvalContext, s: Cow<'a, [u8]>, ) -> Result<Cow<'a, [u8]>> { let flen = self.field_type.flen(); let chs = self.field_type.get_charset(); if flen < 0 { return Ok(s); } let flen = flen as usize; // flen is the char length, not byte length, for UTF8 charset, we need to calculate the // char count and truncate to flen chars if it is too long. 
if chs == charset::CHARSET_UTF8 || chs == charset::CHARSET_UTF8MB4 { let truncate_info = { let s = str::from_utf8(s.as_ref())?; let mut indices = s.char_indices().skip(flen); if let Some((truncate_pos, _)) = indices.next() { let char_count = flen + 1 + indices.count(); Some((char_count, truncate_pos)) } else { None } }; if truncate_info.is_none() { return Ok(s); } let (char_count, truncate_pos) = truncate_info.unwrap(); ctx.handle_truncate_err(Error::data_too_long(format!( "Data Too Long, field len {}, data len {}", flen, char_count )))?; let mut res = s.into_owned(); convert::truncate_binary(&mut res, truncate_pos as isize); return Ok(Cow::Owned(res)); } if s.len() > flen { ctx.handle_truncate_err(Error::data_too_long(format!( "Data Too Long, field len {}, data len {}", flen, s.len() )))?; let mut res = s.into_owned(); convert::truncate_binary(&mut res, flen as isize); return Ok(Cow::Owned(res)); } if self.field_type.tp() == FieldTypeTp::String && s.len() < flen { let mut s = s.into_owned(); s.resize(flen, 0); return Ok(Cow::Owned(s)); } Ok(s) } fn produce_time_with_str(&self, ctx: &mut EvalContext, s: &str) -> Result<Cow<Time>> { let mut t = Time::parse_datetime(s, self.field_type.decimal() as i8, ctx.cfg.tz)?; t.set_time_type(self.field_type.tp().try_into()?)?; Ok(Cow::Owned(t)) } fn produce_time_with_float_str(&self, ctx: &mut EvalContext, s: &str) -> Result<Cow<Time>> { let mut t = Time::parse_datetime_from_float_string(s, self.field_type.decimal() as i8, ctx.cfg.tz)?; t.set_time_type(self.field_type.tp().try_into()?)?; Ok(Cow::Owned(t)) } /// `produce_float_with_specified_tp`(`ProduceFloatWithSpecifiedTp` in tidb) produces /// a new float64 according to `flen` and `decimal` in `self.tp`. /// TODO port tests from tidb(tidb haven't implemented now) fn produce_float_with_specified_tp(&self, ctx: &mut EvalContext, f: f64) -> Result<f64> { let flen = self.field_type.flen(); let decimal = self.field_type.decimal(); if flen == cop_datatype::UNSPECIFIED_LENGTH || decimal == cop_datatype::UNSPECIFIED_LENGTH { return Ok(f); } match convert::truncate_f64(f, flen as u8, decimal as u8) { Res::Ok(d) => Ok(d), Res::Overflow(d) | Res::Truncated(d) => { //TODO process warning with ctx ctx.handle_truncate(true)?; Ok(d) } } } } #[cfg(test)] mod tests { use std::str::FromStr; use std::sync::Arc; use std::{i64, u64}; use cop_datatype::{self, FieldTypeAccessor, FieldTypeFlag, FieldTypeTp}; use tipb::expression::{Expr, FieldType, ScalarFuncSig}; use chrono::Utc; use coprocessor::codec::error::*; use coprocessor::codec::mysql::{self, charset, Decimal, Duration, Json, Time, TimeType, Tz}; use coprocessor::codec::Datum; use coprocessor::dag::expr::ctx::FLAG_OVERFLOW_AS_WARNING; use coprocessor::dag::expr::tests::{col_expr as base_col_expr, scalar_func_expr}; use coprocessor::dag::expr::{EvalConfig, EvalContext, Expression}; pub fn col_expr(col_id: i64, tp: FieldTypeTp) -> Expr { let mut expr = base_col_expr(col_id); let mut fp = FieldType::new(); fp.as_mut_accessor().set_tp(tp); if tp == FieldTypeTp::String { fp.set_charset(charset::CHARSET_UTF8.to_owned()); } expr.set_field_type(fp); expr } #[test] fn test_cast_as_int() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let t = Time::parse_utc_datetime("2012-12-12 12:00:23", 0).unwrap(); #[cfg_attr(feature = "cargo-clippy", allow(inconsistent_digit_grouping))] let time_int = 2012_12_12_12_00_23i64; let duration_t = Duration::parse(b"12:00:23", 0).unwrap(); let cases = vec![ ( ScalarFuncSig::CastIntAsInt, FieldTypeTp::LongLong, 
Some(FieldTypeFlag::UNSIGNED), vec![Datum::U64(1)], 1, ), ( ScalarFuncSig::CastIntAsInt, FieldTypeTp::LongLong, None, vec![Datum::I64(-1)], -1, ), ( ScalarFuncSig::CastStringAsInt, FieldTypeTp::String, None, vec![Datum::Bytes(b"1".to_vec())], 1, ), ( ScalarFuncSig::CastRealAsInt, FieldTypeTp::Double, None, vec![Datum::F64(1f64)], 1, ), ( ScalarFuncSig::CastRealAsInt, FieldTypeTp::Double, None, vec![Datum::F64(1234.000)], 1234, ), ( ScalarFuncSig::CastTimeAsInt, FieldTypeTp::DateTime, None, vec![Datum::Time(t)], time_int, ), ( ScalarFuncSig::CastDurationAsInt, FieldTypeTp::Duration, None, vec![Datum::Dur(duration_t)], 120023, ), ( ScalarFuncSig::CastJsonAsInt, FieldTypeTp::JSON, None, vec![Datum::Json(Json::I64(-1))], -1, ), ( ScalarFuncSig::CastJsonAsInt, FieldTypeTp::JSON, None, vec![Datum::Json(Json::U64(1))], 1, ), ( ScalarFuncSig::CastDecimalAsInt, FieldTypeTp::NewDecimal, None, vec![Datum::Dec(Decimal::from(1))], 1, ), ]; let null_cols = vec![Datum::Null]; for (sig, tp, flag, col, expect) in cases { let col_expr = col_expr(0, tp); let mut exp = scalar_func_expr(sig, &[col_expr]); if flag.is_some() { exp.mut_field_type() .as_mut_accessor() .set_flag(flag.unwrap()); } let e = Expression::build(&ctx, exp).unwrap(); let res = e.eval_int(&mut ctx, &col).unwrap(); assert_eq!(res.unwrap(), expect); // test None let res = e.eval_int(&mut ctx, &null_cols).unwrap(); assert!(res.is_none()); } let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flags(FLAG_OVERFLOW_AS_WARNING))); let cases = vec![ ( ScalarFuncSig::CastDecimalAsInt, FieldTypeTp::NewDecimal, vec![Datum::Dec( Decimal::from_str("1111111111111111111111111").unwrap(), )], 9223372036854775807, ), ( ScalarFuncSig::CastDecimalAsInt, FieldTypeTp::NewDecimal, vec![Datum::Dec( Decimal::from_str("-1111111111111111111111111").unwrap(), )], -9223372036854775808, ), ]; for (sig, tp, col, expect) in cases { let col_expr = col_expr(0, tp); let mut exp = scalar_func_expr(sig, &[col_expr]); let e = Expression::build(&ctx, exp).unwrap(); let res = e.eval_int(&mut ctx, &col).unwrap(); assert_eq!(res.unwrap(), expect); } } #[test] fn test_cast_as_real() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let t = Time::parse_utc_datetime("2012-12-12 12:00:23", 0).unwrap(); #[cfg_attr(feature = "cargo-clippy", allow(inconsistent_digit_grouping))] let int_t = 2012_12_12_12_00_23u64; let duration_t = Duration::parse(b"12:00:23", 0).unwrap(); let cases = vec![ ( ScalarFuncSig::CastIntAsReal, FieldTypeTp::LongLong, vec![Datum::I64(1)], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, 1f64, ), ( ScalarFuncSig::CastIntAsReal, FieldTypeTp::LongLong, vec![Datum::I64(1234)], 7, 3, 1234.000, ), ( ScalarFuncSig::CastStringAsReal, FieldTypeTp::String, vec![Datum::Bytes(b"1".to_vec())], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, 1f64, ), ( ScalarFuncSig::CastStringAsReal, FieldTypeTp::String, vec![Datum::Bytes(b"1234".to_vec())], 7, 3, 1234.000, ), ( ScalarFuncSig::CastRealAsReal, FieldTypeTp::Double, vec![Datum::F64(1f64)], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, 1f64, ), ( ScalarFuncSig::CastRealAsReal, FieldTypeTp::Double, vec![Datum::F64(1234.123)], 8, 4, 1234.1230, ), ( ScalarFuncSig::CastTimeAsReal, FieldTypeTp::DateTime, vec![Datum::Time(t.clone())], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, int_t as f64, ), ( ScalarFuncSig::CastTimeAsReal, FieldTypeTp::DateTime, vec![Datum::Time(t)], 15, 1, format!("{}.0", 
int_t).parse::<f64>().unwrap(), ), ( ScalarFuncSig::CastDurationAsReal, FieldTypeTp::Duration, vec![Datum::Dur(duration_t.clone())], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, 120023f64, ), ( ScalarFuncSig::CastDurationAsReal, FieldTypeTp::Duration, vec![Datum::Dur(duration_t)], 7, 1, 120023.0, ), ( ScalarFuncSig::CastJsonAsReal, FieldTypeTp::JSON, vec![Datum::Json(Json::I64(1))], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, 1f64, ), ( ScalarFuncSig::CastJsonAsReal, FieldTypeTp::JSON, vec![Datum::Json(Json::I64(1))], 2, 1, 1.0, ), ( ScalarFuncSig::CastDecimalAsReal, FieldTypeTp::NewDecimal, vec![Datum::Dec(Decimal::from(1))], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, 1f64, ), ( ScalarFuncSig::CastDecimalAsReal, FieldTypeTp::NewDecimal, vec![Datum::Dec(Decimal::from(1))], 2, 1, 1.0, ), ]; let null_cols = vec![Datum::Null]; for (sig, tp, col, flen, decimal, expect) in cases { let col_expr = col_expr(0, tp); let mut exp = scalar_func_expr(sig, &[col_expr]); exp.mut_field_type() .as_mut_accessor() .set_flen(flen) .set_decimal(decimal); let e = Expression::build(&ctx, exp).unwrap(); let res = e.eval_real(&mut ctx, &col).unwrap(); assert_eq!(format!("{}", res.unwrap()), format!("{}", expect)); // test None let res = e.eval_real(&mut ctx, &null_cols).unwrap(); assert!(res.is_none()); } } #[test] fn test_cast_as_decimal() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let t = Time::parse_utc_datetime("2012-12-12 12:00:23", 0).unwrap(); let int_t = 20121212120023u64; let duration_t = Duration::parse(b"12:00:23", 0).unwrap(); let cases = vec![ ( ScalarFuncSig::CastIntAsDecimal, FieldTypeTp::LongLong, vec![Datum::I64(1)], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(1), ), ( ScalarFuncSig::CastIntAsDecimal, FieldTypeTp::LongLong, vec![Datum::I64(1234)], 7, 3, Decimal::from_f64(1234.000).unwrap(), ), ( ScalarFuncSig::CastStringAsDecimal, FieldTypeTp::String, vec![Datum::Bytes(b"1".to_vec())], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(1), ), ( ScalarFuncSig::CastStringAsDecimal, FieldTypeTp::String, vec![Datum::Bytes(b"1234".to_vec())], 7, 3, Decimal::from_f64(1234.000).unwrap(), ), ( ScalarFuncSig::CastRealAsDecimal, FieldTypeTp::Double, vec![Datum::F64(1f64)], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(1), ), ( ScalarFuncSig::CastRealAsDecimal, FieldTypeTp::Double, vec![Datum::F64(1234.123)], 8, 4, Decimal::from_f64(1234.1230).unwrap(), ), ( ScalarFuncSig::CastTimeAsDecimal, FieldTypeTp::DateTime, vec![Datum::Time(t.clone())], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(int_t), ), ( ScalarFuncSig::CastTimeAsDecimal, FieldTypeTp::DateTime, vec![Datum::Time(t)], 15, 1, format!("{}.0", int_t).parse::<Decimal>().unwrap(), ), ( ScalarFuncSig::CastDurationAsDecimal, FieldTypeTp::Duration, vec![Datum::Dur(duration_t.clone())], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(120023), ), ( ScalarFuncSig::CastDurationAsDecimal, FieldTypeTp::Duration, vec![Datum::Dur(duration_t)], 7, 1, Decimal::from_f64(120023.0).unwrap(), ), ( ScalarFuncSig::CastJsonAsDecimal, FieldTypeTp::JSON, vec![Datum::Json(Json::I64(1))], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(1), ), ( ScalarFuncSig::CastJsonAsDecimal, FieldTypeTp::JSON, vec![Datum::Json(Json::I64(1))], 2, 1, Decimal::from_f64(1.0).unwrap(), 
), ( ScalarFuncSig::CastDecimalAsDecimal, FieldTypeTp::NewDecimal, vec![Datum::Dec(Decimal::from(1))], cop_datatype::UNSPECIFIED_LENGTH, cop_datatype::UNSPECIFIED_LENGTH, Decimal::from(1), ), ( ScalarFuncSig::CastDecimalAsDecimal, FieldTypeTp::NewDecimal, vec![Datum::Dec(Decimal::from(1))], 2, 1, Decimal::from_f64(1.0).unwrap(), ), ]; let null_cols = vec![Datum::Null]; for (sig, tp, col, flen, decimal, expect) in cases { let col_expr = col_expr(0, tp); let mut exp = scalar_func_expr(sig, &[col_expr]); exp.mut_field_type() .as_mut_accessor() .set_flen(flen) .set_decimal(decimal); let e = Expression::build(&ctx, exp).unwrap(); let res = e.eval_decimal(&mut ctx, &col).unwrap(); assert_eq!(res.unwrap().into_owned(), expect); // test None let res = e.eval_decimal(&mut ctx, &null_cols).unwrap(); assert!(res.is_none()); } } #[test] fn test_cast_as_str() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let t_str = "2012-12-12 12:00:23"; let t = Time::parse_utc_datetime(t_str, 0).unwrap(); let dur_str = b"12:00:23"; let duration_t = Duration::parse(dur_str, 0).unwrap(); let s = "您好world"; let exp_s = "您好w"; let cases = vec![ ( ScalarFuncSig::CastIntAsString, FieldTypeTp::LongLong, charset::CHARSET_UTF8, None, vec![Datum::I64(1)], cop_datatype::UNSPECIFIED_LENGTH, b"1".to_vec(), ), ( ScalarFuncSig::CastIntAsString, FieldTypeTp::LongLong, charset::CHARSET_UTF8, None, vec![Datum::I64(1234)], 3, b"123".to_vec(), ), ( ScalarFuncSig::CastStringAsString, FieldTypeTp::String, charset::CHARSET_ASCII, Some(FieldTypeTp::String), vec![Datum::Bytes(b"1234".to_vec())], 6, b"1234\0\0".to_vec(), ), ( ScalarFuncSig::CastStringAsString, FieldTypeTp::String, charset::CHARSET_UTF8, None, vec![Datum::Bytes(s.as_bytes().to_vec())], 3, exp_s.as_bytes().to_vec(), ), ( ScalarFuncSig::CastRealAsString, FieldTypeTp::Double, charset::CHARSET_UTF8, None, vec![Datum::F64(1f64)], cop_datatype::UNSPECIFIED_LENGTH, b"1".to_vec(), ), ( ScalarFuncSig::CastRealAsString, FieldTypeTp::Double, charset::CHARSET_UTF8, None, vec![Datum::F64(1234.123)], 3, b"123".to_vec(), ), ( ScalarFuncSig::CastTimeAsString, FieldTypeTp::DateTime, charset::CHARSET_UTF8, None, vec![Datum::Time(t.clone())], cop_datatype::UNSPECIFIED_LENGTH, t_str.as_bytes().to_vec(), ), ( ScalarFuncSig::CastTimeAsString, FieldTypeTp::DateTime, charset::CHARSET_UTF8, None, vec![Datum::Time(t)], 3, t_str[0..3].as_bytes().to_vec(), ), ( ScalarFuncSig::CastDurationAsString, FieldTypeTp::Duration, charset::CHARSET_UTF8, None, vec![Datum::Dur(duration_t.clone())], cop_datatype::UNSPECIFIED_LENGTH, dur_str.to_vec(), ), ( ScalarFuncSig::CastDurationAsString, FieldTypeTp::Duration, charset::CHARSET_UTF8, None, vec![Datum::Dur(duration_t)], 3, dur_str[0..3].to_vec(), ), ( ScalarFuncSig::CastJsonAsString, FieldTypeTp::JSON, charset::CHARSET_UTF8, None, vec![Datum::Json(Json::I64(1))], cop_datatype::UNSPECIFIED_LENGTH, b"1".to_vec(), ), ( ScalarFuncSig::CastJsonAsString, FieldTypeTp::JSON, charset::CHARSET_UTF8, None, vec![Datum::Json(Json::I64(1234))], 2, b"12".to_vec(), ), ( ScalarFuncSig::CastDecimalAsString, FieldTypeTp::NewDecimal, charset::CHARSET_UTF8, None, vec![Datum::Dec(Decimal::from(1))], cop_datatype::UNSPECIFIED_LENGTH, b"1".to_vec(), ), ( ScalarFuncSig::CastDecimalAsString, FieldTypeTp::NewDecimal, charset::CHARSET_UTF8, None, vec![Datum::Dec(Decimal::from(1234))], 2, b"12".to_vec(), ), ]; let null_cols = vec![Datum::Null]; for (sig, tp, charset, to_tp, col, flen, exp) in cases { let col_expr = col_expr(0, tp); let mut ex = 
scalar_func_expr(sig, &[col_expr]); ex.mut_field_type() .as_mut_accessor() .set_flen(flen) .set_decimal(cop_datatype::UNSPECIFIED_LENGTH); if to_tp.is_some() { ex.mut_field_type().as_mut_accessor().set_tp(to_tp.unwrap()); } ex.mut_field_type().set_charset(String::from(charset)); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_string(&mut ctx, &col).unwrap(); assert_eq!( res.unwrap().into_owned(), exp, "sig: {:?} with flen {} failed", sig, flen ); // test None let res = e.eval_string(&mut ctx, &null_cols).unwrap(); assert!(res.is_none()); } } #[test] fn test_cast_as_time() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let today = Utc::now(); let t_date_str = format!("{}", today.format("%Y-%m-%d")); let t_time_str = format!("{}", today.format("%Y-%m-%d %H:%M:%S")); let t_time = Time::parse_utc_datetime(t_time_str.as_ref(), 0).unwrap(); let t_date = { let mut date = t_time.clone(); date.set_time_type(TimeType::Date).unwrap(); date }; let t_int = format!("{}", today.format("%Y%m%d%H%M%S")) .parse::<u64>() .unwrap(); let dur_str = "12:00:23"; let duration_t = Duration::parse(dur_str.as_bytes(), 0).unwrap(); let dur_to_time_str = format!("{} 12:00:23", t_date_str); let dur_to_time = Time::parse_utc_datetime(&dur_to_time_str, 0).unwrap(); let mut dur_to_date = dur_to_time.clone(); dur_to_date.set_time_type(TimeType::Date).unwrap(); let json_cols = vec![Datum::Json(Json::String(t_time_str.clone()))]; let int_cols = vec![Datum::U64(t_int)]; let str_cols = vec![Datum::Bytes(t_time_str.as_bytes().to_vec())]; let f64_cols = vec![Datum::F64(t_int as f64)]; let time_cols = vec![Datum::Time(t_time.clone())]; let duration_cols = vec![Datum::Dur(duration_t)]; let dec_cols = vec![Datum::Dec(Decimal::from(t_int))]; let cases = vec![ ( // cast int as time ScalarFuncSig::CastIntAsTime, FieldTypeTp::LongLong, &int_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &t_time, ), ( // cast int as datetime(6) ScalarFuncSig::CastIntAsTime, FieldTypeTp::LongLong, &int_cols, mysql::MAX_FSP, FieldTypeTp::DateTime, &t_time, ), ( ScalarFuncSig::CastStringAsTime, FieldTypeTp::String, &str_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &t_time, ), ( // cast string as datetime(6) ScalarFuncSig::CastStringAsTime, FieldTypeTp::String, &str_cols, mysql::MAX_FSP, FieldTypeTp::DateTime, &t_time, ), ( ScalarFuncSig::CastRealAsTime, FieldTypeTp::Double, &f64_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &t_time, ), ( // cast real as date(0) ScalarFuncSig::CastRealAsTime, FieldTypeTp::Double, &f64_cols, mysql::DEFAULT_FSP, FieldTypeTp::Date, &t_date, ), ( ScalarFuncSig::CastTimeAsTime, FieldTypeTp::DateTime, &time_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &t_time, ), ( // cast time as date ScalarFuncSig::CastTimeAsTime, FieldTypeTp::DateTime, &time_cols, mysql::DEFAULT_FSP, FieldTypeTp::Date, &t_date, ), ( ScalarFuncSig::CastDurationAsTime, FieldTypeTp::Duration, &duration_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &dur_to_time, ), ( // cast duration as date ScalarFuncSig::CastDurationAsTime, FieldTypeTp::Duration, &duration_cols, mysql::MAX_FSP, FieldTypeTp::Date, &dur_to_date, ), ( ScalarFuncSig::CastJsonAsTime, FieldTypeTp::JSON, &json_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &t_time, ), ( ScalarFuncSig::CastJsonAsTime, FieldTypeTp::JSON, &json_cols, mysql::DEFAULT_FSP, FieldTypeTp::Date, &t_date, ), ( ScalarFuncSig::CastDecimalAsTime, FieldTypeTp::NewDecimal, &dec_cols, mysql::UNSPECIFIED_FSP, FieldTypeTp::DateTime, &t_time, ), ( // cast 
decimal as date ScalarFuncSig::CastDecimalAsTime, FieldTypeTp::NewDecimal, &dec_cols, mysql::DEFAULT_FSP, FieldTypeTp::Date, &t_date, ), ]; let null_cols = vec![Datum::Null]; for (sig, tp, col, to_fsp, to_tp, exp) in cases { let col_expr = col_expr(0, tp); let mut ex = scalar_func_expr(sig, &[col_expr]); ex.mut_field_type() .as_mut_accessor() .set_decimal(isize::from(to_fsp)) .set_tp(to_tp); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_time(&mut ctx, col).unwrap(); let data = res.unwrap().into_owned(); let mut expt = exp.clone(); if to_fsp != mysql::UNSPECIFIED_FSP { expt.set_fsp(to_fsp as u8); } assert_eq!( data.to_string(), expt.to_string(), "sig: {:?} with to tp {} and fsp {} failed", sig, to_tp, to_fsp, ); // test None let res = e.eval_time(&mut ctx, &null_cols).unwrap(); assert!(res.is_none()); } } #[test] fn test_cast_as_duration() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let today = Utc::now(); let t_date_str = format!("{}", today.format("%Y-%m-%d")); let dur_str = "12:00:23"; let dur_int = 120023u64; let duration = Duration::parse(dur_str.as_bytes(), 0).unwrap(); let dur_to_time_str = format!("{} 12:00:23", t_date_str); let dur_to_time = Time::parse_utc_datetime(&dur_to_time_str, 0).unwrap(); let mut dur_to_date = dur_to_time.clone(); dur_to_date.set_time_type(TimeType::Date).unwrap(); let json_cols = vec![Datum::Json(Json::String(String::from(dur_str)))]; let int_cols = vec![Datum::U64(dur_int)]; let str_cols = vec![Datum::Bytes(dur_str.as_bytes().to_vec())]; let f64_cols = vec![Datum::F64(dur_int as f64)]; let time_cols = vec![Datum::Time(dur_to_time)]; let duration_cols = vec![Datum::Dur(duration.clone())]; let dec_cols = vec![Datum::Dec(Decimal::from(dur_int))]; let cases = vec![ ( // cast int as duration ScalarFuncSig::CastIntAsDuration, FieldTypeTp::LongLong, &int_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( // cast int as duration ScalarFuncSig::CastIntAsDuration, FieldTypeTp::LongLong, &int_cols, mysql::MAX_FSP, &duration, ), ( // string as duration ScalarFuncSig::CastStringAsDuration, FieldTypeTp::String, &str_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( // cast string as duration ScalarFuncSig::CastStringAsDuration, FieldTypeTp::String, &str_cols, 4, &duration, ), ( // cast real as duration ScalarFuncSig::CastRealAsDuration, FieldTypeTp::Double, &f64_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( // cast real as duration ScalarFuncSig::CastRealAsDuration, FieldTypeTp::Double, &f64_cols, 1, &duration, ), ( // cast time as duration ScalarFuncSig::CastTimeAsDuration, FieldTypeTp::DateTime, &time_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( // cast time as duration ScalarFuncSig::CastTimeAsDuration, FieldTypeTp::DateTime, &time_cols, 5, &duration, ), ( ScalarFuncSig::CastDurationAsDuration, FieldTypeTp::Duration, &duration_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( // cast duration as duration ScalarFuncSig::CastDurationAsDuration, FieldTypeTp::Duration, &duration_cols, mysql::MAX_FSP, &duration, ), ( // cast json as duration ScalarFuncSig::CastJsonAsDuration, FieldTypeTp::JSON, &json_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( ScalarFuncSig::CastJsonAsDuration, FieldTypeTp::JSON, &json_cols, 5, &duration, ), ( // cast decimal as duration ScalarFuncSig::CastDecimalAsDuration, FieldTypeTp::NewDecimal, &dec_cols, mysql::UNSPECIFIED_FSP, &duration, ), ( // cast decimal as duration ScalarFuncSig::CastDecimalAsDuration, FieldTypeTp::NewDecimal, &dec_cols, 2, &duration, ), ]; let null_cols = vec![Datum::Null]; for (sig, 
tp, col, to_fsp, exp) in cases { let col_expr = col_expr(0, tp); let mut ex = scalar_func_expr(sig, &[col_expr]); ex.mut_field_type() .as_mut_accessor() .set_decimal(isize::from(to_fsp)); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_duration(&mut ctx, col).unwrap(); let data = res.unwrap().into_owned(); let mut expt = exp.clone(); if to_fsp != mysql::UNSPECIFIED_FSP { expt.fsp = to_fsp as u8; } assert_eq!( data.to_string(), expt.to_string(), "sig: {:?} with fsp {} failed", sig, to_fsp, ); // test None let res = e.eval_duration(&mut ctx, &null_cols).unwrap(); assert!(res.is_none()); } } #[test] fn test_cast_int_as_json() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let cases = vec![ ( Some(FieldTypeFlag::UNSIGNED), vec![Datum::U64(32)], Some(Json::U64(32)), ), ( Some(FieldTypeFlag::UNSIGNED | FieldTypeFlag::IS_BOOLEAN), vec![Datum::U64(1)], Some(Json::Boolean(true)), ), ( Some(FieldTypeFlag::UNSIGNED | FieldTypeFlag::IS_BOOLEAN), vec![Datum::I64(0)], Some(Json::Boolean(false)), ), (None, vec![Datum::I64(-1)], Some(Json::I64(-1))), (None, vec![Datum::Null], None), ]; for (flag, cols, exp) in cases { let mut col_expr = col_expr(0, FieldTypeTp::LongLong); if flag.is_some() { col_expr .mut_field_type() .as_mut_accessor() .set_flag(flag.unwrap()); } let ex = scalar_func_expr(ScalarFuncSig::CastIntAsJson, &[col_expr]); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_cast_real_as_json() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let cases = vec![ (vec![Datum::F64(32.0001)], Some(Json::Double(32.0001))), (vec![Datum::Null], None), ]; for (cols, exp) in cases { let col_expr = col_expr(0, FieldTypeTp::Double); let ex = scalar_func_expr(ScalarFuncSig::CastRealAsJson, &[col_expr]); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_cast_decimal_as_json() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let cases = vec![ ( vec![Datum::Dec(Decimal::from_f64(32.0001).unwrap())], Some(Json::Double(32.0001)), ), (vec![Datum::Null], None), ]; for (cols, exp) in cases { let col_expr = col_expr(0, FieldTypeTp::NewDecimal); let ex = scalar_func_expr(ScalarFuncSig::CastDecimalAsJson, &[col_expr]); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_cast_str_as_json() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let cases = vec![ ( false, vec![Datum::Bytes(b"[1,2,3]".to_vec())], Some(Json::String(String::from("[1,2,3]"))), ), ( true, vec![Datum::Bytes(b"[1,2,3]".to_vec())], Some(Json::Array(vec![Json::I64(1), Json::I64(2), Json::I64(3)])), ), (false, vec![Datum::Null], None), (true, vec![Datum::Null], None), ]; for (by_parse, cols, exp) in cases { let col_expr = col_expr(0, FieldTypeTp::String); let mut ex = scalar_func_expr(ScalarFuncSig::CastStringAsJson, &[col_expr]); if by_parse { let mut flag = ex.get_field_type().flag(); flag |= FieldTypeFlag::PARSE_TO_JSON; ex.mut_field_type().as_mut_accessor().set_flag(flag); } let e = Expression::build(&ctx, ex).unwrap(); let 
res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_cast_time_as_json() { let cfg = EvalConfig::default_for_test(); let mut ctx = EvalContext::new(Arc::new(cfg)); let time_str = "2012-12-12 11:11:11"; let date_str = "2012-12-12"; let tz = Tz::utc(); let time = Time::parse_utc_datetime(time_str, mysql::DEFAULT_FSP).unwrap(); let time_stamp = { let t = time.to_packed_u64(); Time::from_packed_u64(t, TimeType::Timestamp, mysql::DEFAULT_FSP, tz).unwrap() }; let date = { let mut t = time.clone(); t.set_time_type(TimeType::Date).unwrap(); t }; let cases = vec![ ( FieldTypeTp::DateTime, vec![Datum::Time(time)], Some(Json::String(format!("{}.000000", time_str))), ), ( FieldTypeTp::Timestamp, vec![Datum::Time(time_stamp)], Some(Json::String(format!("{}.000000", time_str))), ), ( FieldTypeTp::Date, vec![Datum::Time(date)], Some(Json::String(String::from(date_str))), ), (FieldTypeTp::Unspecified, vec![Datum::Null], None), ]; for (tp, cols, exp) in cases { let col_expr = col_expr(0, tp); let ex = scalar_func_expr(ScalarFuncSig::CastTimeAsJson, &[col_expr]); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_cast_duration_as_json() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let dur_str = "11:12:08"; let dur_str_expect = "11:12:08.000000"; let cases = vec![ ( vec![Datum::Dur(Duration::parse(dur_str.as_bytes(), 0).unwrap())], Some(Json::String(String::from(dur_str_expect))), ), (vec![Datum::Null], None), ]; for (cols, exp) in cases { let col_expr = col_expr(0, FieldTypeTp::String); let ex = scalar_func_expr(ScalarFuncSig::CastDurationAsJson, &[col_expr]); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_cast_json_as_json() { let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test())); let cases = vec![ ( vec![Datum::Json(Json::Boolean(true))], Some(Json::Boolean(true)), ), (vec![Datum::Null], None), ]; for (cols, exp) in cases { let col_expr = col_expr(0, FieldTypeTp::String); let ex = scalar_func_expr(ScalarFuncSig::CastJsonAsJson, &[col_expr]); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_json(&mut ctx, &cols).unwrap(); if exp.is_none() { assert!(res.is_none()); continue; } assert_eq!(res.unwrap().into_owned(), exp.unwrap()); } } #[test] fn test_dec_as_int_with_overflow() { let cases = vec![ ( FieldTypeFlag::empty(), vec![Datum::Dec( Decimal::from_f64(i64::MAX as f64 + 100.5).unwrap(), )], i64::MAX, ), ( FieldTypeFlag::UNSIGNED, vec![Datum::Dec( Decimal::from_f64(u64::MAX as f64 + 100.5).unwrap(), )], u64::MAX as i64, ), ]; for (flag, cols, exp) in cases { let mut col_expr = col_expr(0, FieldTypeTp::NewDecimal); let mut ex = scalar_func_expr(ScalarFuncSig::CastDecimalAsInt, &[col_expr]); ex.mut_field_type().as_mut_accessor().set_flag(flag); // test with overflow as warning let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flags(FLAG_OVERFLOW_AS_WARNING))); let e = Expression::build(&ctx, ex.clone()).unwrap(); let res = e.eval_int(&mut ctx, &cols).unwrap().unwrap(); assert_eq!(res, exp); assert_eq!(ctx.warnings.warning_cnt, 1); assert_eq!( ctx.warnings.warnings[0].get_code(), 
ERR_TRUNCATE_WRONG_VALUE ); // test overflow as error ctx = EvalContext::new(Arc::new(EvalConfig::default())); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_int(&mut ctx, &cols); assert!(res.is_err()); } } #[test] fn test_str_as_int() { let cases = vec![ ( FieldTypeFlag::empty(), vec![Datum::Bytes(b"18446744073709551615".to_vec())], u64::MAX as i64, 1, ), ( FieldTypeFlag::UNSIGNED, vec![Datum::Bytes(b"18446744073709551615".to_vec())], u64::MAX as i64, 0, ), ( FieldTypeFlag::empty(), vec![Datum::Bytes(b"-1".to_vec())], -1, 1, ), ]; for (flag, cols, exp, warnings_cnt) in cases { let mut col_expr = col_expr(0, FieldTypeTp::String); let mut ex = scalar_func_expr(ScalarFuncSig::CastStringAsInt, &[col_expr]); ex.mut_field_type().as_mut_accessor().set_flag(flag); let mut ctx = EvalContext::new(Arc::new(EvalConfig::default())); let e = Expression::build(&ctx, ex.clone()).unwrap(); let res = e.eval_int(&mut ctx, &cols).unwrap().unwrap(); assert_eq!(res, exp); assert_eq!(ctx.warnings.warning_cnt, warnings_cnt); if warnings_cnt > 0 { assert_eq!(ctx.warnings.warnings[0].get_code(), ERR_UNKNOWN); } } let cases = vec![ ( vec![Datum::Bytes(b"-9223372036854775810".to_vec())], i64::MIN, ), ( vec![Datum::Bytes(b"18446744073709551616".to_vec())], u64::MAX as i64, ), ]; for (cols, exp) in cases { let mut col_expr = col_expr(0, FieldTypeTp::String); let ex = scalar_func_expr(ScalarFuncSig::CastStringAsInt, &[col_expr]); // test with overflow as warning && in select stmt let mut cfg = EvalConfig::new(); cfg.set_overflow_as_warning(true).set_in_select_stmt(true); let mut ctx = EvalContext::new(Arc::new(cfg)); let e = Expression::build(&ctx, ex.clone()).unwrap(); let res = e.eval_int(&mut ctx, &cols).unwrap().unwrap(); assert_eq!(res, exp); assert_eq!(ctx.warnings.warning_cnt, 1); assert_eq!( ctx.warnings.warnings[0].get_code(), ERR_TRUNCATE_WRONG_VALUE ); // test overflow as error ctx = EvalContext::new(Arc::new(EvalConfig::default())); let e = Expression::build(&ctx, ex).unwrap(); let res = e.eval_int(&mut ctx, &cols); assert!(res.is_err()); } } // This test should work when NumberToDuration ported from tidb. // #[test] // fn test_int_as_duration_with_overflow() { // let cols = vec![Datum::I64(3020400)]; // let col_expr = col_expr(0, i32::from(FieldTypeTp::LongLong)); // let ex = scalar_func_expr(ScalarFuncSig::CastIntAsDuration, &[col_expr]); // // test with overflow as warning // let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flags(FLAG_OVERFLOW_AS_WARNING))); // let e = Expression::build(&ctx, ex.clone()).unwrap(); // let res = e.eval_duration(&mut ctx, &cols).unwrap(); // assert!(res.is_none()); // assert_eq!(ctx.warnings.warning_cnt, 1); // assert_eq!(ctx.warnings.warnings[0].get_code(), ERR_DATA_OUT_OF_RANGE); // // test overflow as error // ctx = EvalContext::new(Arc::new(EvalConfig::default())); // let e = Expression::build(&ctx, ex).unwrap(); // let res = e.eval_duration(&mut ctx, &cols); // assert!(res.is_err()); // } }
acomp_ctrl.rs
#[doc = "Register `acomp_ctrl` reader"] pub struct R(crate::R<ACOMP_CTRL_SPEC>); impl core::ops::Deref for R { type Target = crate::R<ACOMP_CTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<ACOMP_CTRL_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<ACOMP_CTRL_SPEC>) -> Self { R(reader) } } #[doc = "Register `acomp_ctrl` writer"] pub struct W(crate::W<ACOMP_CTRL_SPEC>); impl core::ops::Deref for W { type Target = crate::W<ACOMP_CTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<ACOMP_CTRL_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<ACOMP_CTRL_SPEC>) -> Self { W(writer) } } #[doc = "Field `acomp_reserved` reader - "] pub struct ACOMP_RESERVED_R(crate::FieldReader<u8, u8>); impl ACOMP_RESERVED_R { pub(crate) fn new(bits: u8) -> Self { ACOMP_RESERVED_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP_RESERVED_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp_reserved` writer - "] pub struct ACOMP_RESERVED_W<'a> { w: &'a mut W, } impl<'a> ACOMP_RESERVED_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 24)) | ((value as u32 & 0xff) << 24); self.w } } #[doc = "Field `acomp0_out_raw` reader - "] pub struct ACOMP0_OUT_RAW_R(crate::FieldReader<bool, bool>); impl ACOMP0_OUT_RAW_R { pub(crate) fn new(bits: bool) -> Self { ACOMP0_OUT_RAW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP0_OUT_RAW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp0_out_raw` writer - "] pub struct ACOMP0_OUT_RAW_W<'a> { w: &'a mut W, } impl<'a> ACOMP0_OUT_RAW_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | ((value as u32 & 0x01) << 19); self.w } } #[doc = "Field `acomp1_out_raw` reader - "] pub struct ACOMP1_OUT_RAW_R(crate::FieldReader<bool, bool>); impl ACOMP1_OUT_RAW_R { pub(crate) fn new(bits: bool) -> Self { ACOMP1_OUT_RAW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP1_OUT_RAW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp1_out_raw` writer - "] pub struct ACOMP1_OUT_RAW_W<'a> { w: &'a mut W, } impl<'a> ACOMP1_OUT_RAW_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | ((value as u32 & 0x01) << 17); self.w } } #[doc = "Field `acomp0_test_sel` reader - "] pub struct ACOMP0_TEST_SEL_R(crate::FieldReader<u8, u8>); impl ACOMP0_TEST_SEL_R { pub(crate) fn new(bits: u8) -> Self { ACOMP0_TEST_SEL_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for 
ACOMP0_TEST_SEL_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp0_test_sel` writer - "] pub struct ACOMP0_TEST_SEL_W<'a> { w: &'a mut W, } impl<'a> ACOMP0_TEST_SEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 12)) | ((value as u32 & 0x03) << 12); self.w } } #[doc = "Field `acomp1_test_sel` reader - "] pub struct ACOMP1_TEST_SEL_R(crate::FieldReader<u8, u8>); impl ACOMP1_TEST_SEL_R { pub(crate) fn new(bits: u8) -> Self { ACOMP1_TEST_SEL_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP1_TEST_SEL_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp1_test_sel` writer - "] pub struct ACOMP1_TEST_SEL_W<'a> { w: &'a mut W, } impl<'a> ACOMP1_TEST_SEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 10)) | ((value as u32 & 0x03) << 10); self.w } } #[doc = "Field `acomp0_test_en` reader - "] pub struct ACOMP0_TEST_EN_R(crate::FieldReader<bool, bool>); impl ACOMP0_TEST_EN_R { pub(crate) fn new(bits: bool) -> Self { ACOMP0_TEST_EN_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP0_TEST_EN_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp0_test_en` writer - "] pub struct ACOMP0_TEST_EN_W<'a> { w: &'a mut W, } impl<'a> ACOMP0_TEST_EN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | ((value as u32 & 0x01) << 9); self.w } } #[doc = "Field `acomp1_test_en` reader - "] pub struct ACOMP1_TEST_EN_R(crate::FieldReader<bool, bool>); impl ACOMP1_TEST_EN_R { pub(crate) fn new(bits: bool) -> Self { ACOMP1_TEST_EN_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP1_TEST_EN_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp1_test_en` writer - "] pub struct ACOMP1_TEST_EN_W<'a> { w: &'a mut W, } impl<'a> ACOMP1_TEST_EN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | ((value as u32 & 0x01) << 8); self.w } } #[doc = "Field `acomp0_rstn_ana` reader - "] pub struct ACOMP0_RSTN_ANA_R(crate::FieldReader<bool, bool>); impl ACOMP0_RSTN_ANA_R { pub(crate) fn new(bits: bool) -> Self { ACOMP0_RSTN_ANA_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP0_RSTN_ANA_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp0_rstn_ana` writer - "] pub struct ACOMP0_RSTN_ANA_W<'a> { w: &'a mut W, } impl<'a> ACOMP0_RSTN_ANA_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { 
self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1); self.w } } #[doc = "Field `acomp1_rstn_ana` reader - "] pub struct ACOMP1_RSTN_ANA_R(crate::FieldReader<bool, bool>); impl ACOMP1_RSTN_ANA_R { pub(crate) fn new(bits: bool) -> Self { ACOMP1_RSTN_ANA_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACOMP1_RSTN_ANA_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `acomp1_rstn_ana` writer - "] pub struct ACOMP1_RSTN_ANA_W<'a> { w: &'a mut W, } impl<'a> ACOMP1_RSTN_ANA_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01); self.w } } impl R { #[doc = "Bits 24:31"] #[inline(always)] pub fn acomp_reserved(&self) -> ACOMP_RESERVED_R { ACOMP_RESERVED_R::new(((self.bits >> 24) & 0xff) as u8) } #[doc = "Bit 19"] #[inline(always)] pub fn acomp0_out_raw(&self) -> ACOMP0_OUT_RAW_R { ACOMP0_OUT_RAW_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 17"] #[inline(always)] pub fn acomp1_out_raw(&self) -> ACOMP1_OUT_RAW_R { ACOMP1_OUT_RAW_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bits 12:13"] #[inline(always)] pub fn acomp0_test_sel(&self) -> ACOMP0_TEST_SEL_R { ACOMP0_TEST_SEL_R::new(((self.bits >> 12) & 0x03) as u8) } #[doc = "Bits 10:11"] #[inline(always)] pub fn acomp1_test_sel(&self) -> ACOMP1_TEST_SEL_R { ACOMP1_TEST_SEL_R::new(((self.bits >> 10) & 0x03) as u8) } #[doc = "Bit 9"] #[inline(always)] pub fn acomp0_test_en(&self) -> ACOMP0_TEST_EN_R { ACOMP0_TEST_EN_R::new(((self.bits >> 9) & 0x01) != 0) }
#[doc = "Bit 8"] #[inline(always)] pub fn acomp1_test_en(&self) -> ACOMP1_TEST_EN_R { ACOMP1_TEST_EN_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 1"] #[inline(always)] pub fn acomp0_rstn_ana(&self) -> ACOMP0_RSTN_ANA_R { ACOMP0_RSTN_ANA_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0"] #[inline(always)] pub fn acomp1_rstn_ana(&self) -> ACOMP1_RSTN_ANA_R { ACOMP1_RSTN_ANA_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bits 24:31"] #[inline(always)] pub fn acomp_reserved(&mut self) -> ACOMP_RESERVED_W { ACOMP_RESERVED_W { w: self } } #[doc = "Bit 19"] #[inline(always)] pub fn acomp0_out_raw(&mut self) -> ACOMP0_OUT_RAW_W { ACOMP0_OUT_RAW_W { w: self } } #[doc = "Bit 17"] #[inline(always)] pub fn acomp1_out_raw(&mut self) -> ACOMP1_OUT_RAW_W { ACOMP1_OUT_RAW_W { w: self } } #[doc = "Bits 12:13"] #[inline(always)] pub fn acomp0_test_sel(&mut self) -> ACOMP0_TEST_SEL_W { ACOMP0_TEST_SEL_W { w: self } } #[doc = "Bits 10:11"] #[inline(always)] pub fn acomp1_test_sel(&mut self) -> ACOMP1_TEST_SEL_W { ACOMP1_TEST_SEL_W { w: self } } #[doc = "Bit 9"] #[inline(always)] pub fn acomp0_test_en(&mut self) -> ACOMP0_TEST_EN_W { ACOMP0_TEST_EN_W { w: self } } #[doc = "Bit 8"] #[inline(always)] pub fn acomp1_test_en(&mut self) -> ACOMP1_TEST_EN_W { ACOMP1_TEST_EN_W { w: self } } #[doc = "Bit 1"] #[inline(always)] pub fn acomp0_rstn_ana(&mut self) -> ACOMP0_RSTN_ANA_W { ACOMP0_RSTN_ANA_W { w: self } } #[doc = "Bit 0"] #[inline(always)] pub fn acomp1_rstn_ana(&mut self) -> ACOMP1_RSTN_ANA_W { ACOMP1_RSTN_ANA_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "acomp_ctrl.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [acomp_ctrl](index.html) module"] pub struct ACOMP_CTRL_SPEC; impl crate::RegisterSpec for ACOMP_CTRL_SPEC { type Ux = u32; } #[doc = "`read()` method returns [acomp_ctrl::R](R) reader structure"] impl crate::Readable for ACOMP_CTRL_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [acomp_ctrl::W](W) writer structure"] impl crate::Writable for ACOMP_CTRL_SPEC { type Writer = W; } #[doc = "`reset()` method sets acomp_ctrl to value 0"] impl crate::Resettable for ACOMP_CTRL_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
util.rs
//! Miscellaneous functions that don't fit in any other (rust code) module. use crate::{early_logging::KConsole, IGNITED_CONFIG, IGNITED_TARGET_ROOT_PATH, PROGRAM_NAME}; use cstr::cstr; use nix::{ errno::Errno, libc::{dev_t, mode_t, stat as FileStat, S_IFDIR, S_IFMT}, sys::{ memfd::{memfd_create, MemFdCreateFlag}, stat::{lstat as lstat_fn, stat as stat_fn, Mode}, statfs::{self as StatFs, statfs, FsType as StatFsType}, utsname::uname }, unistd::{execv, mkdir}, }; use precisej_printable_errno::{ErrnoResult, printable_error, PrintableErrno}; use std::{ convert::Infallible, fs::{read_dir, remove_dir, remove_file, File}, ffi::{CStr, OsStr, OsString}, os::unix::{ffi::OsStrExt, io::FromRawFd}, path::{Path, PathBuf}, process::id as getpid, }; /// Remove ramfs without touching the target root. /// /// We check that the current process is PID1 and that the current initramfs root /// is either a `ramfs` or a `tmpfs` with the appropriate initramfs files present. pub fn delete_ramfs() -> Result<(), PrintableErrno<String>> { // see statfs(2) const RAMFS_MAGIC: StatFsType = StatFsType(0x858458f6); const TMPFS_MAGIC: StatFsType = StatFsType(0x01021994); fn is_dir(mode: mode_t) -> bool { mode & S_IFMT == S_IFDIR } fn delete_recursive(path: &Path, root_dev: dev_t) -> Result<(), PrintableErrno<String>> { let path_stat: FileStat = lstat_fn(path).printable( PROGRAM_NAME, format!("unable to stat {}", path.display()) )?; if path_stat.st_dev != root_dev { // outside the root initramfs, leave it alone return Ok(()) } if is_dir(path_stat.st_mode) { let path_dir_entries = read_dir(path).map_err(|io| { printable_error( PROGRAM_NAME, format!("unable to read {}: {}", path.display(), io), ) })?; // skip "." and ".." so we only recurse into real entries for entry in path_dir_entries.flatten() { if entry.file_name() != "." && entry.file_name() != ".."
{ delete_recursive(&entry.path(), root_dev)?; } } if path != Path::new("/") { // delete directory remove_dir(path).map_err(|io| { printable_error( PROGRAM_NAME, format!("unable to remove directory {}: {}", path.display(), io), ) })?; } } else if path != Path::new("/") { remove_file(path).map_err(|io| { printable_error( PROGRAM_NAME, format!("unable to remove file {}: {}", path.display(), io), ) })?; } Ok(()) } fn exists_in_root(path: &Path, root_dev: dev_t) -> Result<(), PrintableErrno<String>> { let path_stat: FileStat = stat_fn(path).printable( PROGRAM_NAME, format!("unable to stat {}", path.display()) )?; if path_stat.st_dev != root_dev { return Err(printable_error( PROGRAM_NAME, format!("{} is not in our current initramfs", path.display()), )); } Ok(()) } fn full_sanity_check() -> Result<dev_t, PrintableErrno<String>> { (getpid() == 1).then(|| ()).ok_or_else(|| { printable_error( PROGRAM_NAME, "not running in an initrd environment, exiting...", ) })?; let root_stat: FileStat = stat_fn("/").printable( PROGRAM_NAME, "unable to stat /", )?; let root_dev = root_stat.st_dev; let new_root_stat: FileStat = stat_fn(IGNITED_TARGET_ROOT_PATH).printable( PROGRAM_NAME, format!("unable to stat {}", IGNITED_TARGET_ROOT_PATH), )?; if new_root_stat.st_dev == root_dev { return Err(printable_error( PROGRAM_NAME, format!("/ and {} belong to the same device", IGNITED_TARGET_ROOT_PATH) )); } exists_in_root(Path::new("/etc/initrd-release"), root_dev)?; exists_in_root(Path::new(IGNITED_CONFIG), root_dev)?; exists_in_root(Path::new("/init"), root_dev)?; let root_statfs = statfs("/").printable( PROGRAM_NAME, "unable to statfs /" )?; let root_statfs_type = root_statfs.filesystem_type(); if root_statfs_type != RAMFS_MAGIC && root_statfs_type != TMPFS_MAGIC { return Err(printable_error( PROGRAM_NAME, "/ should still be initramfs, but is not of type ramfs/tmpfs".to_string(), )); } Ok(root_dev) } let root_dev = full_sanity_check()?; delete_recursive(Path::new("/"), root_dev) } /// Get current kernel version. Corresponds to the `release` field in the `utsname` /// struct returned by `uname(2)`. /// /// See `uname(2)` for more information. pub fn get_booted_kernel_ver() -> String { uname().release().to_string() } /// Create `systemd-state`: a `memfd` containing the timestamp (both realtime and /// monotonic) when the `ignited` binary started. pub fn get_systemd_state() -> Result<File, PrintableErrno<String>>
{ let memfd = memfd_create(cstr!("systemd-state"), MemFdCreateFlag::empty()) .printable(PROGRAM_NAME, "unable to create systemd-state")?; // SAFETY: memfd isn't used anywhere else Ok(unsafe { File::from_raw_fd(memfd) }) } /// Check to see if we are running as the `init` inside the initramfs. /// /// - Make sure we are PID 1. /// - Check for the existence of `/etc/initrd-release` (see /// [INITRD_INTERFACE](https://systemd.io/INITRD_INTERFACE/)). pub fn initial_sanity_check() -> Result<(), PrintableErrno<String>> { // We must be the initramfs' PID1 (getpid() == 1).then(|| ()).ok_or_else(|| { printable_error( PROGRAM_NAME, "not running in an initrd environment, exiting...", ) })?; // Per https://systemd.io/INITRD_INTERFACE/, we should only run if /etc/initrd-release // is present Path::new("/etc/initrd-release") .exists() .then(|| ()) .ok_or_else(|| { printable_error( PROGRAM_NAME, "not running in an initrd environment, exiting...", ) })?; Ok(()) } /// Get whether the target init system is systemd-compatible. /// /// Currently assumes that `/path/to/init` is a symbolic link to `/path/to/lib/systemd` /// on distributions with systemd, as is standard. pub fn is_systemd_compatible(init_path: &CStr) -> bool { let mut init_path = PathBuf::from( OsString::from(OsStr::from_bytes(init_path.to_bytes())) ); // Max depth of 10 to prevent DoS for _ in 0..10 { match init_path.read_link() { Ok(new) => init_path = new, _ => break, } } if let Ok(new) = init_path.canonicalize() { init_path = new } init_path.ends_with("/systemd") } /// Create `/run/initramfs`, which can be used by the booted system to switch back to /// the initramfs environment on shutdown. /// /// Per [systemd's INITRD_INTERFACE](https://systemd.io/INITRD_INTERFACE/). pub fn make_shutdown_pivot_dir() -> Result<(), PrintableErrno<String>> { let s_rwxu_rxg_rxo = Mode::S_IRWXU | Mode::S_IRGRP | Mode::S_IXGRP | Mode::S_IROTH | Mode::S_IXOTH; loop { match mkdir(Path::new("/run/initramfs"), s_rwxu_rxg_rxo) { Ok(()) => break Ok(()), Err(e) if e == Errno::ENOENT => { // Parent directory missing; retry } Err(e) => { break Err(e).printable( PROGRAM_NAME, "FATAL: unable to create /run/initramfs".to_string(), ) } } } } /// Spawn an emergency shell. /// /// Currently this function attempts to spawn `/bin/busybox` first. If it doesn't exist, /// it will attempt `/bin/toybox` instead. If neither exists (or all of them fail to /// properly hand over execution), this function logs an error to `kmsg`. /// /// If the emergency shell is spawned, this function never returns. pub fn spawn_emergency_shell(kcon: &mut KConsole) -> Result<Infallible, ()> { kcrit!(kcon, "attempting to spawn emergency shell"); let argv = [cstr!("sh"), cstr!("-I")]; let exists_b = match execv(cstr!("/bin/busybox"), &argv).unwrap_err() { Errno::ENOENT => false, e => { let e = Err::<(), _>(e) .printable(PROGRAM_NAME, "unable to execute /bin/busybox") .unwrap_err(); kcrit!(kcon, "{}", e); true } }; // If we are here, busybox doesn't exist or execv failed, so try toybox let err_t = match execv(cstr!("/bin/toybox"), &argv).unwrap_err() { Errno::ENOENT => None, e => Some( Err::<(), _>(e) .printable(PROGRAM_NAME, "unable to execute /bin/toybox") .unwrap_err(), ), }; // Both failed to execute if !exists_b { kcrit!( kcon, "unable to execute /bin/busybox: {}", Errno::ENOENT.desc() ); } match err_t { Some(e) => { kcrit!(kcon, "{}", e); } None => { kcrit!( kcon, "unable to execute /bin/toybox: {}", Errno::ENOENT.desc() ); } } Err(()) }
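// Example (a minimal sketch, not from the original file): how an init binary
// might wire the helpers above together. `kcon` is assumed to be a `KConsole`
// from `early_logging`, and the init path is illustrative.
//
//     initial_sanity_check()?;
//     make_shutdown_pivot_dir()?;
//     let _systemd = is_systemd_compatible(cstr!("/sbin/init"));
//     let _state = get_systemd_state()?;
//     // if boot cannot continue: let _ = spawn_emergency_shell(&mut kcon);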
dot.test.ts
import { Mode } from '../../../src/mode/mode'; import { Configuration } from '../../testConfiguration'; import { newTest } from '../../testSimplifier'; import { cleanUpWorkspace, setupWorkspace } from './../../testUtils'; suite('Dot Operator', () => { setup(async () => { const configuration = new Configuration(); configuration.tabstop = 4; configuration.expandtab = false; await setupWorkspace(configuration); }); teardown(cleanUpWorkspace); newTest({ title: "Can repeat '~' with <num>", start: ['|teXt'], keysPressed: '4~', end: ['TEx|T'], }); newTest({ title: "Can repeat '~' with dot", start: ['|teXt'], keysPressed: '~...', end: ['TEx|T'], }); newTest({ title: "Can repeat 'x'", start: ['|text'], keysPressed: 'x.', end: ['|xt'], }); newTest({ title: "Can repeat 'J'", start: ['|one', 'two', 'three'], keysPressed: 'J.', end: ['one two| three'], }); newTest({ title: 'Can handle dot with A', start: ['|one', 'two', 'three'], keysPressed: 'A!<Esc>j.j.', end: ['one!', 'two!', 'three|!'], }); newTest({ title: 'Can handle dot with I', start: ['on|e', 'two', 'three'], keysPressed: 'I!<Esc>j.j.', end: ['!one', '!two', '|!three'], }); newTest({ title: 'Can repeat actions that require selections', start: ['on|e', 'two'], keysPressed: 'Vj>.', end: ['\t\t|one', '\t\ttwo'], }); }); suite('Repeat content change', () => { setup(async () => { const configuration = new Configuration(); configuration.tabstop = 4; configuration.expandtab = false; await setupWorkspace(configuration); }); teardown(cleanUpWorkspace); newTest({ title: 'Can repeat `<BS>`', start: ['abcd|e', 'ABCDE'], keysPressed: 'i<BS><Esc>' + 'j$.', end: ['abce', 'AB|CE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<BS><BS>`', start: ['abcd|e', 'ABCDE'], keysPressed: 'i<BS><BS><Esc>' + 'j$.', end: ['abe', 'A|BE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<BS>` within larger insertion', start: ['abcd|e', 'ABCDE'], keysPressed: 'ixy<BS>z<Esc>' + 'j$.', end: ['abcdxze', 'ABCDx|zE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<Del>`', start: ['|abcde', 'ABCDE'], keysPressed: 'a<Del><Esc>' + 'j0.', end: ['acde', '|ACDE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<Del><Del>`', start: ['|abcde', 'ABCDE'], keysPressed: 'a<Del><Del><Esc>' + 'j0.', end: ['ade', '|ADE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<Del>` within larger insertion', start: ['|abcde', 'ABCDE'], keysPressed: 'axy<Del>z<Esc>' + 'j0.', end: ['axyzcde', 'Axy|zCDE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<BS>` and `<Del>`', start: ['abc|def', 'ABCDEF'], keysPressed: 'i<BS><Del>0<Esc>' + 'j0fD.', end: ['ab0ef', 'AB|0EF'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat insertion with newline', start: ['ab|cde', 'ABCDE'], keysPressed: 'i1\n2<Esc>' + 'j0ll.', end: ['ab1', '2cde', 'AB1', '|2CDE'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat insertion with auto-matched brackets', start: ['|', ''], keysPressed: 'ifoo(bar<Esc>' + 'j.', end: ['foo(bar)', 'foo(ba|r)'], endMode: Mode.Normal, }); newTest({ title: 'Repeat insertion with auto-matched parentheses in the middle', start: ['geometry.append(|width);', 'geometry.append(height);'], keysPressed: 'ce' + 'std::to_string(' + '<C-r>"' + '<Esc>' + 'j0fh' + '.', end: ['geometry.append(std::to_string(width));', 'geometry.append(std::to_string(heigh|t));'], }); newTest({ title: 'Repeat insertion that deletes auto-matched closing parenthesis', start: ['|', ''], keysPressed: 'i' + '[(' + '<Del>' + 'xyz' + '<Esc>' + 'j.', end: ['[(xyz]', '[(xy|z]'], 
}); newTest({ title: 'Can repeat `<C-y>`', start: ['abcde', '|12', 'ABCDE', '12'], keysPressed: 'A<C-y><C-y><Esc>' + 'jj0.', end: ['abcde', '12cd', 'ABCDE', '12c|d'], endMode: Mode.Normal, }); newTest({ title: 'Can repeat `<C-e>`', start: ['abcde', '|12', 'ABCDE', '12'], keysPressed: 'A<C-e><C-e><Esc>' + 'jj0.', end: ['abcde', '12CD', 'ABCDE', '12C|D'], endMode: Mode.Normal, }); newTest({ title: "Can repeat '<C-t>'", start: ['on|e', 'two'], keysPressed: 'a<C-t><Esc>j.', end: ['\tone', '\ttw|o'], }); newTest({ title: "Can repeat insert change and '<C-t>'", start: ['on|e', 'two'], keysPressed: 'a<C-t>b<Esc>j.', end: ['\toneb', '\ttwo|b'], }); newTest({ title: 'Can repeat change by `<C-a>`', start: ['on|e', 'two'], keysPressed: 'a<C-t>b<Esc>ja<C-a><Esc>', end: ['\toneb', '\ttwo|b'], }); newTest({ title: 'Repeating insertion with arrows ignores everything before last arrow', start: ['one |two three'], keysPressed: 'i' + 'X<left>Y<left>Z' + '<Esc>' + 'W.', end: ['one ZYXtwo |Zthree'], }); newTest({ title: 'Repeating insertion with arrows always inserts just before cursor', start: ['o|ne two three'], keysPressed: 'A' + 'X<left>Y<left>Z' + '<Esc>' + '0W.', end: ['one |Ztwo threeZYX'], }); newTest({ title: 'Cached content change will be cleared by arrow keys', start: ['on|e', 'two'], keysPressed: 'a<C-t>b<left>c<Esc>j.', end: ['\tonecb', 'tw|co'],
}); });
suite('Dot Operator repeat with remap', () => { setup(async () => { const configuration = new Configuration(); configuration.insertModeKeyBindings = [ { before: ['j', 'j', 'k'], after: ['<esc>'], }, ]; configuration.normalModeKeyBindings = [ { before: ['<leader>', 'w'], after: ['d', 'w'], }, ]; configuration.leader = ' '; await setupWorkspace(configuration); }); teardown(cleanUpWorkspace); newTest({ title: "Can repeat content change using 'jjk' mapped to '<Esc>' without trailing characters", start: ['on|e', 'two'], keysPressed: 'ciwfoojjkj.', end: ['foo', 'fo|o'], }); newTest({ title: "Can repeat '<leader>w' when mapped to 'dw'", start: ['|one two three'], keysPressed: ' w.', end: ['|three'], }); });
randompath.ts
import { range } from "d3-array"; import { randomNormal } from "d3-random"; interface PathInputs { sigma?: number; drift?: number; } export default function (size: number, options?: PathInputs) { const { sigma = 0.1, drift = 0 } = options || {}; const t = range(0, +size, 1), data = [{ x: 0, y: 0 }], norm = randomNormal(0, 1); let dx: number; for (let i = 1; i < t.length; i++) { dx = drift + sigma * norm(); data[i] = { x: i, y: data[i - 1].y + dx,
}; } return data; }
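// Example (a minimal sketch; the import path is assumed): a 100-step random
// walk using the PathInputs options defined above.
//
//   import randompath from "./randompath";
//   const path = randompath(100, { sigma: 0.2, drift: 0.01 });
//   // path[0] is { x: 0, y: 0 }; each later y adds drift + sigma * N(0, 1)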
dispatch.rs
use std::boxed::Box; use std::collections::HashMap; use std::fmt; use std::io; use std::net::SocketAddr; use std::string::ToString; use std::sync::{Arc, RwLock}; use std::time::{Duration, Instant}; use futures::future; use futures::stream::{SplitSink, SplitStream}; use futures::sync::mpsc; use futures::sync::oneshot; use futures::{Async, AsyncSink, Future, IntoFuture, Poll, Sink, Stream}; use tokio::net::tcp::{ConnectFuture, TcpStream}; use tokio::timer::delay_queue; use tokio::timer::{Delay, DelayQueue}; use tokio_codec::{Decoder, Framed}; use crate::tarantool::codec::{RequestId, TarantoolCodec, TarantoolFramedRequest}; use crate::tarantool::packets::{AuthPacket, CommandPacket, TarantoolRequest, TarantoolResponse}; pub type TarantoolFramed = Framed<TcpStream, TarantoolCodec>; pub type CallbackSender = oneshot::Sender<io::Result<TarantoolResponse>>; pub type ReconnectNotifySender = mpsc::UnboundedSender<ClientStatus>; static ERROR_SERVER_DISCONNECT: &str = "SERVER DISCONNECTED!"; pub static ERROR_DISPATCH_THREAD_IS_DEAD: &str = "DISPATCH THREAD IS DEAD!"; pub static ERROR_CLIENT_DISCONNECTED: &str = "CLIENT DISCONNECTED!"; static ERROR_TIMEOUT: &str = "TIMEOUT!"; /// /// Tarantool client config /// /// # Examples /// ```text /// let client = ClientConfig::new(addr, "rust", "rust") /// .set_timeout_time_ms(1000) /// .set_reconnect_time_ms(10000) /// .build(); /// ``` #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct ClientConfig { addr: SocketAddr, login: String, password: String, reconnect_time_ms: u64, timeout_time_ms: Option<u64>, } impl ClientConfig { pub fn new<S, S1>(addr: SocketAddr, login: S, password: S1) -> ClientConfig where S: Into<String>, S1: Into<String>, { ClientConfig { addr, login: login.into(), password: password.into(), reconnect_time_ms: 10000, timeout_time_ms: None, } } pub fn set_timeout_time_ms(mut self, timeout_time_ms: u64) -> ClientConfig { self.timeout_time_ms = Some(timeout_time_ms); self } pub fn set_reconnect_time_ms(mut self, reconnect_time_ms: u64) -> ClientConfig { self.reconnect_time_ms = reconnect_time_ms; self } } #[derive(Clone, Debug)] pub enum ClientStatus { Init, Connecting, Handshaking, Connected, Disconnecting(String), Disconnected(String), } enum DispatchState { New, OnConnect(ConnectFuture), OnHandshake(Box<dyn Future<Item = TarantoolFramed, Error = io::Error> + Send>), OnProcessing((SplitSink<TarantoolFramed>, SplitStream<TarantoolFramed>)), OnReconnect(String), OnSleep(Delay, String), } impl fmt::Display for DispatchState { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let status = match *self { DispatchState::New => "New", DispatchState::OnConnect(_) => "OnConnect", DispatchState::OnHandshake(_) => "OnHandshake", DispatchState::OnProcessing(_) => "OnProcessing", DispatchState::OnReconnect(_) => "OnReconnect", DispatchState::OnSleep(_, _) => "OnSleep", }; write!(f, "{}", status) } } impl DispatchState { fn get_client_status(&self) -> ClientStatus { match *self { DispatchState::New => ClientStatus::Init, DispatchState::OnConnect(_) => ClientStatus::Connecting, DispatchState::OnHandshake(_) => ClientStatus::Handshaking, DispatchState::OnProcessing(_) => ClientStatus::Connected, DispatchState::OnReconnect(ref error_message) => { ClientStatus::Disconnecting(error_message.clone()) } DispatchState::OnSleep(_, ref error_message) => { ClientStatus::Disconnected(error_message.clone()) } } } } struct DispatchEngine { command_receiver: mpsc::UnboundedReceiver<(CommandPacket, CallbackSender)>, awaiting_callbacks: HashMap<RequestId,
CallbackSender>, notify_callbacks: Arc<RwLock<Vec<ReconnectNotifySender>>>, buffered_command: Option<TarantoolFramedRequest>, command_counter: RequestId, timeout_time_ms: Option<u64>, timeout_queue: DelayQueue<RequestId>, timeout_id_to_key: HashMap<RequestId, delay_queue::Key>, } impl DispatchEngine { fn new( command_receiver: mpsc::UnboundedReceiver<(CommandPacket, CallbackSender)>, timeout_time_ms: Option<u64>, notify_callbacks: Arc<RwLock<Vec<ReconnectNotifySender>>>, ) -> DispatchEngine { DispatchEngine { command_receiver, buffered_command: None, awaiting_callbacks: HashMap::new(), notify_callbacks, command_counter: 3, timeout_time_ms, timeout_queue: DelayQueue::new(), timeout_id_to_key: HashMap::new(), } }
fn send_notify(&mut self, status: &ClientStatus) { let mut guard = self.notify_callbacks.write().unwrap(); let callbacks: &mut Vec<ReconnectNotifySender> = guard.as_mut(); //dirty code - send status to all callbacks and remove dead callbacks let mut i = 0; while i != callbacks.len() { if let Ok(_) = &callbacks[i].unbounded_send(status.clone()) { i = i + 1; } else { callbacks.remove(i); } } } fn try_send_buffered_command(&mut self, sink: &mut SplitSink<TarantoolFramed>) -> bool { if let Some(command) = self.buffered_command.take() { if let Ok(AsyncSink::NotReady(command)) = sink.start_send(command) { //return command to buffer self.buffered_command = Some(command); return false; } } true } fn send_error_to_all(&mut self, error_description: &String) { for (_, callback_sender) in self.awaiting_callbacks.drain() { let _res = callback_sender.send(Err(io::Error::new( io::ErrorKind::Other, error_description.clone(), ))); } self.buffered_command = None; if let Some(_) = self.timeout_time_ms { self.timeout_id_to_key.clear(); self.timeout_queue.clear(); } loop { match self.command_receiver.poll() { Ok(Async::Ready(Some((_, callback_sender)))) => { let _res = callback_sender.send(Err(io::Error::new( io::ErrorKind::Other, error_description.clone(), ))); } _ => break, }; } } fn process_commands(&mut self, sink: &mut SplitSink<TarantoolFramed>) -> Poll<(), ()> { let mut continue_send = self.try_send_buffered_command(sink); while continue_send { continue_send = match self.command_receiver.poll() { Ok(Async::Ready(Some((command_packet, callback_sender)))) => { let request_id = self.increment_command_counter(); self.awaiting_callbacks.insert(request_id, callback_sender); self.buffered_command = Some((request_id, TarantoolRequest::Command(command_packet))); if let Some(timeout_time_ms) = self.timeout_time_ms { let delay_key = self.timeout_queue.insert_at( request_id, Instant::now() + Duration::from_millis(timeout_time_ms), ); self.timeout_id_to_key.insert(request_id, delay_key); } self.try_send_buffered_command(sink) } Ok(Async::Ready(None)) => { //inbound sink is finished. close coroutine return Ok(Async::Ready(())); } _ => false, }; } //skip results of poll complete let _r = sink.poll_complete(); Ok(Async::NotReady) } fn process_tarantool_responses(&mut self, stream: &mut SplitStream<TarantoolFramed>) -> bool { loop { match stream.poll() { Ok(Async::Ready(Some((request_id, command_packet)))) => { debug!("receive command! {} {:?} ", request_id, command_packet); if let Some(_) = self.timeout_time_ms { if let Some(delay_key) = self.timeout_id_to_key.remove(&request_id) { self.timeout_queue.remove(&delay_key); } } self.awaiting_callbacks .remove(&request_id) .map(|sender| sender.send(command_packet)); } Ok(Async::Ready(None)) | Err(_) => { return true; } _ => { return false; } } } } fn process_timeouts(&mut self) { if let Some(_) = self.timeout_time_ms { loop { match self.timeout_queue.poll() { Ok(Async::Ready(Some(request_id_ref))) => { let request_id = request_id_ref.get_ref(); info!("timeout command! 
{} ", request_id); self.timeout_id_to_key.remove(request_id); if let Some(callback_sender) = self.awaiting_callbacks.remove(request_id) { //don't process result of send let _res = callback_sender .send(Err(io::Error::new(io::ErrorKind::Other, ERROR_TIMEOUT))); } } _ => { return; } } } } } fn increment_command_counter(&mut self) -> RequestId { self.command_counter = self.command_counter + 1; self.command_counter } fn clean_command_counter(&mut self) { self.command_counter = 3; } } pub struct Dispatch { config: ClientConfig, state: DispatchState, engine: DispatchEngine, status: Arc<RwLock<ClientStatus>>, } impl Dispatch { pub fn new( config: ClientConfig, command_receiver: mpsc::UnboundedReceiver<(CommandPacket, CallbackSender)>, status: Arc<RwLock<ClientStatus>>, notify_callbacks: Arc<RwLock<Vec<ReconnectNotifySender>>>, ) -> Dispatch { let timeout_time_ms = config.timeout_time_ms.clone(); Dispatch { state: DispatchState::New, config, engine: DispatchEngine::new(command_receiver, timeout_time_ms, notify_callbacks), status, } } fn update_status(&mut self) { let status_tmp = self.state.get_client_status(); let mut status = self.status.write().unwrap(); *status = status_tmp.clone(); self.engine.send_notify(&status_tmp); } fn get_auth_seq( stream: TcpStream, config: &ClientConfig, ) -> Box<dyn Future<Item = TarantoolFramed, Error = io::Error> + Send> { let login = config.login.clone(); let password = config.password.clone(); Box::new( TarantoolCodec::new() .framed(stream) .into_future() .map_err(|e| e.0) .and_then(|(_first_resp, framed_io)| { framed_io .send((2, TarantoolRequest::Auth(AuthPacket { login, password }))) .into_future() }) .and_then(|framed| framed.into_future().map_err(|e| e.0)) .and_then(|(r, framed_io)| match r { Some((_, Err(e))) => future::err(e), _ => future::ok(framed_io), }), ) } } impl Future for Dispatch { type Item = (); type Error = (); fn poll(&mut self) -> Poll<(), ()> { debug!("poll ! {}", self.state); loop { let new_state = match self.state { DispatchState::New => Some(DispatchState::OnConnect(TcpStream::connect( &self.config.addr, ))), DispatchState::OnReconnect(ref error_description) => { error!("Reconnect! error={}", error_description); self.engine.send_error_to_all(error_description); let delay_future = Delay::new( Instant::now() + Duration::from_millis(self.config.reconnect_time_ms), ); Some(DispatchState::OnSleep( delay_future, error_description.clone(), )) } DispatchState::OnSleep(ref mut delay_future, _) => match delay_future.poll() { Ok(Async::Ready(_)) => Some(DispatchState::New), Ok(Async::NotReady) => None, Err(err) => Some(DispatchState::OnReconnect(err.to_string())), }, DispatchState::OnConnect(ref mut connect_future) => match connect_future.poll() { Ok(Async::Ready(stream)) => Some(DispatchState::OnHandshake( Dispatch::get_auth_seq(stream, &self.config), )), Ok(Async::NotReady) => None, Err(err) => Some(DispatchState::OnReconnect(err.to_string())), }, DispatchState::OnHandshake(ref mut handshake_future) => { match handshake_future.poll() { Ok(Async::Ready(framed)) => { self.engine.clean_command_counter(); Some(DispatchState::OnProcessing(framed.split())) } Ok(Async::NotReady) => None, Err(err) => Some(DispatchState::OnReconnect(err.to_string())), } } DispatchState::OnProcessing((ref mut sink, ref mut stream)) => { match self.engine.process_commands(sink) { Ok(Async::Ready(())) => { // stop client !!! exit from event loop ! 
return Ok(Async::Ready(())); } _ => {} } if self.engine.process_tarantool_responses(stream) { Some(DispatchState::OnReconnect( ERROR_SERVER_DISCONNECT.to_string(), )) } else { self.engine.process_timeouts(); None } } }; if let Some(new_state_value) = new_state { self.state = new_state_value; self.update_status(); } else { break; } } Ok(Async::NotReady) } }
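// Example (a minimal sketch): configuring this dispatcher. The `build()` step
// shown in the ClientConfig doc comment lives in the client module and is
// assumed here; only ClientConfig itself comes from this file.
//
//     let config = ClientConfig::new("127.0.0.1:3301".parse().unwrap(), "rust", "rust")
//         .set_timeout_time_ms(1000)
//         .set_reconnect_time_ms(10_000);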
CM-uni-name-filter.py
#Maral Dadvar #09/01/2019 #This script filters out names that contain initials. import unicodedata import os , glob import rdflib from rdflib import Namespace, URIRef, Graph , Literal , OWL, RDFS , RDF from SPARQLWrapper import SPARQLWrapper2, XML , JSON , TURTLE import re import pprint os.chdir('C:\\Users\\Maral\\Desktop') graphout = Graph() foaf = Namespace("http://xmlns.com/foaf/0.1/") rdf = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") jl = Namespace("http://data.judaicalink.org/ontology/") gndo = Namespace("http://d-nb.info/standards/elementset/gnd#") skos = Namespace("http://www.w3.org/2004/02/skos/core#") dc = Namespace ("http://purl.org/dc/elements/1.1/") edm = Namespace("http://www.europeana.eu/schemas/edm/") graphout.bind('jl', jl) graphout.bind('rdfs',RDFS) graphout.bind('foaf',foaf) graphout.bind('skos',skos) graphout.bind('owl',OWL) graphout.bind('gndo',gndo) graphout.bind('dc',dc) graphout.bind('edm',edm) graph = Graph() graph.parse('C:\\Users\\Maral\\Desktop\\cm-authors-context-GND-uni-02.rdf', format="turtle") spar1= """ PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX gndo: <http://d-nb.info/standards/elementset/gnd#> PREFIX pro: <http://purl.org/hpi/patchr#>
PREFIX owl: <http://www.w3.org/2002/07/owl#> PREFIX edm: <http://www.europeana.eu/schemas/edm/> PREFIX dc: <http://purl.org/dc/elements/1.1/> PREFIX foaf: <http://xmlns.com/foaf/0.1/> PREFIX skos: <http://www.w3.org/2004/02/skos/core#> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX dblp: <http://dblp.org/rdf/schema-2015-01-26#> PREFIX dcterms: <http://purl.org/dc/terms/> PREFIX dbpedia: <http://dbpedia.org/resource/> PREFIX jl: <http://data.judaicalink.org/ontology/> SELECT ?x ?label ?id ?desc ?title ?gnd where { ?x a edm:ProvidedCHO. ?x dc:creator ?label. ?x dc:identifier ?id. ?x dc:description ?desc. ?x dc:title ?title. ?x gndo:gndIdentifier ?gnd. } """ result = graph.query(spar1) for item in result: labels = item[1].value print (labels) if re.search(r'\w{1}\.\s*\w{1}\.',labels): print ('not valid') elif re.search(r'\w{1}\.',labels): print ('not valid') else: graphout.add((URIRef(item[0]), RDF.type , edm.ProvidedCHO )) graphout.add( (URIRef(item[0]), dc.creator , Literal(item[1].value) ) ) graphout.add( (URIRef(item[0]), dc.identifier , Literal(item[2].value) ) ) graphout.add( (URIRef(item[0]), gndo.gndIdentifier , URIRef(item[5]) ) ) graphout.add ((URIRef(item[0]) , dc.description , Literal((item[3].value)))) graphout.add ((URIRef(item[0]) , dc.title , Literal((item[4])))) graphout.serialize(destination = 'cm-uni-names-filtered.ttl' , format="turtle")
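# Example (a minimal sketch): the initials check used in the loop above,
# shown standalone. Names carrying one or two initials are skipped.
#
#   def has_initials(label):
#       return bool(re.search(r'\w{1}\.\s*\w{1}\.', label) or re.search(r'\w{1}\.', label))
#
#   has_initials('Cohen, A. B.')    # True  -> printed 'not valid', row skipped
#   has_initials('Cohen, A.')       # True  -> printed 'not valid', row skipped
#   has_initials('Cohen, Abraham')  # False -> row copied into graphout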
extract_features_16bp.py
from __future__ import division import os, glob import pandas as pd import math import numpy as np from scipy.spatial import ConvexHull import scipy from configparser import ConfigParser, NoOptionError, NoSectionError from numba import jit from simba.rw_dfs import * import re def extract_features_wotarget_16(inifile): config = ConfigParser() configFile = str(inifile) config.read(configFile) projectPath = config.get('General settings', 'project_path') csv_dir_in, csv_dir_out = os.path.join(projectPath, 'csv', 'outlier_corrected_movement_location'), os.path.join(projectPath,'csv', 'features_extracted') vidInfPath = os.path.join(projectPath, 'logs', 'video_info.csv') try: wfileType = config.get('General settings', 'workflow_file_type') except NoOptionError: wfileType = 'csv' vidinfDf = pd.read_csv(vidInfPath) #change videos name to str vidinfDf.Video = vidinfDf.Video.astype('str') def count_values_in_range(series, values_in_range_min, values_in_range_max): return series.between(left=values_in_range_min, right=values_in_range_max).sum() def angle3pt(ax, ay, bx, by, cx, cy): ang = math.degrees( math.atan2(cy - by, cx - bx) - math.atan2(ay - by, ax - bx)) return ang + 360 if ang < 0 else ang @jit(nopython=True, cache=True) def EuclidianDistCald(bp1xVals, bp1yVals, bp2xVals, bp2yVals, currPixPerMM): series = (np.sqrt((bp1xVals - bp2xVals) ** 2 + (bp1yVals - bp2yVals) ** 2)) / currPixPerMM return series roll_windows, loopy = [], 0 roll_windows_values = [2, 5, 6, 7.5, 15] #REMOVE WINDOWS THAT ARE TOO SMALL minimum_fps = vidinfDf['fps'].min() for win in range(len(roll_windows_values)): if minimum_fps < roll_windows_values[win]: roll_windows_values[win] = minimum_fps else: pass roll_windows_values = list(set(roll_windows_values)) ########### FIND CSV FILES ########### print(csv_dir_in) filesFound = glob.glob(csv_dir_in + '/*.' + wfileType) print('Extracting features from ' + str(len(filesFound)) + ' files...') ########### CREATE PD FOR RAW DATA AND PD FOR MOVEMENT BETWEEN FRAMES ########### for currentFile in filesFound: M1_hull_large_euclidean_list, M1_hull_small_euclidean_list, M1_hull_mean_euclidean_list, M1_hull_sum_euclidean_list, M2_hull_large_euclidean_list, M2_hull_small_euclidean_list, M2_hull_mean_euclidean_list, M2_hull_sum_euclidean_list = [], [], [], [], [], [], [], [] currVidName = os.path.basename(currentFile).replace('.' +wfileType, '') # get current pixels/mm currVideoSettings = vidinfDf.loc[vidinfDf['Video'] == currVidName] try: currPixPerMM = float(currVideoSettings['pixels/mm']) except TypeError: print('Error: make sure all the videos that are going to be analyzed are represented in the project_folder/logs/video_info.csv file') fps = float(currVideoSettings['fps']) print('Processing ' + '"' + str(currVidName) + '".' + ' Fps: ' + str(fps) + ". 
mm/ppx: " + str(currPixPerMM)) for i in range(len(roll_windows_values)): roll_windows.append(int(fps / roll_windows_values[i])) loopy += 1 columnHeaders = ["Ear_left_1_x", "Ear_left_1_y", "Ear_left_1_p", "Ear_right_1_x", "Ear_right_1_y", "Ear_right_1_p", "Nose_1_x", "Nose_1_y", "Nose_1_p", "Center_1_x", "Center_1_y", "Center_1_p", "Lat_left_1_x", "Lat_left_1_y", "Lat_left_1_p", "Lat_right_1_x", "Lat_right_1_y", "Lat_right_1_p", "Tail_base_1_x", "Tail_base_1_y", "Tail_base_1_p", "Tail_end_1_x", "Tail_end_1_y", "Tail_end_1_p", "Ear_left_2_x", "Ear_left_2_y", "Ear_left_2_p", "Ear_right_2_x", "Ear_right_2_y", "Ear_right_2_p", "Nose_2_x", "Nose_2_y", "Nose_2_p", "Center_2_x", "Center_2_y", "Center_2_p", "Lat_left_2_x", "Lat_left_2_y", "Lat_left_2_p", "Lat_right_2_x", "Lat_right_2_y", "Lat_right_2_p", "Tail_base_2_x", "Tail_base_2_y", "Tail_base_2_p", "Tail_end_2_x", "Tail_end_2_y", "Tail_end_2_p"] csv_df = read_df(currentFile, wfileType) try: csv_df = csv_df.set_index('scorer') except KeyError: pass csv_df.columns = columnHeaders csv_df = csv_df.fillna(0) #csv_df = csv_df.drop(csv_df.index[[0]]) csv_df = csv_df.apply(pd.to_numeric) csv_df = csv_df.reset_index() csv_df = csv_df.reset_index(drop=True) print('Evaluating convex hulls...') ########### MOUSE AREAS ########################################### try: csv_df['Mouse_1_poly_area'] = csv_df.apply(lambda x: ConvexHull(np.array( [[x['Ear_left_1_x'], x["Ear_left_1_y"]], [x['Ear_right_1_x'], x["Ear_right_1_y"]], [x['Nose_1_x'], x["Nose_1_y"]], [x['Lat_left_1_x'], x["Lat_left_1_y"]], \ [x['Lat_right_1_x'], x["Lat_right_1_y"]], [x['Tail_base_1_x'], x["Tail_base_1_y"]], [x['Center_1_x'], x["Center_1_y"]]])).area, axis=1) except scipy.spatial.qhull.QhullError as e: print(e) print('ERROR: For more information, go to https://github.com/sgoldenlab/simba/blob/SimBA_no_TF/docs/FAQ.md#i-get-a-qhull-eg-qh6154-or-6013-error-when-extracting-the-features') csv_df['Mouse_1_poly_area'] = csv_df.eval('Mouse_1_poly_area / @currPixPerMM') try: csv_df['Mouse_2_poly_area'] = csv_df.apply(lambda x: ConvexHull(np.array( [[x['Ear_left_2_x'], x["Ear_left_2_y"]], [x['Ear_right_2_x'], x["Ear_right_2_y"]], [x['Nose_2_x'], x["Nose_2_y"]], [x['Lat_left_2_x'], x["Lat_left_2_y"]], \ [x['Lat_right_2_x'], x["Lat_right_2_y"]], [x['Tail_base_2_x'], x["Tail_base_2_y"]], [x['Center_2_x'], x["Center_2_y"]]])).area, axis=1) except scipy.spatial.qhull.QhullError as e: print(e) print('ERROR: For more information, check https://github.com/sgoldenlab/simba/blob/SimBA_no_TF/docs/FAQ.md#i-get-a-qhull-eg-qh6154-or-6013-error-when-extracting-the-features') ########### CREATE SHIFTED DATAFRAME FOR DISTANCE CALCULATIONS ########################################### csv_df_shifted = csv_df.shift(periods=1) csv_df_shifted = csv_df_shifted.rename( columns={'Ear_left_1_x': 'Ear_left_1_x_shifted', 'Ear_left_1_y': 'Ear_left_1_y_shifted', 'Ear_left_1_p': 'Ear_left_1_p_shifted', 'Ear_right_1_x': 'Ear_right_1_x_shifted', \ 'Ear_right_1_y': 'Ear_right_1_y_shifted', 'Ear_right_1_p': 'Ear_right_1_p_shifted', 'Nose_1_x': 'Nose_1_x_shifted', 'Nose_1_y': 'Nose_1_y_shifted', \ 'Nose_1_p': 'Nose_1_p_shifted', 'Center_1_x': 'Center_1_x_shifted', 'Center_1_y': 'Center_1_y_shifted', 'Center_1_p': 'Center_1_p_shifted', 'Lat_left_1_x': \ 'Lat_left_1_x_shifted', 'Lat_left_1_y': 'Lat_left_1_y_shifted', 'Lat_left_1_p': 'Lat_left_1_p_shifted', 'Lat_right_1_x': 'Lat_right_1_x_shifted', 'Lat_right_1_y': 'Lat_right_1_y_shifted', \ 'Lat_right_1_p': 'Lat_right_1_p_shifted', 'Tail_base_1_x': 'Tail_base_1_x_shifted', 
'Tail_base_1_y': 'Tail_base_1_y_shifted', \ 'Tail_base_1_p': 'Tail_base_1_p_shifted', 'Tail_end_1_x': 'Tail_end_1_x_shifted', 'Tail_end_1_y': 'Tail_end_1_y_shifted', 'Tail_end_1_p': 'Tail_end_1_p_shifted', 'Ear_left_2_x': 'Ear_left_2_x_shifted', 'Ear_left_2_y': 'Ear_left_2_y_shifted', 'Ear_left_2_p': 'Ear_left_2_p_shifted', 'Ear_right_2_x': 'Ear_right_2_x_shifted', \ 'Ear_right_2_y': 'Ear_right_2_y_shifted', 'Ear_right_2_p': 'Ear_right_2_p_shifted', 'Nose_2_x': 'Nose_2_x_shifted', 'Nose_2_y': 'Nose_2_y_shifted', \ 'Nose_2_p': 'Nose_2_p_shifted', 'Center_2_x': 'Center_2_x_shifted', 'Center_2_y': 'Center_2_y_shifted', 'Center_2_p': 'Center_2_p_shifted', 'Lat_left_2_x': \ 'Lat_left_2_x_shifted', 'Lat_left_2_y': 'Lat_left_2_y_shifted', 'Lat_left_2_p': 'Lat_left_2_p_shifted', 'Lat_right_2_x': 'Lat_right_2_x_shifted', 'Lat_right_2_y': 'Lat_right_2_y_shifted', \ 'Lat_right_2_p': 'Lat_right_2_p_shifted', 'Tail_base_2_x': 'Tail_base_2_x_shifted', 'Tail_base_2_y': 'Tail_base_2_y_shifted', \ 'Tail_base_2_p': 'Tail_base_2_p_shifted', 'Tail_end_2_x': 'Tail_end_2_x_shifted', 'Tail_end_2_y': 'Tail_end_2_y_shifted', 'Tail_end_2_p': 'Tail_end_2_p_shifted', 'Mouse_1_poly_area': 'Mouse_1_poly_area_shifted', 'Mouse_2_poly_area': 'Mouse_2_poly_area_shifted'}) csv_df_combined = pd.concat([csv_df, csv_df_shifted], axis=1, join='inner') csv_df_combined = csv_df_combined.fillna(0) csv_df_combined = csv_df_combined.reset_index(drop=True) print('Calculating euclidean distances...') ########### EUCLIDEAN DISTANCES ########################################### csv_df['Mouse_1_nose_to_tail'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values, csv_df['Tail_base_1_x'].values, csv_df['Tail_base_1_y'].values, currPixPerMM) csv_df['Mouse_2_nose_to_tail'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values, csv_df['Tail_base_2_x'].values, csv_df['Tail_base_2_y'].values, currPixPerMM) csv_df['Mouse_1_width'] = EuclidianDistCald(csv_df['Lat_left_1_x'].values, csv_df['Lat_left_1_y'].values, csv_df['Lat_right_1_x'].values, csv_df['Lat_right_1_y'].values, currPixPerMM) csv_df['Mouse_2_width'] = EuclidianDistCald(csv_df['Lat_left_2_x'].values, csv_df['Lat_left_2_y'].values, csv_df['Lat_right_2_x'].values, csv_df['Lat_right_2_y'].values, currPixPerMM) csv_df['Mouse_1_Ear_distance'] = EuclidianDistCald(csv_df['Ear_left_1_x'].values, csv_df['Ear_left_1_y'].values, csv_df['Ear_right_1_x'].values, csv_df['Ear_right_1_y'].values, currPixPerMM) csv_df['Mouse_2_Ear_distance'] = EuclidianDistCald(csv_df['Ear_left_2_x'].values, csv_df['Ear_left_2_y'].values, csv_df['Ear_right_2_x'].values, csv_df['Ear_right_2_y'].values, currPixPerMM) csv_df['Mouse_1_Nose_to_centroid'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values, csv_df['Center_1_x'].values, csv_df['Center_1_y'].values, currPixPerMM) csv_df['Mouse_2_Nose_to_centroid'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values,csv_df['Center_2_x'].values, csv_df['Center_2_y'].values, currPixPerMM) csv_df['Mouse_1_Nose_to_lateral_left'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values,csv_df['Lat_left_1_x'].values, csv_df['Lat_left_1_y'].values, currPixPerMM) csv_df['Mouse_2_Nose_to_lateral_left'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values,csv_df['Lat_left_2_x'].values, csv_df['Lat_left_2_y'].values, currPixPerMM) csv_df['Mouse_1_Nose_to_lateral_right'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values,csv_df['Lat_right_1_x'].values, 
csv_df['Lat_right_1_y'].values, currPixPerMM) csv_df['Mouse_2_Nose_to_lateral_right'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values,csv_df['Lat_right_2_x'].values, csv_df['Lat_right_2_y'].values, currPixPerMM) csv_df['Mouse_1_Centroid_to_lateral_left'] = EuclidianDistCald(csv_df['Center_1_x'].values, csv_df['Center_1_y'].values,csv_df['Lat_left_1_x'].values, csv_df['Lat_left_1_y'].values, currPixPerMM) csv_df['Mouse_2_Centroid_to_lateral_left'] = EuclidianDistCald(csv_df['Center_2_x'].values, csv_df['Center_2_y'].values,csv_df['Lat_left_2_x'].values, csv_df['Lat_left_2_y'].values, currPixPerMM) csv_df['Mouse_1_Centroid_to_lateral_right'] = EuclidianDistCald(csv_df['Center_1_x'].values, csv_df['Center_1_y'].values,csv_df['Lat_right_1_x'].values, csv_df['Lat_right_1_y'].values, currPixPerMM) csv_df['Mouse_2_Centroid_to_lateral_right'] = EuclidianDistCald(csv_df['Center_2_x'].values, csv_df['Center_2_y'].values,csv_df['Lat_right_2_x'].values, csv_df['Lat_right_2_y'].values, currPixPerMM) csv_df['Centroid_distance'] = EuclidianDistCald(csv_df['Center_2_x'].values, csv_df['Center_2_y'].values,csv_df['Center_1_x'].values, csv_df['Center_1_y'].values, currPixPerMM) csv_df['Nose_to_nose_distance'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values,csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values, currPixPerMM) csv_df['M1_Nose_to_M2_lat_left'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values,csv_df['Lat_left_2_x'].values, csv_df['Lat_left_2_y'].values, currPixPerMM) csv_df['M1_Nose_to_M2_lat_right'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values,csv_df['Lat_right_2_x'].values, csv_df['Lat_right_2_y'].values, currPixPerMM) csv_df['M2_Nose_to_M1_lat_left'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values,csv_df['Lat_left_1_x'].values, csv_df['Lat_left_1_y'].values, currPixPerMM) csv_df['M2_Nose_to_M1_lat_right'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values,csv_df['Lat_right_1_x'].values, csv_df['Lat_right_1_y'].values, currPixPerMM) csv_df['M1_Nose_to_M2_tail_base'] = EuclidianDistCald(csv_df['Nose_1_x'].values, csv_df['Nose_1_y'].values,csv_df['Tail_base_2_x'].values, csv_df['Tail_base_2_y'].values, currPixPerMM) csv_df['M2_Nose_to_M1_tail_base'] = EuclidianDistCald(csv_df['Nose_2_x'].values, csv_df['Nose_2_y'].values,csv_df['Tail_base_1_x'].values, csv_df['Tail_base_1_y'].values, currPixPerMM) csv_df['Movement_mouse_1_centroid'] = EuclidianDistCald(csv_df_combined['Center_1_x_shifted'].values, csv_df_combined['Center_1_y_shifted'].values,csv_df_combined['Center_1_x'].values, csv_df_combined['Center_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_centroid'] = EuclidianDistCald(csv_df_combined['Center_2_x_shifted'].values, csv_df_combined['Center_2_y_shifted'].values,csv_df_combined['Center_2_x'].values, csv_df_combined['Center_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_nose'] = EuclidianDistCald(csv_df_combined['Nose_1_x_shifted'].values, csv_df_combined['Nose_1_y_shifted'].values,csv_df_combined['Nose_1_x'].values, csv_df_combined['Nose_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_nose'] = EuclidianDistCald(csv_df_combined['Nose_2_x_shifted'].values, csv_df_combined['Nose_2_y_shifted'].values,csv_df_combined['Nose_2_x'].values, csv_df_combined['Nose_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_tail_base'] = EuclidianDistCald(csv_df_combined['Tail_base_1_x_shifted'].values, 
csv_df_combined['Tail_base_1_y_shifted'].values,csv_df_combined['Tail_base_1_x'].values, csv_df_combined['Tail_base_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_tail_base'] = EuclidianDistCald(csv_df_combined['Tail_base_2_x_shifted'].values, csv_df_combined['Tail_base_2_y_shifted'].values,csv_df_combined['Tail_base_2_x'].values, csv_df_combined['Tail_base_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_tail_end'] = EuclidianDistCald(csv_df_combined['Tail_end_1_x_shifted'].values, csv_df_combined['Tail_end_1_y_shifted'].values,csv_df_combined['Tail_end_1_x'].values, csv_df_combined['Tail_end_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_tail_end'] = EuclidianDistCald(csv_df_combined['Tail_end_2_x_shifted'].values, csv_df_combined['Tail_end_2_y_shifted'].values,csv_df_combined['Tail_end_2_x'].values, csv_df_combined['Tail_end_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_left_ear'] = EuclidianDistCald(csv_df_combined['Ear_left_1_x_shifted'].values, csv_df_combined['Ear_left_1_y_shifted'].values,csv_df_combined['Ear_left_1_x'].values, csv_df_combined['Ear_left_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_left_ear'] = EuclidianDistCald(csv_df_combined['Ear_left_2_x_shifted'].values, csv_df_combined['Ear_left_2_y_shifted'].values,csv_df_combined['Ear_left_2_x'].values, csv_df_combined['Ear_left_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_right_ear'] = EuclidianDistCald(csv_df_combined['Ear_right_1_x_shifted'].values, csv_df_combined['Ear_right_1_y_shifted'].values,csv_df_combined['Ear_right_1_x'].values, csv_df_combined['Ear_right_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_right_ear'] = EuclidianDistCald(csv_df_combined['Ear_right_2_x_shifted'].values, csv_df_combined['Ear_right_2_y_shifted'].values,csv_df_combined['Ear_right_2_x'].values, csv_df_combined['Ear_right_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_lateral_left'] = EuclidianDistCald(csv_df_combined['Lat_left_1_x_shifted'].values, csv_df_combined['Lat_left_1_y_shifted'].values,csv_df_combined['Lat_left_1_x'].values, csv_df_combined['Lat_left_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_lateral_left'] = EuclidianDistCald(csv_df_combined['Lat_left_2_x_shifted'].values, csv_df_combined['Lat_left_2_y_shifted'].values,csv_df_combined['Lat_left_2_x'].values, csv_df_combined['Lat_left_2_y'].values, currPixPerMM) csv_df['Movement_mouse_1_lateral_right'] = EuclidianDistCald(csv_df_combined['Lat_right_1_x_shifted'].values, csv_df_combined['Lat_right_1_y_shifted'].values,csv_df_combined['Lat_right_1_x'].values, csv_df_combined['Lat_right_1_y'].values, currPixPerMM) csv_df['Movement_mouse_2_lateral_right'] = EuclidianDistCald(csv_df_combined['Lat_right_2_x_shifted'].values, csv_df_combined['Lat_right_2_y_shifted'].values,csv_df_combined['Lat_right_2_x'].values, csv_df_combined['Lat_right_2_y'].values, currPixPerMM) csv_df['Mouse_1_polygon_size_change'] = pd.eval("csv_df_combined.Mouse_1_poly_area_shifted - csv_df_combined.Mouse_1_poly_area") csv_df['Mouse_2_polygon_size_change'] = pd.eval("csv_df_combined.Mouse_2_poly_area_shifted - csv_df_combined.Mouse_2_poly_area") print('Calculating hull variables...') ########### HULL - EUCLIDEAN DISTANCES ########################################### for index, row in csv_df.iterrows(): M1_np_array = np.array( [[row['Ear_left_1_x'], row["Ear_left_1_y"]], [row['Ear_right_1_x'], row["Ear_right_1_y"]], [row['Nose_1_x'], row["Nose_1_y"]], [row['Center_1_x'], row["Center_1_y"]], [row['Lat_left_1_x'], row["Lat_left_1_y"]], [row['Lat_right_1_x'], 
row["Lat_right_1_y"]], [row['Tail_base_1_x'], row["Tail_base_1_y"]]]).astype(int) M2_np_array = np.array( [[row['Ear_left_2_x'], row["Ear_left_2_y"]], [row['Ear_right_2_x'], row["Ear_right_2_y"]], [row['Nose_2_x'], row["Nose_2_y"]], [row['Center_2_x'], row["Center_2_y"]], [row['Lat_left_2_x'], row["Lat_left_2_y"]], [row['Lat_right_2_x'], row["Lat_right_2_y"]], [row['Tail_base_2_x'], row["Tail_base_2_y"]]]).astype(int) M1_dist_euclidean = scipy.spatial.distance.cdist(M1_np_array, M1_np_array, metric='euclidean') M1_dist_euclidean = M1_dist_euclidean[M1_dist_euclidean != 0] M1_hull_large_euclidean = np.amax(M1_dist_euclidean) M1_hull_small_euclidean = np.min(M1_dist_euclidean) M1_hull_mean_euclidean = np.mean(M1_dist_euclidean) M1_hull_sum_euclidean = np.sum(M1_dist_euclidean) M1_hull_large_euclidean_list.append(M1_hull_large_euclidean) M1_hull_small_euclidean_list.append(M1_hull_small_euclidean) M1_hull_mean_euclidean_list.append(M1_hull_mean_euclidean) M1_hull_sum_euclidean_list.append(M1_hull_sum_euclidean) M2_dist_euclidean = scipy.spatial.distance.cdist(M2_np_array, M2_np_array, metric='euclidean') M2_dist_euclidean = M2_dist_euclidean[M2_dist_euclidean != 0] M2_hull_large_euclidean = np.amax(M2_dist_euclidean) M2_hull_small_euclidean = np.min(M2_dist_euclidean) M2_hull_mean_euclidean = np.mean(M2_dist_euclidean) M2_hull_sum_euclidean = np.sum(M2_dist_euclidean) M2_hull_large_euclidean_list.append(M2_hull_large_euclidean) M2_hull_small_euclidean_list.append(M2_hull_small_euclidean) M2_hull_mean_euclidean_list.append(M2_hull_mean_euclidean) M2_hull_sum_euclidean_list.append(M2_hull_sum_euclidean) csv_df['M1_largest_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M1_hull_large_euclidean_list)) csv_df['M1_smallest_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M1_hull_small_euclidean_list)) csv_df['M1_mean_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M1_hull_mean_euclidean_list)) csv_df['M1_sum_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M1_hull_sum_euclidean_list)) csv_df['M2_largest_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M2_hull_large_euclidean_list)) csv_df['M2_smallest_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M2_hull_small_euclidean_list)) csv_df['M2_mean_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M2_hull_mean_euclidean_list)) csv_df['M2_sum_euclidean_distance_hull'] = list(map(lambda x: x / currPixPerMM, M2_hull_sum_euclidean_list)) csv_df['Sum_euclidean_distance_hull_M1_M2'] = (csv_df['M1_sum_euclidean_distance_hull'] + csv_df['M2_sum_euclidean_distance_hull']) ########### COLLAPSED MEASURES ########################################### csv_df['Total_movement_centroids'] = csv_df.eval("Movement_mouse_1_centroid + Movement_mouse_2_centroid") csv_df['Total_movement_tail_ends'] = csv_df.eval('Movement_mouse_1_tail_end + Movement_mouse_2_tail_end') csv_df['Total_movement_all_bodyparts_M1'] = csv_df.eval('Movement_mouse_1_nose + Movement_mouse_1_tail_end + Movement_mouse_1_tail_base + Movement_mouse_1_left_ear + Movement_mouse_1_right_ear + Movement_mouse_1_lateral_left + Movement_mouse_1_lateral_right') csv_df['Total_movement_all_bodyparts_M2'] = csv_df.eval('Movement_mouse_2_nose + Movement_mouse_2_tail_end + Movement_mouse_2_tail_base + Movement_mouse_2_left_ear + Movement_mouse_2_right_ear + Movement_mouse_2_lateral_left + Movement_mouse_2_lateral_right') csv_df['Total_movement_all_bodyparts_both_mice'] = 
csv_df.eval('Total_movement_all_bodyparts_M1 + Total_movement_all_bodyparts_M2') ########### CALC ROLLING WINDOWS MEDIANS AND MEANS ########################################### print('Calculating rolling windows: medians, medians, and sums...') for i in range(len(roll_windows_values)): currentColName = 'Sum_euclid_distances_hull_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Sum_euclidean_distance_hull_M1_M2'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Sum_euclid_distances_hull_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Sum_euclidean_distance_hull_M1_M2'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Sum_euclid_distances_hull_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Sum_euclidean_distance_hull_M1_M2'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Movement_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_centroids'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Movement_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_centroids'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Movement_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_centroids'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Distance_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Centroid_distance'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Distance_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Centroid_distance'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Distance_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Centroid_distance'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse1_width_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Mouse_1_width'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse1_width_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Mouse_1_width'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse1_width_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Mouse_1_width'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse2_width_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Mouse_2_width'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse2_width_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Mouse_2_width'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse2_width_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Mouse_2_width'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse1_mean_euclid_distances_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_mean_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse1_mean_euclid_distances_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_mean_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse1_mean_euclid_distances_sum_' + 
str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_mean_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse2_mean_euclid_distances_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_mean_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse2_mean_euclid_distances_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_mean_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse2_mean_euclid_distances_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_mean_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse1_smallest_euclid_distances_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_smallest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse1_smallest_euclid_distances_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_smallest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse1_smallest_euclid_distances_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_smallest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse2_smallest_euclid_distances_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_smallest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse2_smallest_euclid_distances_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_smallest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse2_smallest_euclid_distances_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_smallest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse1_largest_euclid_distances_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_largest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse1_largest_euclid_distances_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_largest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse1_largest_euclid_distances_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M1_largest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Mouse2_largest_euclid_distances_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_largest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Mouse2_largest_euclid_distances_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_largest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Mouse2_largest_euclid_distances_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['M2_largest_euclidean_distance_hull'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Total_movement_all_bodyparts_both_mice_median_' + str(roll_windows_values[i]) 
csv_df[currentColName] = csv_df['Total_movement_all_bodyparts_both_mice'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Total_movement_all_bodyparts_both_mice_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_all_bodyparts_both_mice'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Total_movement_all_bodyparts_both_mice_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_all_bodyparts_both_mice'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Total_movement_centroids_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_centroids'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Total_movement_centroids_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_centroids'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Total_movement_centroids_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_movement_centroids'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Tail_base_movement_M1_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_tail_base'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Tail_base_movement_M1_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_tail_base'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Tail_base_movement_M1_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_tail_base'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Tail_base_movement_M2_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_tail_base'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Tail_base_movement_M2_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_tail_base'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Tail_base_movement_M2_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_tail_base'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Centroid_movement_M1_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_centroid'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Centroid_movement_M1_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_centroid'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Centroid_movement_M1_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_centroid'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Centroid_movement_M2_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_centroid'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Centroid_movement_M2_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_centroid'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Centroid_movement_M2_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_centroid'].rolling(roll_windows[i], min_periods=1).sum() 
for i in range(len(roll_windows_values)): currentColName = 'Tail_end_movement_M1_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_tail_end'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Tail_end_movement_M1_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_tail_end'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Tail_end_movement_M1_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_tail_end'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Tail_end_movement_M2_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_tail_end'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Tail_end_movement_M2_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_tail_end'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Tail_end_movement_M2_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_tail_end'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Nose_movement_M1_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_nose'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Nose_movement_M1_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_nose'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Nose_movement_M1_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_1_nose'].rolling(roll_windows[i], min_periods=1).sum() for i in range(len(roll_windows_values)): currentColName = 'Nose_movement_M2_median_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_nose'].rolling(roll_windows[i], min_periods=1).median() currentColName = 'Nose_movement_M2_mean_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_nose'].rolling(roll_windows[i], min_periods=1).mean() currentColName = 'Nose_movement_M2_sum_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Movement_mouse_2_nose'].rolling(roll_windows[i], min_periods=1).sum() ########### BODY PARTS RELATIVE TO EACH OTHER ################## csv_df['Tail_end_relative_to_tail_base_centroid_nose'] = csv_df['Movement_mouse_1_tail_end'] - ( csv_df['Movement_mouse_1_tail_base'] + csv_df['Movement_mouse_1_centroid'] + csv_df[ 'Movement_mouse_1_nose']) for i in range(len(roll_windows_values)): currentColName_M1 = 'Tail_end_relative_to_tail_base_centroid_nose_M1_' + str(roll_windows_values[i]) tail_end_col_name = 'Tail_end_movement_M1_mean_' + str(roll_windows_values[i]) tail_base_col_name = 'Tail_base_movement_M1_mean_' + str(roll_windows_values[i]) centroid_col_name = 'Centroid_movement_M1_mean_' + str(roll_windows_values[i]) nose_col_name = 'Nose_movement_M1_mean_' + str(roll_windows_values[i]) currentColName_M2 = 'Tail_end_relative_to_tail_base_centroid_nose_M2_mean_' + str(roll_windows_values[i]) tail_end_col_name_M2 = 'Tail_end_movement_M2_mean_' + str(roll_windows_values[i]) tail_base_col_name_M2 = 'Tail_base_movement_M2_mean_' + str(roll_windows_values[i]) centroid_col_name_M2 = 'Centroid_movement_M2_mean_' + str(roll_windows_values[i]) nose_col_name_M2 = 'Nose_movement_M2_mean_' + str(roll_windows_values[i]) csv_df[currentColName_M1] = csv_df[tail_end_col_name] - ( 
csv_df[tail_base_col_name] + csv_df[centroid_col_name] + csv_df[nose_col_name]) csv_df[currentColName_M2] = csv_df[tail_end_col_name_M2] - ( csv_df[tail_base_col_name_M2] + csv_df[centroid_col_name_M2] + csv_df[nose_col_name_M2]) ########### ANGLES ########################################### print('Calculating angles...') csv_df['Mouse_1_angle'] = csv_df.apply( lambda x: angle3pt(x['Nose_1_x'], x['Nose_1_y'], x['Center_1_x'], x['Center_1_y'], x['Tail_base_1_x'], x['Tail_base_1_y']), axis=1) csv_df['Mouse_2_angle'] = csv_df.apply( lambda x: angle3pt(x['Nose_2_x'], x['Nose_2_y'], x['Center_2_x'], x['Center_2_y'], x['Tail_base_2_x'], x['Tail_base_2_y']), axis=1) csv_df['Total_angle_both_mice'] = csv_df['Mouse_1_angle'] + csv_df['Mouse_2_angle'] for i in range(len(roll_windows_values)): currentColName = 'Total_angle_both_mice_' + str(roll_windows_values[i]) csv_df[currentColName] = csv_df['Total_angle_both_mice'].rolling(roll_windows[i], min_periods=1).sum() ########### DEVIATIONS ########################################### print('Calculating deviations...') csv_df['Total_movement_all_bodyparts_both_mice_deviation'] = csv_df.eval('Total_movement_all_bodyparts_both_mice.mean() - Total_movement_all_bodyparts_both_mice') csv_df['Sum_euclid_distances_hull_deviation'] = csv_df.eval('Sum_euclidean_distance_hull_M1_M2.mean() - Sum_euclidean_distance_hull_M1_M2') csv_df['M1_smallest_euclid_distances_hull_deviation'] = csv_df.eval('M1_smallest_euclidean_distance_hull.mean() - M1_smallest_euclidean_distance_hull') csv_df['M1_largest_euclid_distances_hull_deviation'] = csv_df.eval('M1_largest_euclidean_distance_hull.mean() - M1_largest_euclidean_distance_hull') csv_df['M1_mean_euclid_distances_hull_deviation'] = csv_df.eval('M1_mean_euclidean_distance_hull.mean() - M1_mean_euclidean_distance_hull') csv_df['Centroid_distance_deviation'] = csv_df.eval('Centroid_distance.mean() - Centroid_distance') csv_df['Total_angle_both_mice_deviation'] = csv_df.eval('Total_angle_both_mice - Total_angle_both_mice') csv_df['Movement_mouse_1_deviation_centroid'] = csv_df.eval('Movement_mouse_1_centroid.mean() - Movement_mouse_1_centroid') csv_df['Movement_mouse_2_deviation_centroid'] = csv_df.eval('Movement_mouse_2_centroid.mean() - Movement_mouse_2_centroid') csv_df['Mouse_1_polygon_deviation'] = csv_df.eval('Mouse_1_poly_area.mean() - Mouse_1_poly_area') csv_df['Mouse_2_polygon_deviation'] = csv_df.eval('Mouse_2_poly_area.mean() - Mouse_2_poly_area') for i in roll_windows_values: currentColName = 'Total_movement_all_bodyparts_both_mice_mean_' + str(i) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Sum_euclid_distances_hull_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Mouse1_smallest_euclid_distances_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Mouse1_largest_euclid_distances_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName])
currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Movement_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Distance_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Total_angle_both_mice_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_deviation' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) ########### PERCENTILE RANK ########################################### print('Calculating percentile ranks...') csv_df['Movement_percentile_rank'] = csv_df['Total_movement_centroids'].rank(pct=True) csv_df['Distance_percentile_rank'] = csv_df['Centroid_distance'].rank(pct=True) csv_df['Movement_mouse_1_percentile_rank'] = csv_df['Movement_mouse_1_centroid'].rank(pct=True) csv_df['Movement_mouse_2_percentile_rank'] = csv_df['Movement_mouse_1_centroid'].rank(pct=True) csv_df['Movement_mouse_1_deviation_percentile_rank'] = csv_df['Movement_mouse_1_deviation_centroid'].rank( pct=True) csv_df['Movement_mouse_2_deviation_percentile_rank'] = csv_df['Movement_mouse_2_deviation_centroid'].rank( pct=True) csv_df['Centroid_distance_percentile_rank'] = csv_df['Centroid_distance'].rank(pct=True) csv_df['Centroid_distance_deviation_percentile_rank'] = csv_df['Centroid_distance_deviation'].rank(pct=True) for i in range(len(roll_windows_values)): currentColName = 'Total_movement_all_bodyparts_both_mice_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Sum_euclid_distances_hull_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Mouse1_mean_euclid_distances_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Mouse1_smallest_euclid_distances_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Mouse1_largest_euclid_distances_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Movement_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) for i in range(len(roll_windows_values)): currentColName = 'Distance_mean_' + str(roll_windows_values[i]) currentDev_colName = currentColName + '_percentile_rank' 
csv_df[currentDev_colName] = (csv_df[currentColName].mean() - csv_df[currentColName]) ########### CALCULATE STRAIGHTNESS OF POLYLINE PATH: tortuosity ########################################### print('Calculating path tortuosities...') as_strided = np.lib.stride_tricks.as_strided win_size = 3 centroidList_Mouse1_x = as_strided(csv_df.Center_1_x, (len(csv_df) - (win_size - 1), win_size), (csv_df.Center_1_x.values.strides * 2)) centroidList_Mouse1_y = as_strided(csv_df.Center_1_y, (len(csv_df) - (win_size - 1), win_size), (csv_df.Center_1_y.values.strides * 2)) centroidList_Mouse2_x = as_strided(csv_df.Center_2_x, (len(csv_df) - (win_size - 1), win_size), (csv_df.Center_2_x.values.strides * 2)) centroidList_Mouse2_y = as_strided(csv_df.Center_2_y, (len(csv_df) - (win_size - 1), win_size), (csv_df.Center_2_y.values.strides * 2)) for k in range(len(roll_windows_values)): start = 0 end = start + int(roll_windows_values[k]) tortuosity_M1 = [] tortuosity_M2 = [] for y in range(len(csv_df)): tortuosity_List_M1 = [] tortuosity_List_M2 = [] CurrCentroidList_Mouse1_x = centroidList_Mouse1_x[start:end] CurrCentroidList_Mouse1_y = centroidList_Mouse1_y[start:end] CurrCentroidList_Mouse2_x = centroidList_Mouse2_x[start:end] CurrCentroidList_Mouse2_y = centroidList_Mouse2_y[start:end] for i in range(len(CurrCentroidList_Mouse1_x)): currMovementAngle_mouse1 = ( angle3pt(CurrCentroidList_Mouse1_x[i][0], CurrCentroidList_Mouse1_y[i][0], CurrCentroidList_Mouse1_x[i][1], CurrCentroidList_Mouse1_y[i][1], CurrCentroidList_Mouse1_x[i][2], CurrCentroidList_Mouse1_y[i][2])) currMovementAngle_mouse2 = ( angle3pt(CurrCentroidList_Mouse2_x[i][0], CurrCentroidList_Mouse2_y[i][0], CurrCentroidList_Mouse2_x[i][1], CurrCentroidList_Mouse2_y[i][1], CurrCentroidList_Mouse2_x[i][2], CurrCentroidList_Mouse2_y[i][2])) tortuosity_List_M1.append(currMovementAngle_mouse1) tortuosity_List_M2.append(currMovementAngle_mouse2) tortuosity_M1.append(sum(tortuosity_List_M1) / (2 * math.pi)) tortuosity_M2.append(sum(tortuosity_List_M2) / (2 * math.pi)) start += 1 end += 1 currentColName1 = str('Tortuosity_Mouse1_') + str(roll_windows_values[k]) #currentColName2 = str('Tortuosity_Mouse2_') + str(roll_windows_values[k]) csv_df[currentColName1] = tortuosity_M1 #csv_df[currentColName2] = tortuosity_M2 ########### CALC THE NUMBER OF LOW PROBABILITY DETECTIONS & TOTAL PROBABILITY VALUE FOR ROW########################################### print('Calculating pose probability scores...') csv_df['Sum_probabilities'] = csv_df.eval('Ear_left_1_p + Ear_right_1_p + Nose_1_p + Center_1_p + Lat_left_1_p + Lat_right_1_p + Tail_base_1_p + Tail_end_1_p + Ear_left_2_p + Ear_right_2_p + Nose_2_p + Center_2_p + Lat_left_2_p + Lat_right_2_p + Tail_base_2_p + Tail_end_2_p') csv_df['Sum_probabilities_deviation'] = csv_df.eval('Sum_probabilities.mean() - Sum_probabilities') csv_df['Sum_probabilities_deviation_percentile_rank'] = csv_df['Sum_probabilities_deviation'].rank(pct=True) csv_df['Sum_probabilities_percentile_rank'] = csv_df['Sum_probabilities_deviation_percentile_rank'].rank(pct=True) csv_df_probability = csv_df.filter( ['Ear_left_1_p', 'Ear_right_1_p', 'Nose_1_p', 'Center_1_p', 'Lat_left_1_p', 'Lat_right_1_p', 'Tail_base_1_p', 'Tail_end_1_p', 'Ear_left_2_p', 'Ear_right_2_p', 'Nose_2_p', 'Center_2_p', 'Lat_left_2_p', 'Lat_right_2_p', 'Tail_base_2_p', 'Tail_end_2_p']) values_in_range_min, values_in_range_max = 0.0, 0.1 csv_df["Low_prob_detections_0.1"] = csv_df_probability.apply(func=lambda row: count_values_in_range(row, values_in_range_min, 
values_in_range_max), axis=1) values_in_range_min, values_in_range_max = 0.000000000, 0.5 csv_df["Low_prob_detections_0.5"] = csv_df_probability.apply( func=lambda row: count_values_in_range(row, values_in_range_min, values_in_range_max), axis=1) values_in_range_min, values_in_range_max = 0.000000000, 0.75 csv_df["Low_prob_detections_0.75"] = csv_df_probability.apply( func=lambda row: count_values_in_range(row, values_in_range_min, values_in_range_max), axis=1) ########### DROP COORDINATE COLUMNS ########################################### csv_df = csv_df.reset_index(drop=True) csv_df = csv_df.fillna(0) csv_df = csv_df.drop(columns=['index'], axis=1, errors='ignore') fileName = os.path.basename(currentFile) saveFN = os.path.join(csv_dir_out, fileName) save_df(csv_df, wfileType, saveFN) print('Feature extraction complete for ' + '"' + str(currVidName) + '".') print('All feature extraction complete.')
for i in range(len(roll_windows_values)): currentColName = 'Mouse1_mean_euclid_distances_mean_' + str(roll_windows_values[i])
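A condensed sketch of the rolling-window pattern repeated throughout the function above, assuming a pandas Series of per-frame values and the same seconds-style window units converted to frame counts with int(fps / value). `rolling_features` is an illustrative name, not part of SimBA, and the max(..., 1) guard against zero-length windows is an addition here.

import pandas as pd

def rolling_features(series, fps, window_values=(2, 5, 6, 7.5, 15)):
    # For each window, compute the median, mean, and sum over a
    # trailing frame window, as the feature extractor does above.
    out = {}
    for value in window_values:
        frames = max(int(fps / value), 1)
        roll = series.rolling(frames, min_periods=1)
        out['median_' + str(value)] = roll.median()
        out['mean_' + str(value)] = roll.mean()
        out['sum_' + str(value)] = roll.sum()
    return pd.DataFrame(out)

movement = pd.Series([0.0, 1.5, 2.0, 0.5, 3.0])
print(rolling_features(movement, fps=30).head())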
Footer.js
import React from "react"; import styled from "styled-components"; import media from "styled-media-query"; const Foot = styled.footer` width: 100%; height: 100%; background: white; color: black; text-align: center; font-size: 15px; padding: 40px 0; h1 { margin-block-start: 10px; } .email { padding: 20px 0; font-size: 30px; } img { width: 200px; height: 200px; border: 3px solid black; border-radius: 50%; } .logo { position: relative; display: block; perspective: 600px; display: flex; justify-content: center; align-items: center; } .front { transform-style: preserve-3d; transform: rotateY(0deg); transition: all 0.5s ease-in-out; backface-visibility: hidden; } .back { position: absolute; top: 0px; height: 100%; z-index: -1; transform-style: preserve-3d; text-align: center; transform: rotateY(-180deg); transition: all 0.5s ease-in-out; backface-visibility: hidden; } .logo:hover .front { transform: rotateY(180deg); } .logo:hover .back { transform: rotateY(0deg); z-index: 1; } ${media.lessThan("600px")` padding: 20px 0; .email { font-size: 15px; } img { width: 100px; height: 100px; } `} `; function Body() { return ( <Foot id="footer"> <h1>Contact</h1> <div className="email">Email: [email protected]</div> <div className="logo"> <figure className="front"> <a href="https://github.com/cjdtjr6rl"> <img src="https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcSKdAFbdqBEx0dF3-F_eir36e6NL3mv_jwEpg&usqp=CAU" alt="github logo" /> </a> </figure> <figure className="back"> <a href="https://github.com/cjdtjr6rl"> <img src="https://avatars3.githubusercontent.com/u/43205396?s=460&u=93e86c1b3ab399f2301e3a7d2c94280649c50a31&v=4" alt="cjdtjr6rl git" /> </a> </figure> </div> </Foot> ); }
export default Body;
d09.go
package y2021m06

// Daily problem 2021-06-09
//
// 879. Profitable Schemes
// Link: https://leetcode-cn.com/problems/profitable-schemes/
func profitableSchemes(n int, minProfit int, group []int, profit []int) int {
	max := func(a, b int) int {
		if a > b {
			return a
		}
		return b
	}
	const mod int = 1e9 + 7
	ng := len(group)
	dp := make([][][]int, ng+1)
	for i := range dp {
		dp[i] = make([][]int, n+1)
		for j := range dp[i] {
			dp[i][j] = make([]int, minProfit+1)
		}
	}
	dp[0][0][0] = 1
	for i, members := range group {
		earn := profit[i]
		for j := 0; j <= n; j++ {
			for k := 0; k <= minProfit; k++ {
				if j < members {
					dp[i+1][j][k] = dp[i][j][k]
				} else {
					dp[i+1][j][k] = (dp[i][j][k] + dp[i][j-members][max(0, k-earn)]) % mod
				}
			}
		}
	}
	sum := 0
	for _, d := range dp[ng] {
		sum = (sum + d[minProfit]) % mod
	}
	return sum
}
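A space-optimized Python sketch of the same dynamic program, assuming the usual LeetCode 879 semantics: dp[j][k] counts plans using j members whose profit, clamped at minProfit, is k. Iterating j downwards lets one 2D table stand in for the 3D table used in the Go solution above.

MOD = 10**9 + 7

def profitable_schemes(n, min_profit, group, profit):
    # dp[j][k]: number of plans using exactly j members with clamped profit k.
    dp = [[0] * (min_profit + 1) for _ in range(n + 1)]
    dp[0][0] = 1
    for members, earn in zip(group, profit):
        # Descend over j so dp[j - members] still holds the previous layer.
        for j in range(n, members - 1, -1):
            for k in range(min_profit, -1, -1):
                dp[j][k] = (dp[j][k] + dp[j - members][max(0, k - earn)]) % MOD
    return sum(dp[j][min_profit] for j in range(n + 1)) % MOD

print(profitable_schemes(5, 3, [2, 2], [2, 3]))  # 2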
timeout.rs
#![allow(deprecated)]

use std::time::{SystemTime, UNIX_EPOCH};

use ws::Sender;
use ws::util::{Token, Timeout};

pub const RESPONSE_TIMEOUT: TimeoutWindow = TimeoutWindow { min: 3000, max: 5500 };
pub const PING_TIMEOUT: TimeoutWindow = TimeoutWindow { min: 12000, max: 16000 };

#[derive(Copy, Clone)]
pub struct TimeoutWindow {
    min: u64,
    max: u64
}

pub struct AbsoluteTimeoutWindow {
    min: u64,
    max: u64
}

impl AbsoluteTimeoutWindow {
    fn new(timeout_window: &TimeoutWindow) -> AbsoluteTimeoutWindow {
        let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() * 1000;
        AbsoluteTimeoutWindow {
            min: now + timeout_window.min,
            max: now + timeout_window.max
        }
    }
}

#[derive(Copy, Clone)]
pub enum TimeoutState {
    Deathline,
    Normal
}

pub struct TimeoutManager {
    window: AbsoluteTimeoutWindow,
    state: TimeoutState,
    timeout: Option<Timeout>,
    token: Token
}

impl TimeoutManager {
    pub fn new(sender: &Sender, window: TimeoutWindow, state: TimeoutState) -> TimeoutManager {
        let absolute_window = AbsoluteTimeoutWindow::new(&window);
        let token = Token(2);
        sender.timeout(window.max, token).unwrap();
        TimeoutManager {
            window: absolute_window,
            state,
            timeout: None,
            token
        }
    }

    pub fn arm(&mut self, sender: &Sender, new_window: TimeoutWindow, new_state: TimeoutState) {
        self.state = new_state;
        let new_absolute_window = AbsoluteTimeoutWindow::new(&new_window);
        if self.window.max < new_absolute_window.min || self.window.max > new_absolute_window.max {
            self.window = new_absolute_window;
            self.timeout.take().map(|timeout| sender.cancel(timeout));
            self.token = Token(self.token.0 + 1);
            sender.timeout(new_window.max, self.token).unwrap();
        }
    }

    pub fn disarm(&mut self) {
        self.timeout = None;
    }

    pub fn on_new_timeout(&mut self, token: Token, timeout: Timeout) {
        if token == self.token {
            self.timeout = Some(timeout);
        }
    }

    pub fn on_timeout(&mut self, token: Token) -> Option<TimeoutState> {
        if token == self.token {
            self.timeout = None;
            Some(self.state)
        } else {
            None
        }
    }
}
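The interesting decision in `arm` is when to re-register the timer: only if the existing deadline falls outside the new acceptable window. A Python sketch of that check, assuming millisecond deadlines; the names are illustrative.

import time

def should_rearm(current_deadline_ms, new_min_ms, new_max_ms):
    # Re-register only when the existing deadline lies outside
    # [now + new_min, now + new_max]; otherwise keep the current timer.
    now_ms = int(time.time() * 1000)
    lo, hi = now_ms + new_min_ms, now_ms + new_max_ms
    return current_deadline_ms < lo or current_deadline_ms > hi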
repo_chown.go
package main

import (
	"fmt"

	"github.com/urfave/cli"
)

var repoChownCmd = cli.Command{
	Name:   "chown",
	Usage:  "assume ownership of a repository",
	Action: repoChown,
}

func repoChown(c *cli.Context) error {
	repo := c.Args().First()
	owner, name, err := parseRepo(repo)
	if err != nil {
		return err
	}
	client, err := newClient(c)
	if err != nil {
		return err
	}
	if _, err := client.RepoChown(owner, name); err != nil {
		return err
	}
	fmt.Printf("Successfully assumed ownership of repository %s/%s\n", owner, name)
	return nil
}
main.rs
use std::{ collections::HashMap, error::Error, sync::{Arc, Mutex}, time::Duration, }; use chrono::Local; use garden_rpc::{ garden_pi_server::{GardenPi, GardenPiServer}, set_valve_state_response, GetModeResponse, GetScheduleResponse, GetValveStateRequest, GetValveStateResponse, GetValveStatesResponse, Mode, Schedule, SetModeRequest, SetScheduleRequest, SetScheduleResponse, SetValveStateRequest, SetValveStateResponse, Timespan, Timestamp, Valve, ValveState, }; use tonic::{transport::Server, Request, Response, Status}; /// This struct will be shared between the worker thread and the server. The server will /// update it and the worker thread will periodically update the open valves accordingly. struct GardenState { mode: Mode, // This array is used to save the valve states when using `Mode::Manual`. manual_valve_states: [ValveState; 8], // The schedule that is used when using `Mode::Scheduled`. schedule: Schedule, } impl GardenState { fn new() -> Self { Self { mode: Mode::Scheduled, manual_valve_states: [ValveState::Off; 8], schedule: Schedule { valve1: Vec::new(), valve2: Vec::new(), valve3: Vec::new(), valve4: Vec::new(), valve5: Vec::new(), valve6: Vec::new(), valve7: Vec::new(), valve8: Vec::new(), }, } } fn get_current_valves(&self) -> [ValveState; 8] { [ValveState::Off; 8] } } /// A mock implementation of the garden server. /// This is to be used for testing. struct GardenPiImpl { state: Arc<Mutex<GardenState>>, } impl GardenPiImpl { fn new(state: Arc<Mutex<GardenState>>) -> Self { Self { state } } // fn show(&self) -> () { // print!( // "\n\n---------------------------------------------------------------------------------\n|" // ); // for i in 0..8 { // let valve = Valve::from_i32(i).expect("There should be 8 valves, numbered from 0 to 7"); // let state = *self // .valves // .lock() // .expect("Mutex shouldn't be poisoned") // .get(&valve) // .expect("All valves have state"); // // let state_string = match state { // ValveState::On => "On ", // ValveState::Off => "Off", // }; // print!(" {} |", state_string); // } // print!(" {}", Local::now().format("[%d-%b-%Y](%H:%M:%S)")); // println!( // "\n---------------------------------------------------------------------------------" // ); // } } #[tonic::async_trait] impl GardenPi for GardenPiImpl { async fn set_mode(&self, request: Request<SetModeRequest>) -> Result<Response<()>, Status> { let new_mode = request.into_inner().mode(); self.state.lock().unwrap().mode = new_mode; Ok(Response::new(())) } async fn get_mode(&self, _: Request<()>) -> Result<Response<GetModeResponse>, Status> { let current_mode = self.state.lock().unwrap().mode; let mut response = GetModeResponse::default(); response.set_mode(current_mode); Ok(Response::new(response)) } async fn set_schedule( &self, request: Request<SetScheduleRequest>, ) -> Result<Response<SetScheduleResponse>, Status> { let request = request.into_inner(); let new_schedule = request.schedule.unwrap(); self.state.lock().unwrap().schedule = new_schedule; let mut response = SetScheduleResponse::default(); response.success = true; Ok(Response::new(response)) } async fn get_schedule(&self, _: Request<()>) -> Result<Response<GetScheduleResponse>, Status> { let schedule = self.state.lock().unwrap().schedule.clone(); let mut response = GetScheduleResponse::default(); response.schedule = Some(schedule); Ok(Response::new(response)) } async fn set_valve_state( &self, request: Request<SetValveStateRequest>, ) -> Result<Response<SetValveStateResponse>, Status> { let request = request.into_inner(); let mut 
response = SetValveStateResponse::default();
        response.success = {
            let mut state = self.state.lock().unwrap();
            match state.mode {
                Mode::Scheduled => false,
                Mode::Manual => {
                    state.manual_valve_states[request.valve as usize] = request.state();
                    true
                }
            }
        };
        Ok(Response::new(response))
    }

    async fn get_valve_state(
        &self,
        request: Request<GetValveStateRequest>,
    ) -> Result<Response<GetValveStateResponse>, Status> {
        let request = request.into_inner();
        let mut response = GetValveStateResponse::default();
        let valve_state = {
            let state = self.state.lock().unwrap();
            state.get_current_valves()[request.valve() as usize]
        };
        response.set_state(valve_state);
        let response = Response::new(response);
        Ok(response)
    }

    async fn get_valve_states(
        &self,
        _request: Request<()>,
    ) -> Result<Response<GetValveStatesResponse>, Status> {
        let mut response = GetValveStatesResponse::default();
        {
            let current_valves = self.state.lock().unwrap().get_current_valves();
            response.set_valve1_state(current_valves[0]);
            response.set_valve2_state(current_valves[1]);
            response.set_valve3_state(current_valves[2]);
            response.set_valve4_state(current_valves[3]);
            response.set_valve5_state(current_valves[4]);
            response.set_valve6_state(current_valves[5]);
            response.set_valve7_state(current_valves[6]);
            response.set_valve8_state(current_valves[7]);
        }
        let response = Response::new(response);
        Ok(response)
    }
}

fn worker_thread(state: Arc<Mutex<GardenState>>, interval: Duration) {
    loop {
        // Get the current state of the valves.
        let valve_states = {
            let state = state.lock().unwrap();
            state.get_current_valves()
        };

        // Update the real valves.
        let mut line = String::new();
        for valve_state in &valve_states {
            let state = match valve_state {
                ValveState::Off => "| Off ",
                ValveState::On => "| On ",
            };
            line.push_str(state);
        }

        // Create the separator **before** appending the date, so that the
        // chars count is accurate.
        let separator = "-".repeat(line.chars().count() + 1);
        line.push_str(&format!("| {}", Local::now().format("%d %h %Y [%H:%M:%S]")));
        println!("{}", separator);
        println!("{}", line);
        println!("{}\n\n", separator);

        std::thread::sleep(interval);
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let address = "[::1]:50050".parse()?;
    let state = Arc::new(Mutex::new(GardenState::new()));
    let garden_pi = GardenPiImpl::new(state.clone());

    let worker_state = state.clone();
    std::thread::spawn(move || worker_thread(worker_state, Duration::from_secs(5)));

    Server::builder()
        .add_service(GardenPiServer::new(garden_pi))
        .serve(address)
        .await?;

    Ok(())
}
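A Python sketch of the shared-state pattern above: RPC handlers mutate a lock-guarded state object, while a worker thread periodically snapshots it and drives the valves. The names are illustrative, not a port of the Rust types.

import threading
import time

class GardenState:
    def __init__(self):
        self.lock = threading.Lock()
        self.manual_valve_states = [False] * 8

def worker_thread(state, interval_s):
    while True:
        # Snapshot under the lock, then act on the copy so the lock is
        # never held while sleeping or doing I/O.
        with state.lock:
            snapshot = list(state.manual_valve_states)
        print(' | '.join('On' if v else 'Off' for v in snapshot))
        time.sleep(interval_s)

state = GardenState()
threading.Thread(target=worker_thread, args=(state, 5), daemon=True).start()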
client.go
/* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package routetables import ( "context" "encoding/json" "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2021-02-01/network" "github.com/Azure/go-autorest/autorest" azureautorest "github.com/Azure/go-autorest/autorest/azure" "github.com/pkg/errors" infrav1 "sigs.k8s.io/cluster-api-provider-azure/api/v1beta1" "sigs.k8s.io/cluster-api-provider-azure/azure" "sigs.k8s.io/cluster-api-provider-azure/util/reconciler" "sigs.k8s.io/cluster-api-provider-azure/util/tele" ) // azureClient contains the Azure go-sdk Client. type azureClient struct { routetables network.RouteTablesClient } // newClient creates a new route tables client from subscription ID. func newClient(auth azure.Authorizer) *azureClient { c := newRouteTablesClient(auth.SubscriptionID(), auth.BaseURI(), auth.Authorizer()) return &azureClient{c} } // newRouteTablesClient creates a new route tables client from subscription ID. func newRouteTablesClient(subscriptionID string, baseURI string, authorizer autorest.Authorizer) network.RouteTablesClient
// Get gets the specified route table. func (ac *azureClient) Get(ctx context.Context, spec azure.ResourceSpecGetter) (result interface{}, err error) { ctx, _, done := tele.StartSpanWithLogger(ctx, "routetables.azureClient.Get") defer done() return ac.routetables.Get(ctx, spec.ResourceGroupName(), spec.ResourceName(), "") } // CreateOrUpdateAsync creates or updates a route table asynchronously. // It sends a PUT request to Azure and if accepted without error, the func will return a Future which can be used to track the ongoing // progress of the operation. func (ac *azureClient) CreateOrUpdateAsync(ctx context.Context, spec azure.ResourceSpecGetter, parameters interface{}) (result interface{}, future azureautorest.FutureAPI, err error) { ctx, _, done := tele.StartSpanWithLogger(ctx, "routetables.azureClient.CreateOrUpdateAsync") defer done() rt, ok := parameters.(network.RouteTable) if !ok { return nil, nil, errors.Errorf("%T is not a network.RouteTable", parameters) } createFuture, err := ac.routetables.CreateOrUpdate(ctx, spec.ResourceGroupName(), spec.ResourceName(), rt) if err != nil { return nil, nil, err } ctx, cancel := context.WithTimeout(ctx, reconciler.DefaultAzureCallTimeout) defer cancel() err = createFuture.WaitForCompletionRef(ctx, ac.routetables.Client) if err != nil { // if an error occurs, return the future. // this means the long-running operation didn't finish in the specified timeout. return nil, &createFuture, err } result, err = createFuture.Result(ac.routetables) // if the operation completed, return a nil future return result, nil, err } // DeleteAsync deletes a route table asynchronously. DeleteAsync sends a DELETE // request to Azure and if accepted without error, the func will return a Future which can be used to track the ongoing // progress of the operation. func (ac *azureClient) DeleteAsync(ctx context.Context, spec azure.ResourceSpecGetter) (future azureautorest.FutureAPI, err error) { ctx, _, done := tele.StartSpanWithLogger(ctx, "routetables.azureClient.DeleteAsync") defer done() deleteFuture, err := ac.routetables.Delete(ctx, spec.ResourceGroupName(), spec.ResourceName()) if err != nil { return nil, err } ctx, cancel := context.WithTimeout(ctx, reconciler.DefaultAzureCallTimeout) defer cancel() err = deleteFuture.WaitForCompletionRef(ctx, ac.routetables.Client) if err != nil { // if an error occurs, return the future. // this means the long-running operation didn't finish in the specified timeout. return &deleteFuture, err } _, err = deleteFuture.Result(ac.routetables) // if the operation completed, return a nil future. return nil, err } // IsDone returns true if the long-running operation has completed. func (ac *azureClient) IsDone(ctx context.Context, future azureautorest.FutureAPI) (isDone bool, err error) { ctx, _, done := tele.StartSpanWithLogger(ctx, "routetables.azureClient.IsDone") defer done() isDone, err = future.DoneWithContext(ctx, ac.routetables) if err != nil { return false, errors.Wrap(err, "failed checking if the operation was complete") } return isDone, nil } // Result fetches the result of a long-running operation future. 
func (ac *azureClient) Result(ctx context.Context, future azureautorest.FutureAPI, futureType string) (result interface{}, err error) { _, _, done := tele.StartSpanWithLogger(ctx, "routetables.azureClient.Result") defer done() if future == nil { return nil, errors.Errorf("cannot get result from nil future") } switch futureType { case infrav1.PutFuture: // Marshal and Unmarshal the future to put it into the correct future type so we can access the Result function. // Unfortunately the FutureAPI can't be cast directly to RouteTablesCreateOrUpdateFuture because it is an azureautorest.Future, which doesn't implement the Result function. See PR #1686 for discussion on alternatives. // It was converted back to a generic azureautorest.Future from the CAPZ infrav1.Future type stored in Status: https://github.com/kubernetes-sigs/cluster-api-provider-azure/blob/main/azure/converters/futures.go#L49. var createFuture *network.RouteTablesCreateOrUpdateFuture jsonData, err := future.MarshalJSON() if err != nil { return nil, errors.Wrap(err, "failed to marshal future") } if err := json.Unmarshal(jsonData, &createFuture); err != nil { return nil, errors.Wrap(err, "failed to unmarshal future data") } return createFuture.Result(ac.routetables) case infrav1.DeleteFuture: // Delete does not return a result route table. return nil, nil default: return nil, errors.Errorf("unknown future type %q", futureType) } }
{ routeTablesClient := network.NewRouteTablesClientWithBaseURI(baseURI, subscriptionID) azure.SetAutoRestClientDefaults(&routeTablesClient.Client, authorizer) return routeTablesClient }
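For context, a minimal sketch of how a caller might drive the async create/poll/result cycle exposed by this client. The `ensureRouteTable` helper, its polling loop, and the extra `time` import are illustrative assumptions, not part of this file; the real CAPZ reconciler requeues rather than sleeping.

```go
// Illustrative only: drives CreateOrUpdateAsync/IsDone/Result end to end.
// `authorizer` and `spec` are assumed to be caller-supplied implementations.
func ensureRouteTable(ctx context.Context, authorizer azure.Authorizer, spec azure.ResourceSpecGetter, params network.RouteTable) (interface{}, error) {
	client := newClient(authorizer)

	// A nil future means the PUT completed within the call's timeout.
	result, future, err := client.CreateOrUpdateAsync(ctx, spec, params)
	if future == nil {
		return result, err
	}

	// Otherwise poll the long-running operation until it is done.
	for {
		done, err := client.IsDone(ctx, future)
		if err != nil {
			return nil, err
		}
		if done {
			return client.Result(ctx, future, infrav1.PutFuture)
		}
		time.Sleep(10 * time.Second) // the real reconciler requeues instead of sleeping
	}
}
```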
App.js
import React, {useEffect, useState, useRef} from "react"; import {useDispatch, useSelector} from "react-redux"; import {connect} from "./redux/blockchain/blockchainActions"; import {fetchData} from "./redux/data/dataActions"; import * as s from "./styles/globalStyles"; import styled from "styled-components"; import './styles/evolution-apez.webflow-v1.css' import Particles from "react-tsparticles"; // import './styles/cube.css' import logo from './images/logo.png'
export const StyledButton = styled.button` padding: 10px; border-radius: 50px; border: none; background-color: var(--secondary); font-weight: bold; color: var(--secondary-text); width: 100px; cursor: pointer; box-shadow: 0px 6px 0px -2px rgba(250, 250, 250, 0.3); -webkit-box-shadow: 0px 6px 0px -2px rgba(250, 250, 250, 0.3); -moz-box-shadow: 0px 6px 0px -2px rgba(250, 250, 250, 0.3); :active { box-shadow: none; -webkit-box-shadow: none; -moz-box-shadow: none; } `; export const StyledRoundButton = styled.button` padding: 10px; border-radius: 30%; border: none; background-color: var(--primary); font-weight: bold; font-size: 24px; font-family: "Ubuntu"; color: var(--primary-text); width: 30px; height: 30px; cursor: pointer; display: flex; align-items: center; justify-content: center; box-shadow: 0px 4px 0px -2px rgba(250, 250, 250, 0.3); -webkit-box-shadow: 0px 4px 0px -2px rgba(250, 250, 250, 0.3); -moz-box-shadow: 0px 4px 0px -2px rgba(250, 250, 250, 0.3); :active { box-shadow: none; -webkit-box-shadow: none; -moz-box-shadow: none; } `; export const ResponsiveWrapper = styled.div` display: flex; flex: 1; flex-direction: column; justify-content: stretch; align-items: stretch; width: 100%; @media (min-width: 1087px) { flex-direction: row; } z-index: 5; `; export const StyledLogo = styled.img` width: 200px; @media (min-width: 767px) { width: 300px; } transition: width 0.5s, height 0.5s; z-index: 5; `; export const StyledImg = styled.img` box-shadow: 0px 5px 11px 2px rgba(0, 0, 0, 0.7); border: 4px solid var(--secondary); background-color: var(--accent); border-radius: 100%; width: 200px; @media (max-width: 600px) { display: none; } @media (min-width: 1000px) { width: 300px; } transition: width 0.5s; `; export const StyledLink = styled.a` color: var(--secondary); text-decoration: none; `; function App() { const dispatch = useDispatch(); const particleState = 'links' // links || circle || polygon const blockchain = useSelector((state) => state.blockchain); const data = useSelector((state) => state.data); const [claimingNft, setClaimingNft] = useState(false); const [feedback, setFeedback] = useState(`Click 'MINT' to adopt your Billionaire Babies!`); const [mintAmount, setMintAmount] = useState(1); const [CONFIG, SET_CONFIG] = useState({ CONTRACT_ADDRESS: "", SCAN_LINK: "", NETWORK: { NAME: "", SYMBOL: "", ID: 0, }, NFT_NAME: "", SYMBOL: "", MAX_SUPPLY: 2100, WEI_COST: 0, DISPLAY_COST: 0, GAS_LIMIT: 0, MARKETPLACE: "", MARKETPLACE_LINK: "", SHOW_BACKGROUND: false, }); const [count, setCount] = useState(1) const minus_handle = () => { if (count !== 0) setCount(count - 1) } const positive_handle = () => { setCount(count + 1) } const claimNFTs = () => { let cost = CONFIG.WEI_COST; let gasLimit = CONFIG.GAS_LIMIT; let totalCostWei = String(cost * mintAmount); let totalGasLimit = String(gasLimit * mintAmount); console.log("Cost: ", totalCostWei); console.log("Gas limit: ", totalGasLimit); setFeedback(`Minting your Billionaire Baby...`); setClaimingNft(true); blockchain.smartContract.methods .mint(blockchain.account, mintAmount) .send({ gasLimit: String(totalGasLimit), to: CONFIG.CONTRACT_ADDRESS, from: blockchain.account, value: totalCostWei, }) .once("error", (err) => { console.log(err); setFeedback("Sorry, something went wrong, please try again later."); setClaimingNft(false); }) .then((receipt) => { console.log(receipt); setFeedback( `Yay! You're now a happy parent!
Head over to https://opensea.io/collection/billionairebabyclubnftofficial to view it!` ); setClaimingNft(false); dispatch(fetchData(blockchain.account)); }); }; const decrementMintAmount = () => { let newMintAmount = mintAmount - 1; if (newMintAmount < 1) { newMintAmount = 1; } setMintAmount(newMintAmount); }; const incrementMintAmount = () => { let newMintAmount = mintAmount + 1; if (newMintAmount > 20) { newMintAmount = 20; } setMintAmount(newMintAmount); }; const getData = () => { if (blockchain.account !== "" && blockchain.smartContract !== null) { dispatch(fetchData(blockchain.account)); } }; const getConfig = async () => { const configResponse = await fetch("/config/config.json", { headers: { "Content-Type": "application/json", Accept: "application/json", }, }); const config = await configResponse.json(); SET_CONFIG(config); }; useEffect(() => { getConfig(); }, []); useEffect(() => { getData(); }, [blockchain.account]); return ( <div style={{width: '100%', height: '100%'}}> <Particles style={{zIndex: -1}} options={{ background: { color: { value: "#010c1f", }, }, fpsLimit: 30, particles: { size: { value: particleState === 'links' ? 3 : particleState === 'circle' ? 3 : 71, }, color: { value: particleState === 'links' ? '#FFFFFF' : particleState === 'circle' ? '#FFFFFF' : "#2C2E43", }, collisions: { enable: true, }, line_linked: { "enable": particleState === 'links' && true, "distance": 300, "color": "#ffffff", "opacity": 0.4, "width": 2 }, move: { direction: "none", enable: true, outMode: "bounce", random: false, speed: 1, straight: false, }, number: { density: { enable: true, area: 800, }, value: particleState === 'links' ? 30 : particleState === 'circle' ? 30 : 3, }, opacity: { value: 0.5, }, shape: { type: particleState === 'links' ? 'circle' : particleState === 'snow' ? 'circle' : 'polygon', }, }, detectRetina: true, }} /> <div style={{display: 'flex', flexDirection: 'column', padding: 30}}> <img src={logo} alt="logo" style={{margin: '20px auto', zIndex: 5, width: 250}}/> <div className="row"> <div className="col-xl-6 col-lg-6 col-md-12 col-sm-12 col-xs-12" style={{padding: 10, backgroundColor: '#010C1F', zIndex: 5}}> <div style={{ width: "100%", border: '4px solid white', padding: "70px 20px", textAlign: 'center', borderRadius: 12 }} > <p style={{fontSize: 40, fontWeight: 700}}>PRE-SALE</p> <p style={{fontSize: 35, fontWeight: 700, margin: "10px 0"}}><span style={{color: '#61D6C8'}}>{Number(data.totalSupply) === 0 ? ("X" + "/" + CONFIG.MAX_SUPPLY) : ("" + data.totalSupply + "/" + CONFIG.MAX_SUPPLY)}</span> MINTED </p> <a target={'_blank'} style={{textDecoration: 'none', color: '#61D6C8', fontSize: 14}} href={CONFIG.SCAN_LINK}>{truncate(CONFIG.CONTRACT_ADDRESS, 15)}</a> {Number(data.totalSupply) >= CONFIG.MAX_SUPPLY ? ( <> <s.TextTitle style={{textAlign: "center", color: "var(--accent-text)"}} > The sale has ended. </s.TextTitle> <s.TextDescription style={{textAlign: "center", color: "var(--accent-text)"}} > You can still find {CONFIG.NFT_NAME} on </s.TextDescription> <s.SpacerSmall/> <StyledLink target={"_blank"} href={CONFIG.MARKETPLACE_LINK}> {CONFIG.MARKETPLACE} </StyledLink> </> ) : ( <> <p style={{margin: '20px 0', fontSize: 20}}>{CONFIG.DISPLAY_COST} ETH<span style={{color: '#61D6C8'}}>Ξ</span> per Billionaire Baby</p> <p style={{marginBottom: 10}}>excluding gas fees</p> {blockchain.account === "" || blockchain.smartContract === null ?
( <> <p style={{marginBottom: 10}}>Connect to the {CONFIG.NETWORK.NAME} network</p> <button onClick={(e) => { e.preventDefault(); dispatch(connect()); getData(); }} className={'mint-btn'}>Connect </button> {blockchain.errorMsg !== "" ? ( <> <p style={{ margin: '10px 0', textAlign: "center", color: "white", }} > {blockchain.errorMsg} </p> </> ) : null} </> ) : ( <> <div style={{width: '100%', display: 'flex'}}> <div style={{display: 'flex', margin: '20px auto'}}> <div className={'minus-controller'} onClick={decrementMintAmount}> <p>-</p> </div> <div className={'coin-num'}><p>{mintAmount}</p></div> <div className={'plus-controller'} onClick={incrementMintAmount}> <p>+</p> </div> <button disabled={claimingNft ? 1 : 0} onClick={(e) => { e.preventDefault(); claimNFTs(); getData(); }} className={'mint-btn'}>{claimingNft ? "BUSY" : "MINT"} </button> </div> </div> <p style={{fontSize: 17, fontWeight: 700}}>Total | {CONFIG.DISPLAY_COST * mintAmount} ETH</p> </> )} </> )} </div> </div> <div className="col-xl-6 col-lg-6 col-md-12 col-sm-12 col-xs-12" style={{height: 500, padding: 10}}> <div id="w-node-_7091a729-31c6-d62d-5b76-ecf964dc09cd-eecfd6cf" data-w-id="7091a729-31c6-d62d-5b76-ecf964dc09cd" className="cube-animation-section wf-section"> <div className="demo-container"> <div className="demo-wrapper"> <div className="_3d-wrapper"> <div data-w-id="7091a729-31c6-d62d-5b76-ecf964dc09d9" className="cube-wrapper"> <div className="cube-front"></div> <div className="cube-right"></div> <div className="cube-left"></div> <div className="cube-bottom"></div> <div className="cube-top"></div> <div className="cube-back"></div> </div> </div> </div> </div> </div> </div> </div> </div> <div style={{display: 'flex'}}> <div style={{zIndex: 5, margin: '0 auto', width: '65%'}}> <p style={{zIndex: 5, color: 'white', textAlign: 'center', marginBottom: 20}}>Please make sure you are connected to the right network ( {CONFIG.NETWORK.NAME} Mainnet) and the correct address. Please note: Once you make the purchase, you cannot undo this action.</p> <p style={{zIndex: 5, color: 'white', textAlign: 'center'}}>We have set the gas limit to {CONFIG.GAS_LIMIT} for the contract to successfully mint your NFT. We recommend that you don't lower the gas limit.</p> </div> </div> </div> ); } export default App; {/* <s.Container jc={"center"} ai={"center"} style={{width: "70%"}}> <s.TextDescription style={{ textAlign: "center", color: "var(--primary-text)", }} > Please make sure you are connected to the right network ( {CONFIG.NETWORK.NAME} Mainnet) and the correct address. Please note: Once you make the purchase, you cannot undo this action. </s.TextDescription> <s.SpacerSmall/> <s.TextDescription style={{ textAlign: "center", color: "var(--primary-text)", }} > We have set the gas limit to {CONFIG.GAS_LIMIT} for the contract to successfully mint your NFT. We recommend that you don't lower the gas limit. </s.TextDescription> <s.SpacerSmall/> </s.Container>*/ }
const truncate = (input, len) => input.length > len ? `${input.substring(0, len)}...` : input;
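A quick, illustrative use of the `truncate` helper above (the address is a made-up placeholder):

```js
truncate("0x5A0b54D5dc17e0AadC383d2db43B0a0D3E029c4c", 15); // -> "0x5A0b54D5dc17e..."
truncate("0xABC", 15);                                      // -> "0xABC" (within len, returned unchanged)
```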
query_test.go
// Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. //go:build cgo // +build cgo package app import ( "encoding/json" "fmt" "io" "mime/multipart" "net/http" "net/url" "reflect" "testing" "golang.org/x/perf/storage" "golang.org/x/perf/storage/benchfmt" ) func TestQuery(t *testing.T)
func TestUploads(t *testing.T) { app := createTestApp(t) defer app.Close() // Write 9 uploads to the database. These uploads have 1-9 // results each, a common label "i" set to the upload number, // and a label "j" set to the record number within the upload. var uploadIDs []string for i := 0; i < 9; i++ { status := app.uploadFiles(t, func(mpw *multipart.Writer) { w, err := mpw.CreateFormFile("file", "path/1.txt") if err != nil { t.Errorf("CreateFormFile: %v", err) } bp := benchfmt.NewPrinter(w) for j := 0; j <= i; j++ { r := &benchfmt.Result{Labels: map[string]string{"i": fmt.Sprintf("%d", i)}, NameLabels: make(map[string]string), Content: "BenchmarkName 1 ns/op"} r.Labels["j"] = fmt.Sprintf("%d", j) if err := bp.Print(r); err != nil { t.Fatalf("Print: %v", err) } } }) uploadIDs = append(uploadIDs, status.UploadID) } tests := []struct { q string extraLabels []string want []storage.UploadInfo }{ {"", nil, []storage.UploadInfo{ {9, uploadIDs[8], nil}, {8, uploadIDs[7], nil}, {7, uploadIDs[6], nil}, {6, uploadIDs[5], nil}, {5, uploadIDs[4], nil}, {4, uploadIDs[3], nil}, {3, uploadIDs[2], nil}, {2, uploadIDs[1], nil}, {1, uploadIDs[0], nil}, }}, {"j:5", nil, []storage.UploadInfo{{1, uploadIDs[8], nil}, {1, uploadIDs[7], nil}, {1, uploadIDs[6], nil}, {1, uploadIDs[5], nil}}}, {"i:5", []string{"i"}, []storage.UploadInfo{{6, uploadIDs[5], benchfmt.Labels{"i": "5"}}}}, {"not:found", nil, nil}, } for _, test := range tests { t.Run("query="+test.q, func(t *testing.T) { u := app.srv.URL + "/uploads" uv := url.Values{} if test.q != "" { uv["q"] = []string{test.q} } if test.extraLabels != nil { uv["extra_label"] = test.extraLabels } if len(uv) > 0 { u += "?" + uv.Encode() } resp, err := http.Get(u) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Fatalf("get /uploads: %v", resp.Status) } dec := json.NewDecoder(resp.Body) i := 0 for { var ui storage.UploadInfo if err := dec.Decode(&ui); err == io.EOF { break } else if err != nil { t.Fatalf("failed to parse UploadInfo: %v", err) } if i >= len(test.want) { t.Fatalf("too many responses: have %d+ want %d", i, len(test.want)) } if !reflect.DeepEqual(ui, test.want[i]) { t.Errorf("uploadinfo = %#v, want %#v", ui, test.want[i]) } i++ } if i < len(test.want) { t.Fatalf("missing responses: have %d want %d", i, len(test.want)) } }) } }
{ app := createTestApp(t) defer app.Close() // Write 1024 test results to the database. These results // have labels named label0, label1, etc. Each label's value // is an integer whose value is (record number) / (1 << label // number). So 1 record has each value of label0, 2 records // have each value of label1, 4 records have each value of // label2, etc. This allows writing queries that match 2^n records. status := app.uploadFiles(t, func(mpw *multipart.Writer) { w, err := mpw.CreateFormFile("file", "path/1.txt") if err != nil { t.Errorf("CreateFormFile: %v", err) } bp := benchfmt.NewPrinter(w) for i := 0; i < 1024; i++ { r := &benchfmt.Result{Labels: make(map[string]string), NameLabels: make(map[string]string), Content: "BenchmarkName 1 ns/op"} for j := uint(0); j < 10; j++ { r.Labels[fmt.Sprintf("label%d", j)] = fmt.Sprintf("%d", i/(1<<j)) } r.NameLabels["name"] = "Name" if err := bp.Print(r); err != nil { t.Fatalf("Print: %v", err) } } }) tests := []struct { q string want []int }{ {"label0:0", []int{0}}, {"label1:0", []int{0, 1}}, {"label0:5 name:Name", []int{5}}, {"label0:0 label0:5", nil}, } for _, test := range tests { t.Run("query="+test.q, func(t *testing.T) { u := app.srv.URL + "/search?" + url.Values{"q": []string{test.q}}.Encode() resp, err := http.Get(u) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Fatalf("get /search: %v", resp.Status) } br := benchfmt.NewReader(resp.Body) for i, num := range test.want { if !br.Next() { t.Fatalf("#%d: Next() = false, want true (Err() = %v)", i, br.Err()) } r := br.Result() if r.Labels["upload"] != status.UploadID { t.Errorf("#%d: upload = %q, want %q", i, r.Labels["upload"], status.UploadID) } if r.Labels["upload-part"] != status.FileIDs[0] { t.Errorf("#%d: upload-part = %q, want %q", i, r.Labels["upload-part"], status.FileIDs[0]) } if r.Labels["upload-file"] != "1.txt" { t.Errorf("#%d: upload-file = %q, want %q", i, r.Labels["upload-file"], "1.txt") } if r.Labels["label0"] != fmt.Sprintf("%d", num) { t.Errorf("#%d: label0 = %q, want %d", i, r.Labels["label0"], num) } if r.NameLabels["name"] != "Name" { t.Errorf("#%d: name = %q, want %q", i, r.NameLabels["name"], "Name") } if r.Labels["by"] != "user" { t.Errorf("#%d: by = %q, want %q", i, r.Labels["by"], "user") } } if br.Next() { t.Fatalf("Next() = true, want false") } if err := br.Err(); err != nil { t.Errorf("Err() = %v, want nil", err) } }) } }
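Outside the test harness, the same /search endpoint can be consumed with the benchfmt reader; a minimal sketch, assuming a reachable server at a placeholder base URL and only the imports already used in this file:

```go
// Illustrative only: streams /search results the same way TestQuery does.
func printSearch(baseURL, query string) error {
	u := baseURL + "/search?" + url.Values{"q": []string{query}}.Encode()
	resp, err := http.Get(u)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return fmt.Errorf("get /search: %v", resp.Status)
	}
	br := benchfmt.NewReader(resp.Body)
	for br.Next() {
		r := br.Result()
		fmt.Println(r.Labels["upload"], r.Content)
	}
	return br.Err() // non-nil if the stream ended with a parse or transport error
}
```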
model.rs
use anyhow::*; use std::ops::Range; use std::path::Path; use wgpu::util::DeviceExt; use crate::texture; pub trait Vertex { fn desc<'a>() -> wgpu::VertexBufferDescriptor<'a>; } #[repr(C)] #[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)] pub struct ModelVertex { position: [f32; 3], tex_coords: [f32; 2], normal: [f32; 3], } impl Vertex for ModelVertex { fn desc<'a>() -> wgpu::VertexBufferDescriptor<'a> { use std::mem; wgpu::VertexBufferDescriptor { stride: mem::size_of::<ModelVertex>() as wgpu::BufferAddress, step_mode: wgpu::InputStepMode::Vertex, attributes: &[ wgpu::VertexAttributeDescriptor { offset: 0, shader_location: 0, format: wgpu::VertexFormat::Float3, }, wgpu::VertexAttributeDescriptor { offset: mem::size_of::<[f32; 3]>() as wgpu::BufferAddress, shader_location: 1, format: wgpu::VertexFormat::Float2, }, wgpu::VertexAttributeDescriptor { offset: mem::size_of::<[f32; 5]>() as wgpu::BufferAddress, shader_location: 2, format: wgpu::VertexFormat::Float3, }, ], } } } pub struct Material { pub name: String, pub diffuse_texture: texture::Texture, pub bind_group: wgpu::BindGroup, } pub struct Mesh { pub name: String, pub vertex_buffer: wgpu::Buffer, pub index_buffer: wgpu::Buffer, pub num_elements: u32, pub material: usize, } pub struct Model { pub meshes: Vec<Mesh>, pub materials: Vec<Material>, } impl Model { pub fn load<P: AsRef<Path>>( device: &wgpu::Device, queue: &wgpu::Queue, layout: &wgpu::BindGroupLayout, path: P, ) -> Result<Self> { let (obj_models, obj_materials) = tobj::load_obj(path.as_ref(), true)?; // We're assuming that the texture files are stored with the obj file let containing_folder = path.as_ref().parent().context("Directory has no parent")?; let mut materials = Vec::new(); for mat in obj_materials { let diffuse_path = mat.diffuse_texture; let diffuse_texture = texture::Texture::load(device, queue, containing_folder.join(diffuse_path))?; let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { layout, entries: &[ wgpu::BindGroupEntry { binding: 0, resource: wgpu::BindingResource::TextureView(&diffuse_texture.view), }, wgpu::BindGroupEntry { binding: 1, resource: wgpu::BindingResource::Sampler(&diffuse_texture.sampler), }, ], label: None, }); materials.push(Material { name: mat.name, diffuse_texture, bind_group,
let mut meshes = Vec::new(); for m in obj_models { let mut vertices = Vec::new(); for i in 0..m.mesh.positions.len() / 3 { vertices.push(ModelVertex { position: [ m.mesh.positions[i * 3], m.mesh.positions[i * 3 + 1], m.mesh.positions[i * 3 + 2], ], tex_coords: [m.mesh.texcoords[i * 2], m.mesh.texcoords[i * 2 + 1]], normal: [ m.mesh.normals[i * 3], m.mesh.normals[i * 3 + 1], m.mesh.normals[i * 3 + 2], ], }); } let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some(&format!("{:?} Vertex Buffer", path.as_ref())), contents: bytemuck::cast_slice(&vertices), usage: wgpu::BufferUsage::VERTEX, }); let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some(&format!("{:?} Index Buffer", path.as_ref())), contents: bytemuck::cast_slice(&m.mesh.indices), usage: wgpu::BufferUsage::INDEX, }); meshes.push(Mesh { name: m.name, vertex_buffer, index_buffer, num_elements: m.mesh.indices.len() as u32, material: m.mesh.material_id.unwrap_or(0), }); } Ok(Self { meshes, materials }) } } pub trait DrawModel<'a, 'b> where 'b: 'a, { fn draw_mesh( &mut self, mesh: &'b Mesh, material: &'b Material, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); fn draw_mesh_instanced( &mut self, mesh: &'b Mesh, material: &'b Material, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); fn draw_model( &mut self, model: &'b Model, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); fn draw_model_instanced( &mut self, model: &'b Model, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); } impl<'a, 'b> DrawModel<'a, 'b> for wgpu::RenderPass<'a> where 'b: 'a, { fn draw_mesh( &mut self, mesh: &'b Mesh, material: &'b Material, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { self.draw_mesh_instanced(mesh, material, 0..1, uniforms, light); } fn draw_mesh_instanced( &mut self, mesh: &'b Mesh, material: &'b Material, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { self.set_vertex_buffer(0, mesh.vertex_buffer.slice(..)); self.set_index_buffer(mesh.index_buffer.slice(..)); self.set_bind_group(0, &material.bind_group, &[]); self.set_bind_group(1, &uniforms, &[]); self.set_bind_group(2, &light, &[]); self.draw_indexed(0..mesh.num_elements, 0, instances); } fn draw_model( &mut self, model: &'b Model, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { self.draw_model_instanced(model, 0..1, uniforms, light); } fn draw_model_instanced( &mut self, model: &'b Model, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { for mesh in &model.meshes { let material = &model.materials[mesh.material]; self.draw_mesh_instanced(mesh, material, instances.clone(), uniforms, light); } } } pub trait DrawLight<'a, 'b> where 'b: 'a, { fn draw_light_mesh( &mut self, mesh: &'b Mesh, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); fn draw_light_mesh_instanced( &mut self, mesh: &'b Mesh, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) where 'b: 'a; fn draw_light_model( &mut self, model: &'b Model, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); fn draw_light_model_instanced( &mut self, model: &'b Model, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ); } impl<'a, 'b> DrawLight<'a, 'b> for wgpu::RenderPass<'a> where 'b: 'a, { fn draw_light_mesh( &mut self, mesh: &'b Mesh, uniforms: &'b wgpu::BindGroup, light: &'b 
wgpu::BindGroup, ) { self.draw_light_mesh_instanced(mesh, 0..1, uniforms, light); } fn draw_light_mesh_instanced( &mut self, mesh: &'b Mesh, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { self.set_vertex_buffer(0, mesh.vertex_buffer.slice(..)); self.set_index_buffer(mesh.index_buffer.slice(..)); self.set_bind_group(0, uniforms, &[]); self.set_bind_group(1, light, &[]); self.draw_indexed(0..mesh.num_elements, 0, instances); } fn draw_light_model( &mut self, model: &'b Model, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { self.draw_light_model_instanced(model, 0..1, uniforms, light); } fn draw_light_model_instanced( &mut self, model: &'b Model, instances: Range<u32>, uniforms: &'b wgpu::BindGroup, light: &'b wgpu::BindGroup, ) { for mesh in &model.meshes { self.draw_light_mesh_instanced(mesh, instances.clone(), uniforms, light); } } }
}); }
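A small illustrative consumer of the `DrawModel` trait above; the render pass, model, and bind groups are assumed to come from the caller's existing frame setup:

```rust
// Illustrative only: draws every mesh of `model` `instance_count` times,
// binding the material at group 0, uniforms at group 1, and light at group 2,
// exactly as draw_mesh_instanced does above.
fn draw_scene<'a>(
    pass: &mut wgpu::RenderPass<'a>,
    model: &'a Model,
    uniforms: &'a wgpu::BindGroup,
    light: &'a wgpu::BindGroup,
    instance_count: u32,
) {
    pass.draw_model_instanced(model, 0..instance_count, uniforms, light);
}
```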
account.getTmpPassword_handler.go
// Copyright (c) 2018-present, NebulaChat Studio (https://nebula.chat). // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Author: Benqi ([email protected]) package account import ( "github.com/golang/glog" "github.com/PluralityNET/PluralityServer/pkg/grpc_util" "github.com/PluralityNET/PluralityServer/pkg/logger" "github.com/PluralityNET/PluralityServer/mtproto"
) // account.getTmpPassword#4a82327e password_hash:bytes period:int = account.TmpPassword; func (s *AccountServiceImpl) AccountGetTmpPassword(ctx context.Context, request *mtproto.TLAccountGetTmpPassword) (*mtproto.Account_TmpPassword, error) { md := grpc_util.RpcMetadataFromIncoming(ctx) glog.Infof("AccountGetTmpPassword - metadata: %s, request: %s", logger.JsonDebugData(md), logger.JsonDebugData(request)) // TODO(@benqi): Check password_hash invalid, android source code // byte[] hash = new byte[currentPassword.current_salt.length * 2 + passwordBytes.length]; // System.arraycopy(currentPassword.current_salt, 0, hash, 0, currentPassword.current_salt.length); // System.arraycopy(passwordBytes, 0, hash, currentPassword.current_salt.length, passwordBytes.length); // System.arraycopy(currentPassword.current_salt, 0, hash, hash.length - currentPassword.current_salt.length, currentPassword.current_salt.length); // account.tmpPassword#db64fd34 tmp_password:bytes valid_until:int = account.TmpPassword; tmpPassword := mtproto.NewTLAccountTmpPassword() tmpPassword.SetTmpPassword([]byte("01234567899876543210")) tmpPassword.SetValidUntil(int32(time.Now().Unix()) + request.Period) glog.Infof("AccountServiceImpl - reply: %s", logger.JsonDebugData(tmpPassword)) return tmpPassword.To_Account_TmpPassword(), nil }
"golang.org/x/net/context" "time"
Book.py
# Class definition for Book entity class Book: bookId = 1 def __init__(self, title, publisher, author, edition, publishedOn): self.__title = title self.__publisher = publisher self.__author = author self.__edition = edition self.__publishedOn = publishedOn self.__bookId = Book.bookId Book.bookId += 1 def __repr__(self): return "\nBook Id: {0}\nTitle: {1}\nAuthor: {2}\nEdition: {3}".format(str(self.__bookId), str(self.__title), str(self.__author), str(self.__edition)) def getBookTitle(self): return self.__title def
(self): return self.__author def UnitTestBook(): b1 = Book("Deception Point", "Rekha Publications", "Dan Brown", "2", "21/11/2018") print(b1) def main(): UnitTestBook() if __name__ == "__main__": main()
getBookAuthor
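An illustrative session with the class above, showing the auto-incrementing class-level `bookId` (the second book's details are made up):

```python
b1 = Book("Deception Point", "Rekha Publications", "Dan Brown", "2", "21/11/2018")
b2 = Book("Digital Fortress", "Rekha Publications", "Dan Brown", "1", "05/02/2019")
print(b1.getBookTitle())   # Deception Point
print(b2.getBookAuthor())  # Dan Brown
print(b2)                  # b2's Book Id is one greater than b1's
```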
group__Generic.js
[ "acc_service_status_t", "group__Generic.html#ga0fe34d6e342de54ae87279d7e10b5066", null ], [ "acc_service_status_enum_t", "group__Generic.html#gaf872bd7ad1e331ca9e41ea47eb5dfc75", [ [ "ACC_SERVICE_STATUS_OK", "group__Generic.html#ggaf872bd7ad1e331ca9e41ea47eb5dfc75af11d3960899895c1eaa94ed573da65b1", null ], [ "ACC_SERVICE_STATUS_FAILURE_UNSPECIFIED", "group__Generic.html#ggaf872bd7ad1e331ca9e41ea47eb5dfc75a7dc54772144765d998d565a4cafa6d17", null ] ] ], [ "acc_service_activate", "group__Generic.html#ga83f29fef7b79d216d4c6d1959b7eda03", null ], [ "acc_service_create", "group__Generic.html#ga165f40004fae21f84757a839f7253db0", null ], [ "acc_service_deactivate", "group__Generic.html#ga17fbd89e106d36a25a18a038cc625bb9", null ], [ "acc_service_destroy", "group__Generic.html#gad7cc3154b0227e63d18b11aa672de3af", null ], [ "acc_service_get_sweep_configuration", "group__Generic.html#ga0f281d408a20366b6f7504f52dcad964", null ], [ "acc_service_is_service_active", "group__Generic.html#ga5a2d777e945aa9ff278278283d95f369", null ], [ "acc_service_status_name_get", "group__Generic.html#ga90c67ad99fbc5b8038f29279900082a3", null ] ];
var group__Generic = [ [ "acc_service_configuration_t", "group__Generic.html#ga3659144b760468217ed4cb1fc3447330", null ], [ "acc_service_handle_t", "group__Generic.html#gac31f87097f46e587c8d54fb0406f62d4", null ],
setup.py
# (C) Datadog, Inc. 2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from codecs import open # To use a consistent encoding from os import path from setuptools import setup HERE = path.dirname(path.abspath(__file__)) # Get version info ABOUT = {} with open(path.join(HERE, 'datadog_checks', 'activemq', '__about__.py')) as f: exec(f.read(), ABOUT) # Get the long description from the README file with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: long_description = f.read() def get_dependencies(): dep_file = path.join(HERE, 'requirements.in') if not path.isfile(dep_file): return [] with open(dep_file, encoding='utf-8') as f: return f.readlines() CHECKS_BASE_REQ = 'datadog-checks-base' setup( name='datadog-activemq', version=ABOUT['__version__'], description='The ActiveMQ check', long_description=long_description, long_description_content_type='text/markdown', keywords='datadog agent activemq check', # The project's main homepage. url='https://github.com/DataDog/integrations-core', # Author details author='Datadog', author_email='[email protected]', # License license='BSD', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: System Administrators', 'Topic :: System :: Monitoring', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', ], # The package we're going to ship packages=['datadog_checks.activemq'], # Run-time dependencies install_requires=[CHECKS_BASE_REQ], extras_require={'deps': get_dependencies()}, # Extra files to ship with the wheel package include_package_data=True, )
classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers',
getIotHubResourceEventHubConsumerGroup.go
// *** WARNING: this file was generated by the Pulumi SDK Generator. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** package v20210701 import ( "github.com/pulumi/pulumi/sdk/v3/go/pulumi" ) func
(ctx *pulumi.Context, args *LookupIotHubResourceEventHubConsumerGroupArgs, opts ...pulumi.InvokeOption) (*LookupIotHubResourceEventHubConsumerGroupResult, error) { var rv LookupIotHubResourceEventHubConsumerGroupResult err := ctx.Invoke("azure-native:devices/v20210701:getIotHubResourceEventHubConsumerGroup", args, &rv, opts...) if err != nil { return nil, err } return &rv, nil } type LookupIotHubResourceEventHubConsumerGroupArgs struct { EventHubEndpointName string `pulumi:"eventHubEndpointName"` Name string `pulumi:"name"` ResourceGroupName string `pulumi:"resourceGroupName"` ResourceName string `pulumi:"resourceName"` } // The properties of the EventHubConsumerGroupInfo object. type LookupIotHubResourceEventHubConsumerGroupResult struct { Etag string `pulumi:"etag"` Id string `pulumi:"id"` Name string `pulumi:"name"` Properties interface{} `pulumi:"properties"` Type string `pulumi:"type"` }
LookupIotHubResourceEventHubConsumerGroup
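A minimal sketch of calling this lookup from a Pulumi program; every resource name below is a placeholder:

```go
package main

import "github.com/pulumi/pulumi/sdk/v3/go/pulumi"

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Hypothetical names; substitute your own IoT hub and resource group.
		group, err := LookupIotHubResourceEventHubConsumerGroup(ctx, &LookupIotHubResourceEventHubConsumerGroupArgs{
			EventHubEndpointName: "events",
			Name:                 "my-consumer-group",
			ResourceGroupName:    "my-resource-group",
			ResourceName:         "my-iot-hub",
		})
		if err != nil {
			return err
		}
		ctx.Export("consumerGroupEtag", pulumi.String(group.Etag))
		return nil
	})
}
```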
client.go
/* * Strava API v3 * * The [Swagger Playground](https://developers.strava.com/playground) is the easiest way to familiarize yourself with the Strava API by submitting HTTP requests and observing the responses before you write any client code. It will show what a response will look like with different endpoints depending on the authorization scope you receive from your athletes. To use the Playground, go to https://www.strava.com/settings/api and change your “Authorization Callback Domain” to developers.strava.com. Please note, we only support Swagger 2.0. There is a known issue where you can only select one scope at a time. For more information, please check the section “client code” at https://developers.strava.com/docs. * * API version: 3.0.0 * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) */ package swagger import ( "bytes" "context" "encoding/json" "encoding/xml" "errors" "fmt" "io" "mime/multipart" "net/http" "net/url" "os" "path/filepath" "reflect" "regexp" "strconv" "strings" "time" "unicode/utf8" "golang.org/x/oauth2" ) var ( jsonCheck = regexp.MustCompile("(?i:(?:application|text)/json)") xmlCheck = regexp.MustCompile("(?i:(?:application|text)/xml)") ) // APIClient manages communication with the Strava API v3 API v3.0.0 // In most cases there should be only one, shared, APIClient. type APIClient struct { cfg *Configuration common service // Reuse a single struct instead of allocating one for each service on the heap. // API Services ActivitiesApi *ActivitiesApiService AthletesApi *AthletesApiService ClubsApi *ClubsApiService GearsApi *GearsApiService RoutesApi *RoutesApiService SegmentEffortsApi *SegmentEffortsApiService SegmentsApi *SegmentsApiService StreamsApi *StreamsApiService UploadsApi *UploadsApiService } type service struct { client *APIClient } // NewAPIClient creates a new API client. Requires a userAgent string describing your application. // optionally a custom http.Client to allow for advanced features such as caching. func NewAPIClient(cfg *Configuration) *APIClient { if cfg.HTTPClient == nil { cfg.HTTPClient = http.DefaultClient } c := &APIClient{} c.cfg = cfg c.common.client = c // API Services c.ActivitiesApi = (*ActivitiesApiService)(&c.common) c.AthletesApi = (*AthletesApiService)(&c.common) c.ClubsApi = (*ClubsApiService)(&c.common) c.GearsApi = (*GearsApiService)(&c.common) c.RoutesApi = (*RoutesApiService)(&c.common) c.SegmentEffortsApi = (*SegmentEffortsApiService)(&c.common) c.SegmentsApi = (*SegmentsApiService)(&c.common) c.StreamsApi = (*StreamsApiService)(&c.common) c.UploadsApi = (*UploadsApiService)(&c.common) return c } func atoi(in string) (int, error) { return strconv.Atoi(in) } // selectHeaderContentType select a content type from the available list. func selectHeaderContentType(contentTypes []string) string { if len(contentTypes) == 0 { return "" } if contains(contentTypes, "application/json") { retu
contentTypes[0] // use the first content type specified in 'consumes' } // selectHeaderAccept joins all accept types and returns them func selectHeaderAccept(accepts []string) string { if len(accepts) == 0 { return "" } if contains(accepts, "application/json") { return "application/json" } return strings.Join(accepts, ",") } // contains is a case insensitive match, finding needle in a haystack func contains(haystack []string, needle string) bool { for _, a := range haystack { if strings.ToLower(a) == strings.ToLower(needle) { return true } } return false } // Verify optional parameters are of the correct type. func typeCheckParameter(obj interface{}, expected string, name string) error { // Make sure there is an object. if obj == nil { return nil } // Check the type is as expected. if reflect.TypeOf(obj).String() != expected { return fmt.Errorf("Expected %s to be of type %s but received %s.", name, expected, reflect.TypeOf(obj).String()) } return nil } // parameterToString converts interface{} parameters to string, using a delimiter if format is provided. func parameterToString(obj interface{}, collectionFormat string) string { var delimiter string switch collectionFormat { case "pipes": delimiter = "|" case "ssv": delimiter = " " case "tsv": delimiter = "\t" case "csv": delimiter = "," } if reflect.TypeOf(obj).Kind() == reflect.Slice { return strings.Trim(strings.Replace(fmt.Sprint(obj), " ", delimiter, -1), "[]") } return fmt.Sprintf("%v", obj) } // callAPI does the request. func (c *APIClient) callAPI(request *http.Request) (*http.Response, error) { return c.cfg.HTTPClient.Do(request) } // Change base path to allow switching to mocks func (c *APIClient) ChangeBasePath(path string) { c.cfg.BasePath = path } // prepareRequest builds the request func (c *APIClient) prepareRequest( ctx context.Context, path string, method string, postBody interface{}, headerParams map[string]string, queryParams url.Values, formParams url.Values, fileName string, fileBytes []byte) (localVarRequest *http.Request, err error) { var body *bytes.Buffer // Detect postBody type and post. if postBody != nil { contentType := headerParams["Content-Type"] if contentType == "" { contentType = detectContentType(postBody) headerParams["Content-Type"] = contentType } body, err = setBody(postBody, contentType) if err != nil { return nil, err } } // add form parameters and file if available.
if strings.HasPrefix(headerParams["Content-Type"], "multipart/form-data") && len(formParams) > 0 || (len(fileBytes) > 0 && fileName != "") { if body != nil { return nil, errors.New("Cannot specify postBody and multipart form at the same time.") } body = &bytes.Buffer{} w := multipart.NewWriter(body) for k, v := range formParams { for _, iv := range v { if strings.HasPrefix(k, "@") { // file err = addFile(w, k[1:], iv) if err != nil { return nil, err } } else { // form value w.WriteField(k, iv) } } } if len(fileBytes) > 0 && fileName != "" { w.Boundary() //_, fileNm := filepath.Split(fileName) part, err := w.CreateFormFile("file", filepath.Base(fileName)) if err != nil { return nil, err } _, err = part.Write(fileBytes) if err != nil { return nil, err } // Set the Boundary in the Content-Type headerParams["Content-Type"] = w.FormDataContentType() } // Set Content-Length headerParams["Content-Length"] = fmt.Sprintf("%d", body.Len()) w.Close() } if strings.HasPrefix(headerParams["Content-Type"], "application/x-www-form-urlencoded") && len(formParams) > 0 { if body != nil { return nil, errors.New("Cannot specify postBody and x-www-form-urlencoded form at the same time.") } body = &bytes.Buffer{} body.WriteString(formParams.Encode()) // Set Content-Length headerParams["Content-Length"] = fmt.Sprintf("%d", body.Len()) } // Setup path and query parameters url, err := url.Parse(path) if err != nil { return nil, err } // Adding Query Param query := url.Query() for k, v := range queryParams { for _, iv := range v { query.Add(k, iv) } } // Encode the parameters. url.RawQuery = query.Encode() // Generate a new request if body != nil { localVarRequest, err = http.NewRequest(method, url.String(), body) } else { localVarRequest, err = http.NewRequest(method, url.String(), nil) } if err != nil { return nil, err } // add header parameters, if any if len(headerParams) > 0 { headers := http.Header{} for h, v := range headerParams { headers.Set(h, v) } localVarRequest.Header = headers } // Override request host, if applicable if c.cfg.Host != "" { localVarRequest.Host = c.cfg.Host } // Add the user agent to the request. localVarRequest.Header.Add("User-Agent", c.cfg.UserAgent) if ctx != nil { // add context to the request localVarRequest = localVarRequest.WithContext(ctx) // Walk through any authentication. 
// OAuth2 authentication if tok, ok := ctx.Value(ContextOAuth2).(oauth2.TokenSource); ok { // We were able to grab an oauth2 token from the context var latestToken *oauth2.Token if latestToken, err = tok.Token(); err != nil { return nil, err } latestToken.SetAuthHeader(localVarRequest) } // Basic HTTP Authentication if auth, ok := ctx.Value(ContextBasicAuth).(BasicAuth); ok { localVarRequest.SetBasicAuth(auth.UserName, auth.Password) } // AccessToken Authentication if auth, ok := ctx.Value(ContextAccessToken).(string); ok { localVarRequest.Header.Add("Authorization", "Bearer "+auth) } } for header, value := range c.cfg.DefaultHeader { localVarRequest.Header.Add(header, value) } return localVarRequest, nil } func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err error) { if strings.Contains(contentType, "application/xml") { if err = xml.Unmarshal(b, v); err != nil { return err } return nil } else if strings.Contains(contentType, "application/json") { if err = json.Unmarshal(b, v); err != nil { return err } return nil } return errors.New("undefined response type") } // Add a file to the multipart request func addFile(w *multipart.Writer, fieldName, path string) error { file, err := os.Open(path) if err != nil { return err } defer file.Close() part, err := w.CreateFormFile(fieldName, filepath.Base(path)) if err != nil { return err } _, err = io.Copy(part, file) return err } // Prevent trying to import "fmt" func reportError(format string, a ...interface{}) error { return fmt.Errorf(format, a...) } // Set request body from an interface{} func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err error) { if bodyBuf == nil { bodyBuf = &bytes.Buffer{} } if reader, ok := body.(io.Reader); ok { _, err = bodyBuf.ReadFrom(reader) } else if b, ok := body.([]byte); ok { _, err = bodyBuf.Write(b) } else if s, ok := body.(string); ok { _, err = bodyBuf.WriteString(s) } else if s, ok := body.(*string); ok { _, err = bodyBuf.WriteString(*s) } else if jsonCheck.MatchString(contentType) { err = json.NewEncoder(bodyBuf).Encode(body) } else if xmlCheck.MatchString(contentType) { xml.NewEncoder(bodyBuf).Encode(body) } if err != nil { return nil, err } if bodyBuf.Len() == 0 { err = fmt.Errorf("Invalid body type %s\n", contentType) return nil, err } return bodyBuf, nil } // detectContentType method is used to figure out `Request.Body` content type for request header func detectContentType(body interface{}) string { contentType := "text/plain; charset=utf-8" kind := reflect.TypeOf(body).Kind() switch kind { case reflect.Struct, reflect.Map, reflect.Ptr: contentType = "application/json; charset=utf-8" case reflect.String: contentType = "text/plain; charset=utf-8" default: if b, ok := body.([]byte); ok { contentType = http.DetectContentType(b) } else if kind == reflect.Slice { contentType = "application/json; charset=utf-8" } } return contentType } // Ripped from https://github.com/gregjones/httpcache/blob/master/httpcache.go type cacheControl map[string]string func parseCacheControl(headers http.Header) cacheControl { cc := cacheControl{} ccHeader := headers.Get("Cache-Control") for _, part := range strings.Split(ccHeader, ",") { part = strings.Trim(part, " ") if part == "" { continue } if strings.ContainsRune(part, '=') { keyval := strings.Split(part, "=") cc[strings.Trim(keyval[0], " ")] = strings.Trim(keyval[1], ",") } else { cc[part] = "" } } return cc } // CacheExpires helper function to determine remaining time before repeating a request. 
func CacheExpires(r *http.Response) time.Time { // Figure out when the cache expires. var expires time.Time now, err := time.Parse(time.RFC1123, r.Header.Get("date")) if err != nil { return time.Now() } respCacheControl := parseCacheControl(r.Header) if maxAge, ok := respCacheControl["max-age"]; ok { lifetime, err := time.ParseDuration(maxAge + "s") if err != nil { expires = now } else { expires = now.Add(lifetime) } } else { expiresHeader := r.Header.Get("Expires") if expiresHeader != "" { expires, err = time.Parse(time.RFC1123, expiresHeader) if err != nil { expires = now } } } return expires } func strlen(s string) int { return utf8.RuneCountInString(s) } // GenericSwaggerError Provides access to the body, error and model on returned errors. type GenericSwaggerError struct { body []byte error string model interface{} } // Error returns non-empty string if there was an error. func (e GenericSwaggerError) Error() string { return e.error } // Body returns the raw bytes of the response func (e GenericSwaggerError) Body() []byte { return e.body } // Model returns the unpacked model of the error func (e GenericSwaggerError) Model() interface{} { return e.model }
rn "application/json" } return
Wizard.js
import './styles.scss'; import React, { Fragment, useState } from 'react'; import PropTypes from 'prop-types'; import StepWizard from 'react-step-wizard'; import { Steps } from '../../components/Steps/Steps'; import { Constrain } from '../Constrain/Constrain'; import { Link, useParams } from 'react-router-dom'; /** * Component for Wizard layout element. * @param {node} children of the component * @param {array} labels of steps * @param {bool} showNext Whether to enable the 'Next' button or not * @param {func} stepHandler React hook to share the current step of the Wizard * @return {object} ( * <Wizard labels={labels} showNext={showNext} stepHandler={stepHandler}>
/* eslint react/prop-types: 0 */ export const Wizard = ({ children, labels, showNext, stepHandler }) => { const { experimentId } = useParams(); const [state, updateState] = useState({ form: {}, }); const [step, setStep] = useState(1); // update step value const onStepChange = (stats) => { setStep(WizInstance.currentStep); stepHandler(WizInstance.currentStep); window.setTimeout(function() { window.scrollTo({ top: 0, // left: 0, behavior: 'smooth', }); }, 800); }; const setInstance = (WizInstance) => updateState({ ...state, WizInstance, }); const { WizInstance } = state; const renderWizard = () => { return ( <Fragment> <StepWizard className="wizard" onStepChange={onStepChange} nav={ <Steps labels={labels} /> } instance={setInstance} > {children} </StepWizard> { WizInstance && <Controls currentStep={step} WizInstance={WizInstance} experimentId={experimentId} showNext={showNext} /> } </Fragment> ); }; return ( <div> { children.length === labels.length ? renderWizard() : <Constrain modifierClasses="constrain--small"> A message goes here. </Constrain> } </div> ); }; /** * Fragment for Wizard's controls. * * @return {object} ( * <Fragment WizInstance={WizInstance} /> * ) */ const Controls = ({ experimentId, currentStep, WizInstance, showNext }) => <Fragment> <div className="wizard__controls"> {currentStep !== 1 && <button className={'wizard__button button--link'} onClick={WizInstance.previousStep} >Go Back</button> } {currentStep === WizInstance.totalSteps ? <Link className={'wizard__button button button--tertiary'} to={`/study/${experimentId}/user/123`} >Complete Study</Link> : <button onClick={WizInstance.nextStep} disabled={!showNext} className={'wizard__button button button--tertiary'} >Next</button> } </div> </Fragment>; Wizard.propTypes = { /** * Wizards's children */ children: PropTypes.node, /** * Wizard's labels */ labels: PropTypes.array, /** * Wizard's 'Next' button condition (whether to show it or not) */ showNext: PropTypes.bool, /** * React hook to share the current step of the Wizard */ stepHandler: PropTypes.func, }; Wizard.defaultProps = { children: '', labels: [ 'Step 1', 'Step 2', 'Step 3', 'Step 4', ], showNext: true, stepHandler: (step) => step, };
* {children} * <Wizard /> * ) */
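Illustrative usage matching the JSDoc above; the step components and hook values are placeholders, and the number of children must equal labels.length for the wizard to render:

```jsx
const labels = ['Consent', 'Tasks', 'Survey', 'Done'];

<Wizard labels={labels} showNext={canContinue} stepHandler={setCurrentStep}>
  <ConsentStep />
  <TasksStep />
  <SurveyStep />
  <DoneStep />
</Wizard>
```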
classifier_builder.py
'''This program contains the functions averageFinder and midpointFinder. averageFinder calculates the averages of the "columns" of a list of numbers (a list of lists of numbers) for real and fake samples (separately) and midpointFinder finds the midpoint between the real and fake averages. Data is either given from the test case or from user input, which is run through incomingData. Assignment 2: classifier_builder Name: Anna Wood Student Number: 20091785 NetID: 17aaw2''' def averageFinder(sample_data): '''averageFinder calculates the average of each attribute across all the samples with the same classification (0 or 1) input: sample list / list of lists of numbers output: real_avgs, counter_avgs (returned to the caller, which passes them to midpointFinder) note - 1 IS REAL, 0 IS COUNTERFEIT ''' real_avgs_counter = 0 counter_avgs_counter = 0 real_avgs = [] counter_avgs = [] avg_len_real = 0 indx = 0 while indx < 4: # while-loop that sums each attribute and adds it to the list of its category (real or counter) for i in range(0,len(sample_data)): # loop to separate data into 0 and 1 if sample_data[i][4] == 1: real_avgs_counter += sample_data[i][indx] # if real, attribute is summed in counter avg_len_real = avg_len_real + 1 / 4 # each real row is seen once per attribute, so +1/4 totals 1 per real bill elif sample_data[i][4] == 0: # attribute sum for counterfeit bills counter_avgs_counter += sample_data[i][indx] real_avgs.append(real_avgs_counter) # after each attribute is summed it is added to the final list counter_avgs.append(counter_avgs_counter) real_avgs_counter = 0 # counters are reset to 0 after each list counter_avgs_counter = 0 indx += 1 # index for counting the "columns" avg_len_counter = len(sample_data) - avg_len_real # number of real / counter bills calculated for finding the average for i in range(0, 4): # divides the real, counterfeit sums by the amount of real & counterfeit items respectively real_avgs[i] = round((real_avgs[i] / avg_len_real), 3) counter_avgs[i] = round((counter_avgs[i] / avg_len_counter), 3) # each average rounded to 3 decimal points return real_avgs, counter_avgs def midpointFinder(real_avgs, counter_avgs): '''part 2 of the building classifier, takes the averages of the real and fake samples and finds the midpoint (divides by 2). midpoints list should then be returned to classifier for further classifying input: averages of real, fake samples output: midpoints (returned to incomingData)''' midpoints = [] # empty list for midpoints for i in range(0,4): # finds midpoints by adding averages and dividing by 2 midpoint = (real_avgs[i] + counter_avgs[i]) / 2 midpoints.append(round(midpoint,3)) return midpoints # returns midpoints to incomingData def
(training_data): '''function runs from here when data is passed from our main interface input: training_data output: midpoints''' real_avgs, counter_avgs = averageFinder(training_data) midpoints = midpointFinder(real_avgs, counter_avgs) return midpoints # midpoints returned to main interface if __name__ == '__main__': sample_data_main = [[3, 8, -2, 0, 0], [4, 8, -2, -1, 0], [3, -2, 1, 0, 0], [2, 1, 0, -2, 0], # fake samples (5th item 0) [0, 3, -3, -2, 1], [-3, 3, 0, -3, 1], [-6, 7, 0, -3, 1] ] # real samples (5th item is 1) real_avgs, counter_avgs = averageFinder(sample_data_main) print('real averages (test case)', real_avgs, 'should be -3, 4.333, -1, -2.667') print('counter averages (test case)', counter_avgs, 'should be 3, 3.75, -0.75, -0.75') midpoints = midpointFinder(real_avgs, counter_avgs) print('midpoints (test case)', midpoints, 'should be 0, 4.041, -0.875, -1.708')
incomingData
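An illustrative call through the `incomingData` entry point; the rows follow the [attr1..attr4, label] layout used above, with label 1 meaning real:

```python
training_data = [
    [3, 8, -2, 0, 0],   # counterfeit
    [2, 1, 0, -2, 0],   # counterfeit
    [0, 3, -3, -2, 1],  # real
    [-6, 7, 0, -3, 1],  # real
]
print(incomingData(training_data))  # [-0.25, 4.75, -1.25, -1.75]
```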
.size-limit.js
const fs = require('fs'); function
() { const dirname = '.next/static/commons'; const files = fs.readdirSync(dirname); const [file] = files .reduce((result, filename) => { if (!/^main-[a-f0-9]+\.js$/.test(filename)) { return result; } const path = `${dirname}/${filename}`; return [...result, { path, ctime: fs.statSync(path).ctimeMs }]; }, []) .sort((x, y) => y.ctime - x.ctime); return file; } module.exports = [ { name: 'The initial cost people pay for using one component', webpack: true, path: 'packages/material-ui/build/Paper/index.js', limit: '17.6 KB', }, { name: 'The size of all the modules of material-ui.', webpack: true, path: 'packages/material-ui/build/index.js', limit: '95.3 KB', }, { name: 'The main bundle of the docs', webpack: false, path: getMainFile().path, limit: '177 KB', }, { name: 'The home page of the docs', webpack: false, path: '.next/bundles/pages/index.js', limit: '6 KB', }, ];
getMainFile
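Illustrative only, assuming the docs app has already been built so .next/static/commons exists:

```js
const { path: mainBundlePath } = getMainFile();
console.log(mainBundlePath); // e.g. '.next/static/commons/main-0123abcd.js' (hash is made up)
```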
web_page.py
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: MIT. See LICENSE import re from jinja2.exceptions import TemplateSyntaxError import frappe from frappe import _ from frappe.utils import get_datetime, now, quoted, strip_html from frappe.utils.jinja import render_template from frappe.utils.safe_exec import safe_exec from frappe.website.doctype.website_slideshow.website_slideshow import get_slideshow from frappe.website.utils import ( extract_title, find_first_image, get_comment_list, get_html_content_based_on_type, ) from frappe.website.website_generator import WebsiteGenerator class WebPage(WebsiteGenerator): def validate(self): self.validate_dates() self.set_route() if not self.dynamic_route: self.route = quoted(self.route) def get_feed(self): return self.title def on_update(self): super(WebPage, self).on_update() def on_trash(self): super(WebPage, self).on_trash() def get_context(self, context):
def render_dynamic(self, context): # dynamic is_jinja = context.dynamic_template or "<!-- jinja -->" in context.main_section if is_jinja or ("{{" in context.main_section): try: context["main_section"] = render_template(context.main_section, context) if not "<!-- static -->" in context.main_section: context["no_cache"] = 1 except TemplateSyntaxError: if is_jinja: raise def set_breadcrumbs(self, context): """Build breadcrumbs template""" if self.breadcrumbs: context.parents = frappe.safe_eval(self.breadcrumbs, {"_": _}) if not "no_breadcrumbs" in context: if "<!-- no-breadcrumbs -->" in context.main_section: context.no_breadcrumbs = 1 def set_title_and_header(self, context): """Extract and set title and header from content or context.""" if not "no_header" in context: if "<!-- no-header -->" in context.main_section: context.no_header = 1 if not context.title: context.title = extract_title(context.main_section, context.path_name) # header if context.no_header and "header" in context: context.header = "" if not context.no_header: # if header not set and no h1 tag in the body, set header as title if not context.header and "<h1" not in context.main_section: context.header = context.title # add h1 tag to header if context.get("header") and not re.findall("<h.>", context.header): context.header = "<h1>" + context.header + "</h1>" # if title not set, set title from header if not context.title and context.header: context.title = strip_html(context.header) def set_page_blocks(self, context): if self.content_type != "Page Builder": return out = get_web_blocks_html(self.page_blocks) context.page_builder_html = out.html context.page_builder_scripts = out.scripts context.page_builder_styles = out.styles def add_hero(self, context): """Add a hero element if specified in content or hooks. 
Hero elements get full page width.""" context.hero = "" if "<!-- start-hero -->" in context.main_section: parts1 = context.main_section.split("<!-- start-hero -->") parts2 = parts1[1].split("<!-- end-hero -->") context.main_section = parts1[0] + parts2[1] context.hero = parts2[0] def check_for_redirect(self, context): if "<!-- redirect:" in context.main_section: frappe.local.flags.redirect_location = ( context.main_section.split("<!-- redirect:")[1].split("-->")[0].strip() ) raise frappe.Redirect def set_metatags(self, context): if not context.metatags: context.metatags = { "name": self.meta_title or self.title, "description": self.meta_description, "image": self.meta_image or find_first_image(context.main_section or ""), "og:type": "website", } def validate_dates(self): if self.end_date: if self.start_date and get_datetime(self.end_date) < get_datetime(self.start_date): frappe.throw(_("End Date cannot be before Start Date!")) # If the current date is past end date, and # web page is published, empty the end date if self.published and now() > self.end_date: self.end_date = None frappe.msgprint(_("Clearing end date, as it cannot be in the past for published pages.")) def check_publish_status(): # called via daily scheduler web_pages = frappe.get_all("Web Page", fields=["name", "published", "start_date", "end_date"]) now_date = get_datetime(now()) for page in web_pages: start_date = page.start_date if page.start_date else "" end_date = page.end_date if page.end_date else "" if page.published: # Unpublish pages that are outside the set date ranges if (start_date and now_date < start_date) or (end_date and now_date > end_date): frappe.db.set_value("Web Page", page.name, "published", 0) else: # Publish pages that are inside the set date ranges if start_date: if not end_date or (end_date and now_date < end_date): frappe.db.set_value("Web Page", page.name, "published", 1) def get_web_blocks_html(blocks): """Converts a list of blocks into Raw HTML and extracts out their scripts for deduplication""" out = frappe._dict(html="", scripts=[], styles=[]) extracted_scripts = [] extracted_styles = [] for block in blocks: web_template = frappe.get_cached_doc("Web Template", block.web_template) rendered_html = frappe.render_template( "templates/includes/web_block.html", context={ "web_block": block, "web_template_html": web_template.render(block.web_template_values), "web_template_type": web_template.type, }, ) html, scripts, styles = extract_script_and_style_tags(rendered_html) out.html += html if block.web_template not in extracted_scripts: out.scripts += scripts extracted_scripts.append(block.web_template) if block.web_template not in extracted_styles: out.styles += styles extracted_styles.append(block.web_template) return out def extract_script_and_style_tags(html): from bs4 import BeautifulSoup soup = BeautifulSoup(html, "html.parser") scripts = [] styles = [] for script in soup.find_all("script"): scripts.append(script.string) script.extract() for style in soup.find_all("style"): styles.append(style.string) style.extract() return str(soup), scripts, styles
context.main_section = get_html_content_based_on_type(self, "main_section", self.content_type)
context.source_content_type = self.content_type
context.title = self.title

if self.context_script:
    _locals = dict(context=frappe._dict())
    safe_exec(self.context_script, None, _locals)
    context.update(_locals["context"])

self.render_dynamic(context)

if context.slideshow:
    context.update(get_slideshow(self))

if self.enable_comments:
    context.comment_list = get_comment_list(self.doctype, self.name)
    context.guest_allowed = True

context.update(
    {
        "style": self.css or "",
        "script": self.javascript or "",
        "header": self.header,
        "text_align": self.text_align,
    }
)

if not self.show_title:
    context["no_header"] = 1

self.set_metatags(context)
self.set_breadcrumbs(context)
self.set_title_and_header(context)
self.set_page_blocks(context)

return context
mr.rs
#[doc = "Register `MR` reader"] pub struct R(crate::R<MR_SPEC>); impl core::ops::Deref for R { type Target = crate::R<MR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<MR_SPEC>> for R { fn from(reader: crate::R<MR_SPEC>) -> Self { R(reader) } } #[doc = "Register `MR` writer"] pub struct W(crate::W<MR_SPEC>); impl core::ops::Deref for W { type Target = crate::W<MR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<MR_SPEC>> for W { fn from(writer: crate::W<MR_SPEC>) -> Self { W(writer) } } #[doc = "SDRAMC Command Mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum MODE_A { #[doc = "0: Normal mode. Any access to the SDRAM is decoded normally. To activate this mode, command must be followed by a write to the SDRAM."] NORMAL = 0, #[doc = "1: The SDRAMC issues a NOP command when the SDRAM device is accessed regardless of the cycle. To activate this mode, command must be followed by a write to the SDRAM."] NOP = 1, #[doc = "2: The SDRAMC issues an \"All Banks Precharge\" command when the SDRAM device is accessed regardless of the cycle. To activate this mode, command must be followed by a write to the SDRAM."] ALLBANKS_PRECHARGE = 2, #[doc = "3: The SDRAMC issues a \"Load Mode Register\" command when the SDRAM device is accessed regardless of the cycle. To activate this mode, command must be followed by a write to the SDRAM."] LOAD_MODEREG = 3, #[doc = "4: The SDRAMC issues an \"Auto-Refresh\" Command when the SDRAM device is accessed regardless of the cycle. Previously, an \"All Banks Precharge\" command must be issued. To activate this mode, command must be followed by a write to the SDRAM."] AUTO_REFRESH = 4, #[doc = "5: The SDRAMC issues an \"Extended Load Mode Register\" command when the SDRAM device is accessed regardless of the cycle. To activate this mode, the \"Extended Load Mode Register\" command must be followed by a write to the SDRAM. The write in the SDRAM must be done in the appropriate bank; most low-power SDRAM devices use the bank 1."] EXT_LOAD_MODEREG = 5, #[doc = "6: Deep power-down mode. Enters deep power-down mode."] DEEP_POWERDOWN = 6, } impl From<MODE_A> for u8 { #[inline(always)] fn from(variant: MODE_A) -> Self
} #[doc = "Field `MODE` reader - SDRAMC Command Mode"] pub struct MODE_R(crate::FieldReader<u8, MODE_A>); impl MODE_R { pub(crate) fn new(bits: u8) -> Self { MODE_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<MODE_A> { match self.bits { 0 => Some(MODE_A::NORMAL), 1 => Some(MODE_A::NOP), 2 => Some(MODE_A::ALLBANKS_PRECHARGE), 3 => Some(MODE_A::LOAD_MODEREG), 4 => Some(MODE_A::AUTO_REFRESH), 5 => Some(MODE_A::EXT_LOAD_MODEREG), 6 => Some(MODE_A::DEEP_POWERDOWN), _ => None, } } #[doc = "Checks if the value of the field is `NORMAL`"] #[inline(always)] pub fn is_normal(&self) -> bool { **self == MODE_A::NORMAL } #[doc = "Checks if the value of the field is `NOP`"] #[inline(always)] pub fn is_nop(&self) -> bool { **self == MODE_A::NOP } #[doc = "Checks if the value of the field is `ALLBANKS_PRECHARGE`"] #[inline(always)] pub fn is_allbanks_precharge(&self) -> bool { **self == MODE_A::ALLBANKS_PRECHARGE } #[doc = "Checks if the value of the field is `LOAD_MODEREG`"] #[inline(always)] pub fn is_load_modereg(&self) -> bool { **self == MODE_A::LOAD_MODEREG } #[doc = "Checks if the value of the field is `AUTO_REFRESH`"] #[inline(always)] pub fn is_auto_refresh(&self) -> bool { **self == MODE_A::AUTO_REFRESH } #[doc = "Checks if the value of the field is `EXT_LOAD_MODEREG`"] #[inline(always)] pub fn is_ext_load_modereg(&self) -> bool { **self == MODE_A::EXT_LOAD_MODEREG } #[doc = "Checks if the value of the field is `DEEP_POWERDOWN`"] #[inline(always)] pub fn is_deep_powerdown(&self) -> bool { **self == MODE_A::DEEP_POWERDOWN } } impl core::ops::Deref for MODE_R { type Target = crate::FieldReader<u8, MODE_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MODE` writer - SDRAMC Command Mode"] pub struct MODE_W<'a> { w: &'a mut W, } impl<'a> MODE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MODE_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "Normal mode. Any access to the SDRAM is decoded normally. To activate this mode, command must be followed by a write to the SDRAM."] #[inline(always)] pub fn normal(self) -> &'a mut W { self.variant(MODE_A::NORMAL) } #[doc = "The SDRAMC issues a NOP command when the SDRAM device is accessed regardless of the cycle. To activate this mode, command must be followed by a write to the SDRAM."] #[inline(always)] pub fn nop(self) -> &'a mut W { self.variant(MODE_A::NOP) } #[doc = "The SDRAMC issues an \"All Banks Precharge\" command when the SDRAM device is accessed regardless of the cycle. To activate this mode, command must be followed by a write to the SDRAM."] #[inline(always)] pub fn allbanks_precharge(self) -> &'a mut W { self.variant(MODE_A::ALLBANKS_PRECHARGE) } #[doc = "The SDRAMC issues a \"Load Mode Register\" command when the SDRAM device is accessed regardless of the cycle. To activate this mode, command must be followed by a write to the SDRAM."] #[inline(always)] pub fn load_modereg(self) -> &'a mut W { self.variant(MODE_A::LOAD_MODEREG) } #[doc = "The SDRAMC issues an \"Auto-Refresh\" Command when the SDRAM device is accessed regardless of the cycle. Previously, an \"All Banks Precharge\" command must be issued. 
To activate this mode, command must be followed by a write to the SDRAM."] #[inline(always)] pub fn auto_refresh(self) -> &'a mut W { self.variant(MODE_A::AUTO_REFRESH) } #[doc = "The SDRAMC issues an \"Extended Load Mode Register\" command when the SDRAM device is accessed regardless of the cycle. To activate this mode, the \"Extended Load Mode Register\" command must be followed by a write to the SDRAM. The write in the SDRAM must be done in the appropriate bank; most low-power SDRAM devices use the bank 1."] #[inline(always)] pub fn ext_load_modereg(self) -> &'a mut W { self.variant(MODE_A::EXT_LOAD_MODEREG) } #[doc = "Deep power-down mode. Enters deep power-down mode."] #[inline(always)] pub fn deep_powerdown(self) -> &'a mut W { self.variant(MODE_A::DEEP_POWERDOWN) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x07) | (value as u32 & 0x07); self.w } } impl R { #[doc = "Bits 0:2 - SDRAMC Command Mode"] #[inline(always)] pub fn mode(&self) -> MODE_R { MODE_R::new((self.bits & 0x07) as u8) } } impl W { #[doc = "Bits 0:2 - SDRAMC Command Mode"] #[inline(always)] pub fn mode(&mut self) -> MODE_W { MODE_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "SDRAMC Mode Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [mr](index.html) module"] pub struct MR_SPEC; impl crate::RegisterSpec for MR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [mr::R](R) reader structure"] impl crate::Readable for MR_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [mr::W](W) writer structure"] impl crate::Writable for MR_SPEC { type Writer = W; } #[doc = "`reset()` method sets MR to value 0"] impl crate::Resettable for MR_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
{ variant as _ }
kubernetes.go
// Copyright © 2018-2019 The OpenEBS Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package v1alpha1

import (
	"bytes"
	"encoding/json"

	errors "github.com/openebs/maya/pkg/errors/v1alpha1"
	client "github.com/openebs/maya/pkg/kubernetes/client/v1alpha1"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	clientset "k8s.io/client-go/kubernetes"
	"k8s.io/client-go/kubernetes/scheme"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/remotecommand"
)

// getClientsetFn is a typed function that
// abstracts fetching of clientset
type getClientsetFn func() (clientset *clientset.Clientset, err error)

// getClientsetForPathFn is a typed function that
// abstracts fetching of clientset from kubeConfigPath
type getClientsetForPathFn func(kubeConfigPath string) (clientset *clientset.Clientset, err error)

// getKubeConfigFn is a typed function that abstracts fetching
// rest config
type getKubeConfigFn func() (config *rest.Config, err error)

// getKubeConfigForPathFn is a typed function that
// abstracts fetching of config from kubeConfigPath
type getKubeConfigForPathFn func(kubeConfigPath string) (config *rest.Config, err error)

// createFn is a typed function that abstracts
// creation of pod
type createFn func(cli *clientset.Clientset, namespace string, pod *corev1.Pod) (*corev1.Pod, error)

// listFn is a typed function that abstracts
// listing of pods
type listFn func(cli *clientset.Clientset, namespace string, opts metav1.ListOptions) (*corev1.PodList, error)

// deleteFn is a typed function that abstracts
// deletion of a pod
type deleteFn func(cli *clientset.Clientset, namespace, name string, opts *metav1.DeleteOptions) error

// getFn is a typed function that abstracts
// fetching of a pod
type getFn func(cli *clientset.Clientset, namespace, name string, opts metav1.GetOptions) (*corev1.Pod, error)

// execFn is a typed function that abstracts
// pod exec
type execFn func(cli *clientset.Clientset, config *rest.Config, name, namespace string, opts *corev1.PodExecOptions) (*ExecOutput, error)

// defaultExec is the default implementation of execFn
func defaultExec(cli *clientset.Clientset, config *rest.Config, name, namespace string, opts *corev1.PodExecOptions) (*ExecOutput, error) {
	var stdout, stderr bytes.Buffer
	req := cli.CoreV1().RESTClient().Post().
		Resource("pods").
		Name(name).
		Namespace(namespace).
		SubResource("exec").
		VersionedParams(opts, scheme.ParameterCodec)

	// create exec executor which is an interface for transporting shell-style streams.
	exec, err := remotecommand.NewSPDYExecutor(config, "POST", req.URL())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to exec into pod {%s}: failed to connect to the provided server", name)
	}

	// Stream initiates the transport of the standard shell streams. It will transport any
	// non-nil stream to a remote system, and return an error if a problem occurs.
	err = exec.Stream(remotecommand.StreamOptions{
		Stdin:  nil,
		Stdout: &stdout,
		Stderr: &stderr,
		Tty:    opts.TTY,
	})
	if err != nil {
		return nil, errors.Wrapf(err, "failed to exec into pod {%s}: failed to stream", name)
	}

	execOutput := &ExecOutput{
		Stdout: stdout.String(),
		Stderr: stderr.String(),
	}
	return execOutput, nil
}

// KubeClient enables kubernetes API operations
// on pod instance
type KubeClient struct {
	// clientset refers to pod clientset
	// that will be responsible to
	// make kubernetes API calls
	clientset *clientset.Clientset

	// namespace holds the namespace on which
	// KubeClient has to operate
	namespace string

	// kubeConfig represents kubernetes config
	kubeConfig *rest.Config

	// kubeconfig path to get kubernetes clientset
	kubeConfigPath string

	// functions useful during mocking
	getKubeConfig        getKubeConfigFn
	getKubeConfigForPath getKubeConfigForPathFn
	getClientset         getClientsetFn
	getClientsetForPath  getClientsetForPathFn
	create               createFn
	list                 listFn
	del                  deleteFn
	get                  getFn
	exec                 execFn
}

// ExecOutput struct contains stdout and stderr
type ExecOutput struct {
	Stdout string `json:"stdout"`
	Stderr string `json:"stderr"`
}

// KubeClientBuildOption defines the abstraction
// to build a KubeClient instance
type KubeClientBuildOption func(*KubeClient)

// withDefaults sets the default options
// of KubeClient instance
func (k *KubeClient) withDefaults() {
	if k.getKubeConfig == nil {
		k.getKubeConfig = func() (config *rest.Config, err error) {
			return client.New().Config()
		}
	}
	if k.getKubeConfigForPath == nil {
		k.getKubeConfigForPath = func(kubeConfigPath string) (config *rest.Config, err error) {
			return client.New(client.WithKubeConfigPath(kubeConfigPath)).GetConfigForPathOrDirect()
		}
	}
	if k.getClientset == nil {
		k.getClientset = func() (clients *clientset.Clientset, err error) {
			return client.New().Clientset()
		}
	}
	if k.getClientsetForPath == nil {
		k.getClientsetForPath = func(kubeConfigPath string) (clients *clientset.Clientset, err error) {
			return client.New(client.WithKubeConfigPath(kubeConfigPath)).Clientset()
		}
	}
	if k.create == nil {
		k.create = func(cli *clientset.Clientset, namespace string, pod *corev1.Pod) (*corev1.Pod, error) {
			return cli.CoreV1().Pods(namespace).Create(pod)
		}
	}
	if k.list == nil {
		k.list = func(cli *clientset.Clientset, namespace string, opts metav1.ListOptions) (*corev1.PodList, error) {
			return cli.CoreV1().Pods(namespace).List(opts)
		}
	}
	if k.del == nil {
		k.del = func(cli *clientset.Clientset, namespace, name string, opts *metav1.DeleteOptions) error {
			return cli.CoreV1().Pods(namespace).Delete(name, opts)
		}
	}
	if k.get == nil {
		k.get = func(cli *clientset.Clientset, namespace, name string, opts metav1.GetOptions) (*corev1.Pod, error) {
			return cli.CoreV1().Pods(namespace).Get(name, opts)
		}
	}
	if k.exec == nil {
		k.exec = defaultExec
	}
}

// WithClientSet sets the kubernetes client against
// the KubeClient instance
func WithClientSet(c *clientset.Clientset) KubeClientBuildOption {
	return func(k *KubeClient) {
		k.clientset = c
	}
}

// WithKubeConfigPath sets the kubeConfig path
// against client instance
func WithKubeConfigPath(path string) KubeClientBuildOption {
	return func(k *KubeClient) {
		k.kubeConfigPath = path
	}
}

// NewKubeClient returns a new instance of KubeClient meant for
// pod operations
func NewKubeClient(opts ...KubeClientBuildOption) *KubeClient {
	k := &KubeClient{}
	for _, o := range opts {
		o(k)
	}
	k.withDefaults()
	return k
}

// WithNamespace sets the namespace on which
// the KubeClient has to operate
func (k *KubeClient)
WithNamespace(namespace string) *KubeClient { k.namespace = namespace return k } // WithKubeConfig sets the kubernetes config against // the KubeClient instance func (k *KubeClient) WithKubeConfig(config *rest.Config) *KubeClient { k.kubeConfig = config return k } func (k *KubeClient) getClientsetForPathOrDirect() ( *clientset.Clientset, error) { if k.kubeConfigPath != "" { return k.getClientsetForPath(k.kubeConfigPath) } return k.getClientset() } // getClientsetOrCached returns either a new instance // of kubernetes client or its cached copy func (k *KubeClient) getClientsetOrCached() (*clientset.Clientset, error) { if k.clientset != nil { return k.clientset, nil } cs, err := k.getClientsetForPathOrDirect() if err != nil { return nil, errors.Wrapf(err, "failed to get clientset") } k.clientset = cs return k.clientset, nil } func (k *KubeClient) getKubeConfigForPathOrDirect() (*rest.Config, error) { if k.kubeConfigPath != "" { return k.getKubeConfigForPath(k.kubeConfigPath) } return k.getKubeConfig() } // getKubeConfigOrCached returns either a new instance // of kubernetes config or its cached copy func (k *KubeClient) getKubeConfigOrCached() (*rest.Config, error) { if k.kubeConfig != nil { return k.kubeConfig, nil } kc, err := k.getKubeConfigForPathOrDirect() if err != nil { return nil, errors.Wrapf(err, "failed to get kube config") } k.kubeConfig = kc return k.kubeConfig, nil } // List returns a list of pod // instances present in kubernetes cluster func (k *KubeClient) List(opts metav1.ListOptions) (*corev1.PodList, error) { cli, err := k.getClientsetOrCached() if err != nil { return nil, errors.Wrapf(err, "failed to list pods") } return k.list(cli, k.namespace, opts) } // Delete deletes a pod instance present in kubernetes cluster func (k *KubeClient) Delete(name string, opts *metav1.DeleteOptions) error { if len(name) == 0 { return errors.New("failed to delete pod: missing pod name") } cli, err := k.getClientsetOrCached() if err != nil { return errors.Wrapf( err, "failed to delete pod {%s}: failed to get clientset", name, ) } return k.del(cli, k.namespace, name, opts) } // Create creates a pod in specified namespace in kubernetes cluster func (k *KubeClient) Create(pod *corev1.Pod) (*corev1.Pod, error) { if pod == nil { return nil, errors.New("failed to create pod: nil pod object") } cli, err := k.getClientsetOrCached() if err != nil { return nil, errors.Wrapf( err, "failed to create pod {%s} in namespace {%s}", pod.Name, pod.Namespace, ) } return k.create(cli, k.namespace, pod) } // Get gets a pod object present in kubernetes cluster func (k *KubeClient) Get(name string, opts metav1.GetOptions) (*corev1.Pod, error) { if len(name) == 0 { return nil, errors.New("failed to get pod: missing pod name") } cli, err := k.getClientsetOrCached() if err != nil { return nil, errors.Wrapf( err, "failed to get pod {%s}: failed to get clientset", name, ) } return k.get(cli, k.namespace, name, opts) } // GetRaw gets pod object for a given name and namespace present // in kubernetes cluster and returns result in raw byte. func (k *KubeClient) GetRaw(name string, opts metav1.GetOptions) ([]byte, error) { p, err := k.Get(name, opts) if err != nil { return nil, err } return json.Marshal(p) } // Exec runs a command remotely in a container of a pod func (k *KubeClient) Exec(name string, opts *corev1.PodExecOptions) (*ExecOutput, error) { cli, err := k.getClientsetOrCached() if err != nil { return nil, err } config, err := k.getKubeConfigOrCached() if err != nil { return nil, err
} return k.exec(cli, config, name, k.namespace, opts) } // ExecRaw runs a command remotely in a container of a pod // and returns raw output func (k *KubeClient) ExecRaw(name string, opts *corev1.PodExecOptions) ([]byte, error) { execOutput, err := k.Exec(name, opts) if err != nil { return nil, err } return json.Marshal(execOutput) }
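A minimal usage sketch for the pod KubeClient above. The import path, pod name, and namespace are illustrative assumptions, not taken from the source; PodExecOptions comes from k8s.io/api/core/v1 and its fields are used exactly as defaultExec expects.

package main

import (
	"fmt"

	// hypothetical import path for the package above
	pod "github.com/openebs/maya/pkg/kubernetes/pod/v1alpha1"
	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Build a client scoped to a namespace; when no kubeconfig path is
	// supplied, the default closures fall back to direct config resolution.
	k := pod.NewKubeClient(pod.WithKubeConfigPath("/etc/kube/config")).
		WithNamespace("openebs")

	// Run a command in the target pod and collect its streams.
	out, err := k.Exec("cstor-pool-0", &corev1.PodExecOptions{
		Command: []string{"ls", "/tmp"},
		Stdout:  true,
		Stderr:  true,
	})
	if err != nil {
		fmt.Println("exec failed:", err)
		return
	}
	fmt.Println(out.Stdout)
}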
initializer.ts
import { iocContextInitializer } from '@e2e/fixtures/common/ioc-context-initializer' import { WithoutOptionsController } from './without-options.controller' import { WithViewController } from '@e2e/fixtures/without-options/with-view.controller' export function
({ name, version }: { name?: string, version?: string } = {}) { return iocContextInitializer({ name, version, providers: [ WithoutOptionsController, WithViewController ] }) }
initializer
kvdb_map.go
package kvdb

import (
	"bufio"
	"errors"
	"hash/fnv"
	"sync"
)

var shards uint32 = 20

/*
The sharding hash does not have to be cryptographically secure; it just has to be fast.
Of all the options in the Go standard library, the best candidates seem to be maphash and fnv.
maphash is seeded, which is not something I want to deal with, so FNV it is. FNV-1a is
generally preferred over FNV-1 for better dispersion on small inputs.
The SMHasher suite lists faster hashing implementations, but they would be overkill for now.
*/

// mapShard is the basic block representing a shard of the overall map
type mapShard struct {
	sync.RWMutex
	data    map[string][]byte
	numKeys int
}

type shardedMap struct {
	shards []mapShard
}

func newShardMap() shardedMap {
	m := shardedMap{shards: make([]mapShard, shards)}
	var i uint32 = 0
	for ; i < shards; i++ {
		m.shards[i] = mapShard{data: make(map[string][]byte), numKeys: 0}
	}
	return m
}

func (m shardedMap) writeShardedMap(bufw *bufio.Writer) error {
	for shard := range m.shards {
		m.shards[shard].RLock()
		for key, value := range m.shards[shard].data {
			// Check the error of every write, not just the last one;
			// the original code silently dropped the key-chunk error.
			if err := writeChunk([]byte(key), bufw); err != nil {
				m.shards[shard].RUnlock()
				return err
			}
			if err := writeChunk(value, bufw); err != nil {
				m.shards[shard].RUnlock()
				return err
			}
		}
		m.shards[shard].RUnlock()
	}
	return nil
}

func getRawMap(m shardedMap) map[string][]byte {
	returnData := make(map[string][]byte)
	for shard := range m.shards {
		m.shards[shard].RLock()
		for key, value := range m.shards[shard].data {
			returnData[key] = value
		}
		m.shards[shard].RUnlock()
	}
	return returnData
}

func getShardID(key string) uint32 {
	hash := fnv.New32a()
	hash.Write([]byte(key))
	return hash.Sum32() % shards
}

func deleteFromShardedMap(m shardedMap, key string) {
	shard := getShardID(key)
	shardedMap := &m.shards[shard]
	shardedMap.Lock()
	defer shardedMap.Unlock()
	// Keep numKeys consistent: only decrement when the key actually existed.
	if _, ok := shardedMap.data[key]; ok {
		shardedMap.numKeys--
	}
	delete(shardedMap.data, key)
}

func insertIntoShardedMap(m shardedMap, key string, value []byte, overwrite bool) error {
	shard := getShardID(key)
	shardedMap := &m.shards[shard]
	// Hold the write lock across the check-then-insert sequence so another
	// goroutine cannot insert the key between the existence check and the write.
	shardedMap.Lock()
	defer shardedMap.Unlock()
	_, ok := shardedMap.data[key]
	if !overwrite && ok {
		return errors.New("key exists and overwrite is set to false")
	}
	shardedMap.data[key] = value
	if !ok {
		// Only count genuinely new keys; overwrites do not change the key count.
		shardedMap.numKeys++
	}
	return nil
}

func
(m shardedMap, key string) ([]byte, error) {
	shard := getShardID(key)
	shardedMap := &m.shards[shard]
	shardedMap.RLock()
	defer shardedMap.RUnlock()
	value, ok := shardedMap.data[key]
	if !ok {
		return nil, errors.New("key not found in database")
	}
	return value, nil
}
getFromShardedMap
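A short sketch of how the sharded map above might be exercised from inside the kvdb package; exampleShardedMapUsage is a hypothetical helper, not part of the source.

package kvdb

import "fmt"

// exampleShardedMapUsage is a sketch, assuming the helpers above compile as
// part of package kvdb (writeChunk is defined elsewhere in the package).
func exampleShardedMapUsage() {
	m := newShardMap()

	// Insert a fresh key; with overwrite=false the call fails only when
	// the key already exists.
	if err := insertIntoShardedMap(m, "user:1", []byte("alice"), false); err != nil {
		fmt.Println("insert failed:", err)
	}

	// Reads take only the owning shard's read lock, so lookups on
	// different shards can proceed in parallel.
	if v, err := getFromShardedMap(m, "user:1"); err == nil {
		fmt.Println(string(v))
	}

	deleteFromShardedMap(m, "user:1")
}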
testing.pb.go
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: github.com/cockroachdb/errors/errorspb/testing.proto package errorspb import ( fmt "fmt" proto "github.com/gogo/protobuf/proto" io "io" math "math" math_bits "math/bits" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package // TestError is meant for use in testing only. type TestError struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *TestError) Reset() { *m = TestError{} } func (m *TestError) String() string { return proto.CompactTextString(m) } func (*TestError) ProtoMessage() {} func (*TestError) Descriptor() ([]byte, []int) { return fileDescriptor_5b5173a07163c41e, []int{0} } func (m *TestError) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TestError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { b = b[:cap(b)] n, err := m.MarshalTo(b) if err != nil { return nil, err } return b[:n], nil } func (m *TestError) XXX_Merge(src proto.Message) { xxx_messageInfo_TestError.Merge(m, src) } func (m *TestError) XXX_Size() int { return m.Size() } func (m *TestError) XXX_DiscardUnknown() { xxx_messageInfo_TestError.DiscardUnknown(m) } var xxx_messageInfo_TestError proto.InternalMessageInfo func init() { proto.RegisterType((*TestError)(nil), "cockroach.errorspb.TestError") } func init() { proto.RegisterFile("github.com/cockroachdb/errors/errorspb/testing.proto", fileDescriptor_5b5173a07163c41e) } var fileDescriptor_5b5173a07163c41e = []byte{ // 127 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x49, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xce, 0x4f, 0xce, 0x2e, 0xca, 0x4f, 0x4c, 0xce, 0x48, 0x49, 0xd2, 0x4f, 0x2d, 0x2a, 0xca, 0x2f, 0x2a, 0x86, 0x52, 0x05, 0x49, 0xfa, 0x25, 0xa9, 0xc5, 0x25, 0x99, 0x79, 0xe9, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x42, 0x70, 0xa5, 0x7a, 0x30, 0x15, 0x4a, 0xdc, 0x5c, 0x9c, 0x21, 0xa9, 0xc5, 0x25, 0xae, 0x20, 0xbe, 0x93, 0xd2, 0x89, 0x87, 0x72, 0x0c, 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0x78, 0xe3, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x44, 0x71, 0xc0, 0x34, 0x24, 0xb1, 0x81, 0xcd, 0x32, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x19, 0xd6, 0xd6, 0x4f, 0x83, 0x00, 0x00, 0x00, } func (m *TestError) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalTo(dAtA) if err != nil { return nil, err } return dAtA[:n], nil } func (m *TestError) MarshalTo(dAtA []byte) (int, error) { var i int _ = i var l int _ = l return i, nil } func encodeVarintTesting(dAtA []byte, offset int, v uint64) int { for v >= 1<<7 { dAtA[offset] = uint8(v&0x7f | 0x80) v >>= 7 offset++ } dAtA[offset] = uint8(v) return offset + 1 } func (m *TestError) Size() (n int) { if m == nil { return 0 } var l int _ = l return n } func sovTesting(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } func sozTesting(x uint64) (n int) { return sovTesting(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (m *TestError) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var 
wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTesting } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: TestError: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: TestError: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum {
return err } if skippy < 0 { return ErrInvalidLengthTesting } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTesting } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipTesting(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTesting } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTesting } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if dAtA[iNdEx-1] < 0x80 { break } } return iNdEx, nil case 1: iNdEx += 8 return iNdEx, nil case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTesting } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if length < 0 { return 0, ErrInvalidLengthTesting } iNdEx += length if iNdEx < 0 { return 0, ErrInvalidLengthTesting } return iNdEx, nil case 3: for { var innerWire uint64 var start int = iNdEx for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTesting } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ innerWire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } innerWireType := int(innerWire & 0x7) if innerWireType == 4 { break } next, err := skipTesting(dAtA[start:]) if err != nil { return 0, err } iNdEx = start + next if iNdEx < 0 { return 0, ErrInvalidLengthTesting } } return iNdEx, nil case 4: return iNdEx, nil case 5: iNdEx += 4 return iNdEx, nil default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } } panic("unreachable") } var ( ErrInvalidLengthTesting = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowTesting = fmt.Errorf("proto: integer overflow") )
default: iNdEx = preIndex skippy, err := skipTesting(dAtA[iNdEx:]) if err != nil {
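A small round-trip sketch for the generated TestError type, assuming the package is imported under its declared path; only the Marshal, Unmarshal, and String methods shown above are used.

package main

import (
	"fmt"

	"github.com/cockroachdb/errors/errorspb"
)

func main() {
	// TestError has no fields, so the wire encoding is empty, but the
	// generated Marshal/Unmarshal pair still round-trips cleanly.
	in := &errorspb.TestError{}
	b, err := in.Marshal()
	if err != nil {
		panic(err)
	}
	var out errorspb.TestError
	if err := out.Unmarshal(b); err != nil {
		panic(err)
	}
	fmt.Printf("decoded %d-byte message: %s\n", len(b), out.String())
}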
validate.go
package validate

import (
	"errors"
	"fmt"

	"github.com/logrusorgru/aurora"
	"github.com/shuguocloud/go-zero/tools/goctl/api/parser"
	"github.com/urfave/cli"
)

// GoValidateApi verifies whether the given api file has syntax errors
func GoValidateApi(c *cli.Context) error
{ apiFile := c.String("api") if len(apiFile) == 0 { return errors.New("missing -api") } _, err := parser.Parse(apiFile) if err == nil { fmt.Println(aurora.Green("api format ok")) } return err }
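A sketch of how GoValidateApi might be registered in a urfave/cli (v1) app. The command and flag wiring here is an assumption for illustration; only the "api" flag name is taken from the c.String("api") lookup above.

package main

import (
	"log"
	"os"

	"github.com/shuguocloud/go-zero/tools/goctl/api/validate"
	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Commands = []cli.Command{
		{
			Name:  "validate",
			Usage: "validate an api file",
			Flags: []cli.Flag{
				cli.StringFlag{
					Name:  "api",
					Usage: "the api file to validate",
				},
			},
			// urfave/cli v1 accepts a func(*cli.Context) error action.
			Action: validate.GoValidateApi,
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}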
run_training.py
# Copyright (c) 2019, NVIDIA Corporation. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, visit
# https://nvlabs.github.io/stylegan2/license.html
#
# Modified by Varun A. Kelkar - [email protected]

import argparse
import copy
import os
import sys

import dnnlib
from dnnlib import EasyDict

from metrics.metric_defaults import metric_defaults

#----------------------------------------------------------------------------

_valid_configs = [
    # Table 1
    'config-a', # Baseline StyleGAN
    'config-b', # + Weight demodulation
    'config-c', # + Lazy regularization
    'config-d', # + Path length regularization
    'config-e', # + No growing, new G & D arch.
    'config-f', # + Large networks (default)
    'config-g', # + zero sectioning in dlatent
    'config-h', # f + lsoftplus nonlinearity
    'config-i', # + zero sectioning in dlatent in a different way
    'config-j', # h + mix all styles

    # Table 2
    'config-e-Gorig-Dorig', 'config-e-Gorig-Dresnet', 'config-e-Gorig-Dskip',
    'config-e-Gresnet-Dorig', 'config-e-Gresnet-Dresnet', 'config-e-Gresnet-Dskip',
    'config-e-Gskip-Dorig', 'config-e-Gskip-Dresnet', 'config-e-Gskip-Dskip',
    'config-frgb', 'config-hrgb', 'config-jrgb',

    # No latent noise series
    'config-frgb-nonoise',
]

#----------------------------------------------------------------------------

def run(dataset, data_dir, result_dir, config_id, num_gpus, total_kimg, gamma, mirror_augment, metrics, resume, resume_path, stall):
    train     = EasyDict(run_func_name='training.training_loop.training_loop') # Options for training loop.
    G         = EasyDict(func_name='training.networks_stylegan2.G_main')       # Options for generator network.
    D         = EasyDict(func_name='training.networks_stylegan2.D_stylegan2')  # Options for discriminator network.
    G_opt     = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)                  # Options for generator optimizer.
    D_opt     = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)                  # Options for discriminator optimizer.
    G_loss    = EasyDict(func_name='training.loss.G_logistic_ns_pathreg')      # Options for generator loss.
    D_loss    = EasyDict(func_name='training.loss.D_logistic_r1')              # Options for discriminator loss.
    sched     = EasyDict()                                                     # Options for TrainingSchedule.
    grid      = EasyDict(size='8k', layout='random')                           # Options for setup_snapshot_image_grid().
    sc        = dnnlib.SubmitConfig()                                          # Options for dnnlib.submit_run().
    tf_config = {'rnd.np_random_seed': 1000}                                   # Options for tflib.init_tf().

    if resume:
        train.resume_pkl = resume_path
        train.resume_kimg = resume
    train.stall = stall

    train.data_dir = data_dir
    train.total_kimg = total_kimg
    train.mirror_augment = mirror_augment
    train.image_snapshot_ticks = train.network_snapshot_ticks = 10
    sched.G_lrate_base = sched.D_lrate_base = 0.001
    sched.minibatch_size_base = 32
    sched.minibatch_gpu_base = 4
    D_loss.gamma = 10
    metrics = [metric_defaults[x] for x in metrics]
    desc = f'stylegan2-lr-{sched.D_lrate_base}'

    desc += '-' + dataset

    # nonoise series
    if '-nonoise' in config_id:
        desc += '-nonoise'
        G.if_latent_noise = False
        # str.strip() removes a *set of characters*, not a suffix,
        # so drop the suffix explicitly instead.
        config_id = config_id[:-len('-nonoise')]

    # for rgb images
    if 'rgb' in config_id:
        dataset_args = EasyDict(tfrecord_dir=dataset, dtype='uint8', dynamic_range=[0,255])
        config_id = config_id.replace('rgb', '')
    else:
        dataset_args = EasyDict(tfrecord_dir=dataset, dtype='float32', dynamic_range=[0,1])

    assert num_gpus in [1, 2, 4, 8]
    sc.num_gpus = num_gpus
    desc += '-%dgpu' % num_gpus

    assert config_id in _valid_configs
    desc += '-' + config_id

    # Configs A-E: Shrink networks to match original StyleGAN.
    if config_id != 'config-f':
        G.fmap_base = D.fmap_base = 8 << 10

    # Config E: Set gamma to 100 and override G & D architecture.
    if config_id.startswith('config-e'):
        D_loss.gamma = 100
        if 'Gorig' in config_id: G.architecture = 'orig'
        if 'Gskip' in config_id: G.architecture = 'skip' # (default)
        if 'Gresnet' in config_id: G.architecture = 'resnet'
        if 'Dorig' in config_id: D.architecture = 'orig'
        if 'Dskip' in config_id: D.architecture = 'skip'
        if 'Dresnet' in config_id: D.architecture = 'resnet' # (default)

    # Configs A-D: Enable progressive growing and switch to networks that support it.
    if config_id in ['config-a', 'config-b', 'config-c', 'config-d']:
        sched.lod_initial_resolution = 8
        sched.G_lrate_base = sched.D_lrate_base = 0.001
        sched.G_lrate_dict = sched.D_lrate_dict = {128: 0.0015, 256: 0.002, 512: 0.003, 1024: 0.003}
        sched.minibatch_size_base = 32 # (default)
        sched.minibatch_size_dict = {8: 256, 16: 128, 32: 64, 64: 32}
        sched.minibatch_gpu_base = 4 # (default)
        sched.minibatch_gpu_dict = {8: 32, 16: 16, 32: 8, 64: 4}
        G.synthesis_func = 'G_synthesis_stylegan_revised'
        D.func_name = 'training.networks_stylegan2.D_stylegan'

    # Configs A-C: Disable path length regularization.
    if config_id in ['config-a', 'config-b', 'config-c']:
        G_loss = EasyDict(func_name='training.loss.G_logistic_ns')

    # Configs A-B: Disable lazy regularization.
    if config_id in ['config-a', 'config-b']:
        train.lazy_regularization = False

    # Config A: Switch to original StyleGAN networks.
    if config_id == 'config-a':
        G = EasyDict(func_name='training.networks_stylegan.G_style')
        D = EasyDict(func_name='training.networks_stylegan.D_basic')

    # Config G: Zero sectioning in dlatent
    if config_id == 'config-g':
        G.zero_section = 2
        G.nonlinearity = 'lsoftplus'
        G.latent_size = 2048
        G.dlatent_size = 2048

    # Config H: Use lsoftplus nonlinearity, and no zero sectioning
    if config_id == 'config-h':
        G.nonlinearity = 'lsoftplus'

    # Config I: Zero sectioning in dlatent in a different way
    if config_id == 'config-i':
        G.zero_section = [0.1, 0.2, 0.3, 0.45, 0.6, 0.8, 0.9, 1.]
        G.nonlinearity = 'lsoftplus'
        G.latent_size = 2048
        G.dlatent_size = 2048

    if config_id == 'config-j':
        G.mix_all = 1
        # G.nonlinearity = 'lsoftplus'

    if gamma is not None:
        D_loss.gamma = gamma

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    kwargs = EasyDict(train)
    kwargs.update(G_args=G, D_args=D, G_opt_args=G_opt, D_opt_args=D_opt, G_loss_args=G_loss, D_loss_args=D_loss)
    kwargs.update(dataset_args=dataset_args, sched_args=sched, grid_args=grid, metric_arg_list=metrics, tf_config=tf_config)
    kwargs.submit_config = copy.deepcopy(sc)
    kwargs.submit_config.run_dir_root = result_dir
    kwargs.submit_config.run_desc = desc
    dnnlib.submit_run(**kwargs)

#----------------------------------------------------------------------------

def _str_to_bool(v):
    if isinstance(v, bool):
        return v
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')

def _parse_comma_sep(s):
    if s is None or s.lower() == 'none' or s == '':
        return []
    return s.split(',')

#----------------------------------------------------------------------------

_examples = '''examples:

  # Train StyleGAN2 using the FFHQ dataset
  python %(prog)s --num-gpus=8 --data-dir=~/datasets --config=config-f --dataset=ffhq --mirror-augment=true

valid configs:

  ''' + ', '.join(_valid_configs) + '''

valid metrics:

  ''' + ', '.join(sorted([x for x in metric_defaults.keys()])) + '''

'''

def
():
    parser = argparse.ArgumentParser(
        description='Train StyleGAN2.',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument('--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')
    parser.add_argument('--data-dir', help='Dataset root directory', required=True)
    parser.add_argument('--dataset', help='Training dataset', required=True)
    # required=True would make argparse ignore the default, so the flag stays optional.
    parser.add_argument('--config', help='Training config (default: %(default)s)', default='config-f', dest='config_id', metavar='CONFIG')
    parser.add_argument('--num-gpus', help='Number of GPUs (default: %(default)s)', default=1, type=int, metavar='N')
    parser.add_argument('--total-kimg', help='Training length in thousands of images (default: %(default)s)', metavar='KIMG', default=25000, type=int)
    parser.add_argument('--gamma', help='R1 regularization weight (default is config dependent)', default=None, type=float)
    parser.add_argument('--mirror-augment', help='Mirror augment (default: %(default)s)', default=False, metavar='BOOL', type=_str_to_bool)
    # The default must be a string so argparse runs it through _parse_comma_sep;
    # default=None would leave args.metrics as None and crash the loop below.
    parser.add_argument('--metrics', help='Comma-separated list of metrics or "none" (default: %(default)s)', default='none', type=_parse_comma_sep)
    parser.add_argument('--resume', help='Resume training from this kimg count. (default: %(default)s)', default=0, type=float, metavar='KIMG')
    parser.add_argument('--resume_path', help='Network pickle to resume training from. (default: %(default)s)', default='', type=str, metavar='PATH')
    parser.add_argument('--stall', help='Pause training (default: %(default)s)', default=False, metavar='BOOL', type=_str_to_bool)

    args = parser.parse_args()

    if not os.path.exists(args.data_dir):
        print('Error: dataset root directory does not exist.')
        sys.exit(1)

    if args.config_id not in _valid_configs:
        print('Error: --config value must be one of: ', ', '.join(_valid_configs))
        sys.exit(1)

    for metric in args.metrics:
        if metric not in metric_defaults:
            print('Error: unknown metric \'%s\'' % metric)
            sys.exit(1)

    run(**vars(args))

#----------------------------------------------------------------------------

if __name__ == "__main__":
    main()

#----------------------------------------------------------------------------
main
canvas.py
""" PyColourChooser Copyright (C) 2002 Michael Gilfix <[email protected]> This file is part of PyColourChooser. This version of PyColourChooser is open source; you can redistribute it and/or modify it under the licensed terms. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. """ # 12/14/2003 - Jeff Grimmett ([email protected]) # # o 2.5 compatibility update. # # 12/21/2003 - Jeff Grimmett ([email protected]) # # o wxPyColorChooser -> PyColorChooser # o wxPyColourChooser -> PyColourChooser # # Tags: phoenix-port import wx class BitmapBuffer(wx.MemoryDC): """A screen buffer class. This class implements a screen output buffer. Data is meant to be drawn in the buffer class and then blitted directly to the output device, or on-screen window. """ def __init__(self, width, height, colour): """Initialize the empty buffer object.""" wx.MemoryDC.__init__(self) self.width = width self.height = height self.colour = colour self.bitmap = wx.Bitmap(self.width, self.height) self.SelectObject(self.bitmap) # Initialize the buffer to the background colour self.SetBackground(wx.Brush(self.colour, wx.BRUSHSTYLE_SOLID)) self.Clear() # Make each logical unit of the buffer equal to 1 pixel self.SetMapMode(wx.MM_TEXT) def GetBitmap(self): """Returns the internal bitmap for direct drawing.""" return self.bitmap # GetPixel seems to always return (-1, -1, -1, 255) # on OS X so this is a workaround for that issue. def GetPixelColour(self, x, y): """Gets the color value of the pixel at the given cords. """ img = self.GetAsBitmap().ConvertToImage() red = img.GetRed(x, y) green = img.GetGreen(x, y) blue = img.GetBlue(x, y) return wx.Colour(red, green, blue) class Canvas(wx.Window): """A canvas class for arbitrary drawing. The Canvas class implements a window that allows for drawing arbitrary graphics. It implements a double buffer scheme and blits the off-screen buffer to the window during paint calls by the windowing system for speed. Some other methods for determining the canvas colour and size are also provided. """ def __init__(self, parent, id, pos=wx.DefaultPosition, style=wx.SIMPLE_BORDER, forceClientSize=None): """Creates a canvas instance and initializes the off-screen buffer. Also sets the handler for rendering the canvas automatically via size and paint calls from the windowing system.""" wx.Window.__init__(self, parent, id, pos, style=style) if forceClientSize: self.SetMaxClientSize(forceClientSize) self.SetMinClientSize(forceClientSize) # Perform an intial sizing self.ReDraw() # Register event handlers self.Bind(wx.EVT_SIZE, self.onSize) self.Bind(wx.EVT_PAINT, self.onPaint) def MakeNewBuffer(self): size = self.GetClientSize() self.buffer = BitmapBuffer(size[0], size[1], self.GetBackgroundColour()) def onSize(self, event): """Perform actual redraw to off-screen buffer only when the size of the canvas has changed. This saves a lot of computation since the same image can be re-used, provided the canvas size hasn't changed.""" self.MakeNewBuffer() self.DrawBuffer() self.Refresh() def ReDraw(self): """Explicitly tells the canvas to redraw it's contents.""" self.onSize(None) def Refresh(self): """Re-draws the buffer contents on-screen.""" dc = wx.ClientDC(self) self.Blit(dc) def onPaint(self, event): """Renders the off-screen buffer on-screen.""" dc = wx.PaintDC(self) self.Blit(dc) def Blit(self, dc):
    def GetBoundingRect(self):
        """Returns a tuple (left, bottom, right, top) containing the
        co-ordinates of the corners of the canvas."""
        x, y = self.GetPosition()
        w, h = self.GetSize()
        return (x, y + h, x + w, y)

    def DrawBuffer(self):
        """Actual drawing function for drawing into the off-screen
        buffer. To be overridden in the implementing class. Does
        nothing by default."""
        pass
"""Performs the blit of the buffer contents on-screen.""" width, height = self.buffer.GetSize() dc.Blit(0, 0, width, height, self.buffer, 0, 0)
main.go
package main import ( "fmt" "log" "os" "github.com/hashicorp/vault/helper/pluginutil" "github.com/hashicorp/vault/logical/plugin" "github.com/praekeltfoundation/vault-plugin-auth-mesos" "github.com/praekeltfoundation/vault-plugin-auth-mesos/version" ) func
() { apiClientMeta := &pluginutil.APIClientMeta{} flags := apiClientMeta.FlagSet() versionFlag := flags.Bool("version", false, "Print version information and exit.") if err := flags.Parse(os.Args[1:]); err != nil { log.Fatal(err) } if *versionFlag { fmt.Println(version.HumanReadable()) return } tlsConfig := apiClientMeta.GetTLSConfig() tlsProviderFunc := pluginutil.VaultPluginTLSProvider(tlsConfig) if err := plugin.Serve(&plugin.ServeOpts{ BackendFactoryFunc: mesosauth.Factory, TLSProviderFunc: tlsProviderFunc, }); err != nil { log.Fatal(err) } }
main
ecjson.go
// Package ecjson represents encrypted and compressed content using
// JSON-based serialization.
package ecjson

import (
	"bytes"
	"compress/gzip"
	"crypto/cipher"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io"

	"github.com/pomerium/pomerium/internal/encoding"

	"github.com/pomerium/pomerium/pkg/cryptutil"
)

// EncryptedCompressedJSON implements SecureEncoder for JSON using an AEAD cipher.
//
// See https://en.wikipedia.org/wiki/Authenticated_encryption
type EncryptedCompressedJSON struct {
	aead cipher.AEAD
}

// New returns a MarshalUnmarshaler that compresses and encrypts JSON
// using the given AEAD cipher (e.g. XChaCha20-Poly1305).
func New(aead cipher.AEAD) encoding.MarshalUnmarshaler {
	return &EncryptedCompressedJSON{aead: aead}
}

// Marshal marshals the interface state as JSON, encrypts the JSON using the cipher
// and base64 encodes the binary value as a string and returns the result
//
// can panic if source of random entropy is exhausted generating a nonce.
func (c *EncryptedCompressedJSON) Marshal(s interface{}) ([]byte, error) {
	// encode json value
	plaintext, err := json.Marshal(s)
	if err != nil {
		return nil, err
	}
	// compress the plaintext bytes
	compressed, err := compress(plaintext)
	if err != nil {
		return nil, err
	}
	// encrypt the compressed JSON bytes
	ciphertext := cryptutil.Encrypt(c.aead, compressed, nil)
	// base64-encode the result
	encoded := base64.RawURLEncoding.EncodeToString(ciphertext)
	return []byte(encoded), nil
}

// Unmarshal takes the marshaled string, base64-decodes it into a byte slice, decrypts the
// byte slice with the passed cipher, and unmarshals the resulting JSON into the struct pointer passed
func (c *EncryptedCompressedJSON) Unmarshal(data []byte, s interface{}) error {
	// convert base64 string value to bytes
	ciphertext, err := base64.RawURLEncoding.DecodeString(string(data))
	if err != nil {
		return err
	}
	// decrypt the bytes
	compressed, err := cryptutil.Decrypt(c.aead, ciphertext, nil)
	if err != nil {
		return err
	}
	// decompress the unencrypted bytes
	plaintext, err := decompress(compressed)
	if err != nil {
		return err
	}
	// unmarshal the unencrypted bytes
	err = json.Unmarshal(plaintext, s)
	if err != nil {
		return err
	}
	return nil
}

// compress gzips a set of bytes
func compress(data []byte) ([]byte, error) {
	var buf bytes.Buffer
	// gzip.NewWriterLevel only fails on an invalid level, so no separate
	// nil check on the writer is needed.
	writer, err := gzip.NewWriterLevel(&buf, gzip.DefaultCompression)
	if err != nil {
		return nil, fmt.Errorf("cryptutil: failed to create a gzip writer: %w", err)
	}
	if _, err = writer.Write(data); err != nil {
		return nil, fmt.Errorf("cryptutil: failed to compress data with err: %w", err)
	}
	if err = writer.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

// decompress un-gzips a set of bytes
func
(data []byte) ([]byte, error) { reader, err := gzip.NewReader(bytes.NewReader(data)) if err != nil { return nil, fmt.Errorf("cryptutil: failed to create a gzip reader: %w", err) } defer reader.Close() var buf bytes.Buffer if _, err = io.Copy(&buf, reader); err != nil { return nil, err } return buf.Bytes(), nil }
decompress
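A round-trip sketch for the encoder above. The AEAD is built with golang.org/x/crypto/chacha20poly1305 rather than assuming a particular cryptutil constructor, and the ecjson import path is a guess based on the package name.

package main

import (
	"fmt"

	// assumed import path for the package above
	"github.com/pomerium/pomerium/internal/encoding/ecjson"
	"golang.org/x/crypto/chacha20poly1305"
)

type session struct {
	User string `json:"user"`
}

func main() {
	// 32-byte key; use a securely generated random key in practice.
	key := make([]byte, chacha20poly1305.KeySize)
	aead, err := chacha20poly1305.NewX(key)
	if err != nil {
		panic(err)
	}
	enc := ecjson.New(aead)

	// Marshal compresses, encrypts, then base64-encodes.
	token, err := enc.Marshal(session{User: "alice"})
	if err != nil {
		panic(err)
	}

	// Unmarshal reverses each step and fills the struct pointer.
	var s session
	if err := enc.Unmarshal(token, &s); err != nil {
		panic(err)
	}
	fmt.Println(s.User) // alice
}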