Dataset columns (each record below gives its fields in this order):
  patch  string, lengths 17 to 31.2k
  y      int64, 1 to 1
  oldf   string, lengths 0 to 2.21M
  idx    int64, 1 to 1
  id     int64, 4.29k to 68.4k
  msg    string, lengths 8 to 843
  proj   string, 212 classes
  lang   string, 9 classes
@@ -76,8 +76,9 @@ public class Actions { * Note that the modifier key is <b>never</b> released implicitly - either * <i>keyUp(theKey)</i> or <i>sendKeys(Keys.NULL)</i> * must be called to release the modifier. - * @param theKey Either {@link Keys#SHIFT}, {@link Keys#ALT} or {@link Keys#CONTROL}. If the - * provided key is none of those, {@link IllegalArgumentException} is thrown. + * @param theKey Either {@link Keys#SHIFT}, {@link Keys#ALT}, {@link Keys#CONTROL} + * or {@link Keys#COMMAND}. If the provided key is none of those, + * {@link IllegalArgumentException} is thrown. * @return A self reference. */ public Actions keyDown(Keys theKey) {
y: 1
/* Copyright 2007-2011 Selenium committers Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium.interactions; import org.openqa.selenium.HasInputDevices; import org.openqa.selenium.Keyboard; import org.openqa.selenium.Keys; import org.openqa.selenium.Mouse; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.internal.Locatable; /** * The user-facing API for emulating complex user gestures. Use this class rather than using the * Keyboard or Mouse directly. * * Implements the builder pattern: Builds a CompositeAction containing all actions specified by the * method calls. */ public class Actions { protected Mouse mouse; protected Keyboard keyboard; protected CompositeAction action; /** * Default constructor - uses the default keyboard, mouse implemented by the driver. * @param driver the driver providing the implementations to use. */ public Actions(WebDriver driver) { this(((HasInputDevices) driver).getKeyboard(), ((HasInputDevices) driver).getMouse()); } /** * A constructor that should only be used when the keyboard or mouse were extended to provide * additional functionality (for example, dragging-and-dropping from the desktop). * @param keyboard the {@link Keyboard} implementation to delegate to. * @param mouse the {@link Mouse} implementation to delegate to. */ public Actions(Keyboard keyboard, Mouse mouse) { this.mouse = mouse; this.keyboard = keyboard; resetCompositeAction(); } /** * Only used by the TouchActions class. * @param keyboard implementation to delegate to. */ public Actions(Keyboard keyboard) { this.keyboard = keyboard; resetCompositeAction(); } private void resetCompositeAction() { action = new CompositeAction(); } /** * Performs a modifier key press. Does not release the modifier key - subsequent interactions * may assume it's kept pressed. * Note that the modifier key is <b>never</b> released implicitly - either * <i>keyUp(theKey)</i> or <i>sendKeys(Keys.NULL)</i> * must be called to release the modifier. * @param theKey Either {@link Keys#SHIFT}, {@link Keys#ALT} or {@link Keys#CONTROL}. If the * provided key is none of those, {@link IllegalArgumentException} is thrown. * @return A self reference. */ public Actions keyDown(Keys theKey) { return this.keyDown(null, theKey); } /** * Performs a modifier key press after focusing on an element. Equivalent to: * <i>Actions.click(element).sendKeys(theKey);</i> * @see #keyDown(org.openqa.selenium.Keys) * * @param theKey Either {@link Keys#SHIFT}, {@link Keys#ALT} or {@link Keys#CONTROL}. If the * provided key is none of those, {@link IllegalArgumentException} is thrown. * @return A self reference. */ public Actions keyDown(WebElement element, Keys theKey) { action.addAction(new KeyDownAction(keyboard, mouse, (Locatable) element, theKey)); return this; } /** * Performs a modifier key release. Releasing a non-depressed modifier key will yield undefined * behaviour. * * @param theKey Either {@link Keys#SHIFT}, {@link Keys#ALT} or {@link Keys#CONTROL}. * @return A self reference. 
*/ public Actions keyUp(Keys theKey) { return this.keyUp(null, theKey); } /** * Performs a modifier key release after focusing on an element. Equivalent to: * <i>Actions.click(element).sendKeys(theKey);</i> * @see #keyUp(org.openqa.selenium.Keys) on behaviour regarding non-depressed modifier keys. * * @param theKey Either {@link Keys#SHIFT}, {@link Keys#ALT} or {@link Keys#CONTROL}. * @return A self reference. */ public Actions keyUp(WebElement element, Keys theKey) { action.addAction(new KeyUpAction(keyboard, mouse, (Locatable) element, theKey)); return this; } /** * Sends keys to the active element. This differs from calling * {@link WebElement#sendKeys(CharSequence...)} on the active element in two ways: * <ul> * <li>The modifier keys included in this call are not released.</li> * <li>There is no attempt to re-focus the element - so sendKeys(Keys.TAB) for switching * elements should work. </li> * </ul> * * @see WebElement#sendKeys(CharSequence...) * * @param keysToSend The keys. * @return A self reference. */ public Actions sendKeys(CharSequence... keysToSend) { return this.sendKeys(null, keysToSend); } /** * Equivalent to calling: * <i>Actions.click(element).sendKeys(keysToSend).</i> * This method is different from {@link org.openqa.selenium.WebElement#sendKeys(CharSequence...)} - see * {@link Actions#sendKeys(CharSequence...)} for details how. * * @see #sendKeys(java.lang.CharSequence[]) * * @param element element to focus on. * @param keysToSend The keys. * @return A self reference. */ public Actions sendKeys(WebElement element, CharSequence... keysToSend) { action.addAction(new SendKeysAction(keyboard, mouse, (Locatable) element, keysToSend)); return this; } /** * Clicks (without releasing) in the middle of the given element. This is equivalent to: * <i>Actions.moveToElement(onElement).clickAndHold()</i> * * @param onElement Element to move to and click. * @return A self reference. */ public Actions clickAndHold(WebElement onElement) { action.addAction(new ClickAndHoldAction(mouse, (Locatable) onElement)); return this; } /** * Clicks (without releasing) at the current mouse location. * @return A self reference. */ public Actions clickAndHold() { return this.clickAndHold(null); } /** * Releases the depressed left mouse button, in the middle of the given element. * This is equivalent to: * <i>Actions.moveToElement(onElement).release()</i> * * Invoking this action without invoking {@link #clickAndHold()} first will result in * undefined behaviour. * * @param onElement Element to release the mouse button above. * @return A self reference. */ public Actions release(WebElement onElement) { action.addAction(new ButtonReleaseAction(mouse, (Locatable) onElement)); return this; } /** * Releases the depressed left mouse button at the current mouse location. * @see #release(org.openqa.selenium.WebElement) * @return A self reference. */ public Actions release() { return this.release(null); } /** * Clicks in the middle of the given element. Equivalent to: * <i>Actions.moveToElement(onElement).click()</i> * * @param onElement Element to click. * @return A self reference. */ public Actions click(WebElement onElement) { action.addAction(new ClickAction(mouse, (Locatable) onElement)); return this; } /** * Clicks at the current mouse location. Useful when combined with * {@link #moveToElement(org.openqa.selenium.WebElement, int, int)} or * {@link #moveByOffset(int, int)}. * @return A self reference. 
*/ public Actions click() { return this.click(null); } /** * Performs a double-click at middle of the given element. Equivalent to: * <i>Actions.moveToElement(element).doubleClick()</i> * * @param onElement Element to move to. * @return A self reference. */ public Actions doubleClick(WebElement onElement) { action.addAction(new DoubleClickAction(mouse, (Locatable) onElement)); return this; } /** * Performs a double-click at the current mouse location. * @return A self reference. */ public Actions doubleClick() { return this.doubleClick(null); } /** * Moves the mouse to the middle of the element. The element is scrolled into view and its * location is calculated using getBoundingClientRect. * @param toElement element to move to. * @return A self reference. */ public Actions moveToElement(WebElement toElement) { action.addAction(new MoveMouseAction(mouse, (Locatable) toElement)); return this; } /** * Moves the mouse to an offset from the top-left corner of the element. * The element is scrolled into view and its location is calculated using getBoundingClientRect. * @param toElement element to move to. * @param xOffset Offset from the top-left corner. A negative value means coordinates right from * the element. * @param yOffset Offset from the top-left corner. A negative value means coordinates above * the element. * @return A self reference. */ public Actions moveToElement(WebElement toElement, int xOffset, int yOffset) { action.addAction(new MoveToOffsetAction(mouse, (Locatable) toElement, xOffset, yOffset)); return this; } /** * Moves the mouse from its current position (or 0,0) by the given offset. If the coordinates * provided are outside the viewport (the mouse will end up outside the browser window) then * the viewport is scrolled to match. * @param xOffset horizontal offset. A negative value means moving the mouse left. * @param yOffset vertical offset. A negative value means moving the mouse up. * @return A self reference. * @throws MoveTargetOutOfBoundsException if the provided offset is outside the document's * boundaries. */ public Actions moveByOffset(int xOffset, int yOffset) { action.addAction(new MoveToOffsetAction(mouse, null, xOffset, yOffset)); return this; } /** * Performs a context-click at middle of the given element. First performs a mouseMove * to the location of the element. * * @param onElement Element to move to. * @return A self reference. */ public Actions contextClick(WebElement onElement) { action.addAction(new ContextClickAction(mouse, (Locatable) onElement)); return this; } /** * Performs a context-click at the current mouse location. * @return A self reference. */ public Actions contextClick() { return this.contextClick(null); } /** * A convenience method that performs click-and-hold at the location of the source element, * moves to the location of the target element, then releases the mouse. * * @param source element to emulate button down at. * @param target element to move to and release the mouse at. * @return A self reference. */ public Actions dragAndDrop(WebElement source, WebElement target) { action.addAction(new ClickAndHoldAction(mouse, (Locatable) source)); action.addAction(new MoveMouseAction(mouse, (Locatable) target)); action.addAction(new ButtonReleaseAction(mouse, (Locatable) target)); return this; } /** * A convenience method that performs click-and-hold at the location of the source element, * moves by a given offset, then releases the mouse. * * @param source element to emulate button down at. * @param xOffset horizontal move offset. 
* @param yOffset vertical move offset. * @return A self reference. */ public Actions dragAndDropBy(WebElement source, int xOffset, int yOffset) { action.addAction(new ClickAndHoldAction(mouse, (Locatable) source)); action.addAction(new MoveToOffsetAction(mouse, null, xOffset, yOffset)); action.addAction(new ButtonReleaseAction(mouse, null)); return this; } /** * Generates a composite action containinig all actions so far, ready to be performed (and * resets the internal builder state, so subsequent calls to build() will contain fresh * sequences). * * @return the composite action */ public Action build() { CompositeAction toReturn = action; resetCompositeAction(); return toReturn; } /** * A convenience method for performing the actions without calling build() first. */ public void perform() { build().perform(); } }
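The class documentation in the file above describes Actions as a builder: each call queues an action into a CompositeAction, build() returns the composite (and resets the builder), and perform() executes it. Below is a minimal sketch of that usage; the class name, method name, and element locators are illustrative assumptions, not part of the Selenium sources above.

```java
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.Action;
import org.openqa.selenium.interactions.Actions;

public class ActionsBuilderExample {
    // Builds the same gesture that the dragAndDrop(source, target) convenience
    // method is documented to produce: click-and-hold on the source, move to
    // the target, then release the mouse.
    static void dragSourceOntoTarget(WebDriver driver) {
        WebElement source = driver.findElement(By.id("source")); // illustrative locators
        WebElement target = driver.findElement(By.id("target"));
        Action dragAndDrop = new Actions(driver)
            .clickAndHold(source)
            .moveToElement(target)
            .release(target)
            .build();        // returns the composite and resets the builder
        dragAndDrop.perform();
    }
}
```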
idx: 1
id: 10,429
msg: Keys.COMMAND seems to be an alias to Keys.META. That isn't mentioned?
proj: SeleniumHQ-selenium
lang: rb
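To illustrate the behaviour the patch documents, here is a short sketch of pressing the Mac Command key through the Actions builder. It assumes a macOS browser session where Command+A selects all; the class and method names are illustrative, and the review comment's observation that Keys.COMMAND appears to alias Keys.META is treated as unconfirmed here.

```java
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.interactions.Actions;

public class CommandKeyExample {
    // Press COMMAND, type "a", then release COMMAND.
    // With this patch, keyDown(Keys.COMMAND) is accepted; previously the Javadoc
    // listed only SHIFT, ALT and CONTROL, with other keys raising
    // IllegalArgumentException. The modifier is never released implicitly, so
    // keyUp (or sendKeys(Keys.NULL)) is required afterwards.
    static void selectAll(WebDriver driver) {
        new Actions(driver)
            .keyDown(Keys.COMMAND)
            .sendKeys("a")
            .keyUp(Keys.COMMAND)
            .perform(); // builds the composite action and performs it
    }
}
```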
@@ -75,6 +75,14 @@ type diskBlockCacheSetter interface { MakeDiskBlockCacheIfNotExists() error } +type diskBlockCacheFractionSetter interface { + SetDiskBlockCacheFraction(float64) +} + +type syncBlockCacheFractionSetter interface { + SetSyncBlockCacheFraction(float64) +} + type clockGetter interface { Clock() Clock }
y: 1
// Copyright 2016 Keybase Inc. All rights reserved. // Use of this source code is governed by a BSD // license that can be found in the LICENSE file. package libkbfs import ( "time" kbname "github.com/keybase/client/go/kbun" "github.com/keybase/client/go/logger" "github.com/keybase/client/go/protocol/chat1" "github.com/keybase/client/go/protocol/keybase1" "github.com/keybase/kbfs/kbfsblock" "github.com/keybase/kbfs/kbfscodec" "github.com/keybase/kbfs/kbfscrypto" "github.com/keybase/kbfs/kbfsedits" "github.com/keybase/kbfs/kbfsmd" "github.com/keybase/kbfs/tlf" metrics "github.com/rcrowley/go-metrics" "golang.org/x/net/context" billy "gopkg.in/src-d/go-billy.v4" ) type dataVersioner interface { // DataVersion returns the data version for this block DataVersion() DataVer } type logMaker interface { MakeLogger(module string) logger.Logger } type blockCacher interface { BlockCache() BlockCache } type keyGetterGetter interface { keyGetter() blockKeyGetter } type codecGetter interface { Codec() kbfscodec.Codec } type blockServerGetter interface { BlockServer() BlockServer } type cryptoPureGetter interface { cryptoPure() cryptoPure } type cryptoGetter interface { Crypto() Crypto } type chatGetter interface { Chat() Chat } type currentSessionGetterGetter interface { CurrentSessionGetter() CurrentSessionGetter } type signerGetter interface { Signer() kbfscrypto.Signer } type diskBlockCacheGetter interface { DiskBlockCache() DiskBlockCache } type diskBlockCacheSetter interface { MakeDiskBlockCacheIfNotExists() error } type clockGetter interface { Clock() Clock } type diskLimiterGetter interface { DiskLimiter() DiskLimiter } type syncedTlfGetterSetter interface { IsSyncedTlf(tlfID tlf.ID) bool SetTlfSyncState(tlfID tlf.ID, isSynced bool) error } type blockRetrieverGetter interface { BlockRetriever() BlockRetriever } // Offset is a generic representation of an offset to an indirect // pointer within an indirect Block. type Offset interface { Equals(other Offset) bool Less(other Offset) bool } // Block just needs to be (de)serialized using msgpack type Block interface { dataVersioner // GetEncodedSize returns the encoded size of this block, but only // if it has been previously set; otherwise it returns 0. GetEncodedSize() uint32 // SetEncodedSize sets the encoded size of this block, locally // caching it. The encoded size is not serialized. SetEncodedSize(size uint32) // NewEmpty returns a new block of the same type as this block NewEmpty() Block // Set sets this block to the same value as the passed-in block Set(other Block) // ToCommonBlock retrieves this block as a *CommonBlock. ToCommonBlock() *CommonBlock // IsIndirect indicates whether this block contains indirect pointers. IsIndirect() bool // OffsetExceedsData returns true if `off` is greater than the // data contained in a direct block, assuming it starts at // `startOff`. Note that the offset of the next block isn't // relevant; this function should only indicate whether the offset // is greater than what currently could be stored in this block. OffsetExceedsData(startOff, off Offset) bool // BytesCanBeDirtied returns the number of bytes that should be // marked as dirtied if this block is dirtied. BytesCanBeDirtied() int64 } // BlockWithPtrs defines methods needed for interacting with indirect // pointers. type BlockWithPtrs interface { Block // FirstOffset returns the offset of the indirect pointer that // points to the first (left-most) block in a block tree. 
FirstOffset() Offset // NumIndirectPtrs returns the number of indirect pointers in this // block. The behavior is undefined when called on a non-indirect // block. NumIndirectPtrs() int // IndirectPtr returns the block info and offset for the indirect // pointer at index `i`. The behavior is undefined when called on // a non-indirect block. IndirectPtr(i int) (BlockInfo, Offset) // AppendNewIndirectPtr appends a new indirect pointer at the // given offset. AppendNewIndirectPtr(ptr BlockPointer, off Offset) // ClearIndirectPtrSize clears the encoded size of the indirect // pointer stored at index `i`. ClearIndirectPtrSize(i int) // SetIndirectPtrType set the type of the indirect pointer stored // at index `i`. SetIndirectPtrType(i int, dt BlockDirectType) // SetIndirectPtrOff set the offset of the indirect pointer stored // at index `i`. SetIndirectPtrOff(i int, off Offset) // SetIndirectPtrInfo sets the block info of the indirect pointer // stored at index `i`. SetIndirectPtrInfo(i int, info BlockInfo) // SwapIndirectPtrs swaps the indirect ptr at `i` in this block // with the one at `otherI` in `other`. SwapIndirectPtrs(i int, other BlockWithPtrs, otherI int) } // NodeID is a unique but transient ID for a Node. That is, two Node // objects in memory at the same time represent the same file or // directory if and only if their NodeIDs are equal (by pointer). type NodeID interface { // ParentID returns the NodeID of the directory containing the // pointed-to file or directory, or nil if none exists. ParentID() NodeID } // Node represents a direct pointer to a file or directory in KBFS. // It is somewhat like an inode in a regular file system. Users of // KBFS can use Node as a handle when accessing files or directories // they have previously looked up. type Node interface { // GetID returns the ID of this Node. This should be used as a // map key instead of the Node itself. GetID() NodeID // GetFolderBranch returns the folder ID and branch for this Node. GetFolderBranch() FolderBranch // GetBasename returns the current basename of the node, or "" // if the node has been unlinked. GetBasename() string // Readonly returns true if KBFS should outright reject any write // attempts on data or directory structures of this node. Though // note that even if it returns false, KBFS can reject writes to // the node for other reasons, such as TLF permissions. An // implementation that wraps another `Node` (`inner`) must return // `inner.Readonly()` if it decides not to return `true` on its // own. Readonly(ctx context.Context) bool // ShouldCreateMissedLookup is called for Nodes representing // directories, whenever `name` is looked up but is not found in // the directory. If the Node decides a new entry should be // created matching this lookup, it should return `true` as well // as a context to use for the creation, the type of the new entry // and the symbolic link contents if the entry is a Sym; the // caller should then create this entry. Otherwise it should // return false. It may return the type `FakeDir` to indicate // that the caller should pretend the entry exists, even if it // really does not. An implementation that wraps another `Node` // (`inner`) must return `inner.ShouldCreateMissedLookup()` if it // decides not to return `true` on its own. 
ShouldCreateMissedLookup(ctx context.Context, name string) ( shouldCreate bool, newCtx context.Context, et EntryType, sympath string) // ShouldRetryOnDirRead is called for Nodes representing // directories, whenever a `Lookup` or `GetDirChildren` is done on // them. It should return true to instruct the caller that it // should re-sync its view of the directory and retry the // operation. ShouldRetryOnDirRead(ctx context.Context) bool // RemoveDir is called on a `Node` before going through the normal // `RemoveDir` flow, to give the Node a chance to handle it in a // custom way. If the `Node` handles it internally, it should // return `true`. RemoveDir(ctx context.Context, dirName string) ( removeHandled bool, err error) // WrapChild returns a wrapped version of child, if desired, to // add custom behavior to the child node. An implementation that // wraps another `Node` (`inner`) must first call // `inner.WrapChild(child)` before performing its own wrapping // operation, to ensure that all wrapping is preserved and that it // happens in the correct order. WrapChild(child Node) Node // Unwrap returns the initial, unwrapped Node that was used to // create this Node. Unwrap() Node // GetFS returns a file system interface that, if non-nil, should // be used to satisfy any directory-related calls on this Node, // instead of the standard, block-based method of acessing data. // The provided context will be used, if possible, for any // subsequent calls on the file system. GetFS(ctx context.Context) billy.Filesystem // GetFile returns a file interface that, if non-nil, should be // used to satisfy any file-related calls on this Node, instead of // the standard, block-based method of accessing data. The // provided context will be used, if possible, for any subsequent // calls on the file. GetFile(ctx context.Context) billy.File } // KBFSOps handles all file system operations. Expands all indirect // pointers. Operations that modify the server data change all the // block IDs along the path, and so must return a path with the new // BlockIds so the caller can update their references. // // KBFSOps implementations must guarantee goroutine-safety of calls on // a per-top-level-folder basis. // // There are two types of operations that could block: // * remote-sync operations, that need to synchronously update the // MD for the corresponding top-level folder. When these // operations return successfully, they will have guaranteed to // have successfully written the modification to the KBFS servers. // * remote-access operations, that don't sync any modifications to KBFS // servers, but may block on reading data from the servers. // // KBFSOps implementations are supposed to give git-like consistency // semantics for modification operations; they will be visible to // other clients immediately after the remote-sync operations succeed, // if and only if there was no other intervening modification to the // same folder. If not, the change will be sync'd to the server in a // special per-device "unmerged" area before the operation succeeds. // In this case, the modification will not be visible to other clients // until the KBFS code on this device performs automatic conflict // resolution in the background. // // All methods take a Context (see https://blog.golang.org/context), // and if that context is cancelled during the operation, KBFSOps will // abort any blocking calls and return ctx.Err(). 
Any notifications // resulting from an operation will also include this ctx (or a // Context derived from it), allowing the caller to determine whether // the notification is a result of their own action or an external // action. type KBFSOps interface { // GetFavorites returns the logged-in user's list of favorite // top-level folders. This is a remote-access operation. GetFavorites(ctx context.Context) ([]Favorite, error) // RefreshCachedFavorites tells the instances to forget any cached // favorites list and fetch a new list from the server. The // effects are asychronous; if there's an error refreshing the // favorites, the cached favorites will become empty. RefreshCachedFavorites(ctx context.Context) // AddFavorite adds the favorite to both the server and // the local cache. AddFavorite(ctx context.Context, fav Favorite) error // DeleteFavorite deletes the favorite from both the server and // the local cache. Idempotent, so it succeeds even if the folder // isn't favorited. DeleteFavorite(ctx context.Context, fav Favorite) error // GetTLFCryptKeys gets crypt key of all generations as well as // TLF ID for tlfHandle. The returned keys (the keys slice) are ordered by // generation, starting with the key for FirstValidKeyGen. GetTLFCryptKeys(ctx context.Context, tlfHandle *TlfHandle) ( keys []kbfscrypto.TLFCryptKey, id tlf.ID, err error) // GetTLFID gets the TLF ID for tlfHandle. GetTLFID(ctx context.Context, tlfHandle *TlfHandle) (tlf.ID, error) // GetTLFHandle returns the TLF handle for a given node. GetTLFHandle(ctx context.Context, node Node) (*TlfHandle, error) // GetOrCreateRootNode returns the root node and root entry // info associated with the given TLF handle and branch, if // the logged-in user has read permissions to the top-level // folder. It creates the folder if one doesn't exist yet (and // branch == MasterBranch), and the logged-in user has write // permissions to the top-level folder. This is a // remote-access operation. GetOrCreateRootNode( ctx context.Context, h *TlfHandle, branch BranchName) ( node Node, ei EntryInfo, err error) // GetRootNode is like GetOrCreateRootNode but if the root node // does not exist it will return a nil Node and not create it. GetRootNode( ctx context.Context, h *TlfHandle, branch BranchName) ( node Node, ei EntryInfo, err error) // GetDirChildren returns a map of children in the directory, // mapped to their EntryInfo, if the logged-in user has read // permission for the top-level folder. This is a remote-access // operation. GetDirChildren(ctx context.Context, dir Node) (map[string]EntryInfo, error) // Lookup returns the Node and entry info associated with a // given name in a directory, if the logged-in user has read // permissions to the top-level folder. The returned Node is nil // if the name is a symlink. This is a remote-access operation. Lookup(ctx context.Context, dir Node, name string) (Node, EntryInfo, error) // Stat returns the entry info associated with a // given Node, if the logged-in user has read permissions to the // top-level folder. This is a remote-access operation. Stat(ctx context.Context, node Node) (EntryInfo, error) // CreateDir creates a new subdirectory under the given node, if // the logged-in user has write permission to the top-level // folder. Returns the new Node for the created subdirectory, and // its new entry info. This is a remote-sync operation. 
CreateDir(ctx context.Context, dir Node, name string) ( Node, EntryInfo, error) // CreateFile creates a new file under the given node, if the // logged-in user has write permission to the top-level folder. // Returns the new Node for the created file, and its new // entry info. excl (when implemented) specifies whether this is an exclusive // create. Semantically setting excl to WithExcl is like O_CREAT|O_EXCL in a // Unix open() call. // // This is a remote-sync operation. CreateFile(ctx context.Context, dir Node, name string, isExec bool, excl Excl) ( Node, EntryInfo, error) // CreateLink creates a new symlink under the given node, if the // logged-in user has write permission to the top-level folder. // Returns the new entry info for the created symlink. This // is a remote-sync operation. CreateLink(ctx context.Context, dir Node, fromName string, toPath string) ( EntryInfo, error) // RemoveDir removes the subdirectory represented by the given // node, if the logged-in user has write permission to the // top-level folder. Will return an error if the subdirectory is // not empty. This is a remote-sync operation. RemoveDir(ctx context.Context, dir Node, dirName string) error // RemoveEntry removes the directory entry represented by the // given node, if the logged-in user has write permission to the // top-level folder. This is a remote-sync operation. RemoveEntry(ctx context.Context, dir Node, name string) error // Rename performs an atomic rename operation with a given // top-level folder if the logged-in user has write permission to // that folder, and will return an error if nodes from different // folders are passed in. Also returns an error if the new name // already has an entry corresponding to an existing directory // (only non-dir types may be renamed over). This is a // remote-sync operation. Rename(ctx context.Context, oldParent Node, oldName string, newParent Node, newName string) error // Read fills in the given buffer with data from the file at the // given node starting at the given offset, if the logged-in user // has read permission to the top-level folder. The read data // reflects any outstanding writes and truncates to that file that // have been written through this KBFSOps object, even if those // writes have not yet been sync'd. There is no guarantee that // Read returns all of the requested data; it will return the // number of bytes that it wrote to the dest buffer. Reads on an // unlinked file may or may not succeed, depending on whether or // not the data has been cached locally. If (0, nil) is returned, // that means EOF has been reached. This is a remote-access // operation. Read(ctx context.Context, file Node, dest []byte, off int64) (int64, error) // Write modifies the file at the given node, by writing the given // buffer at the given offset within the file, if the logged-in // user has write permission to the top-level folder. It // overwrites any data already there, and extends the file size as // necessary to accomodate the new data. It guarantees to write // the entire buffer in one operation. Writes on an unlinked file // may or may not succeed as no-ops, depending on whether or not // the necessary blocks have been locally cached. This is a // remote-access operation. Write(ctx context.Context, file Node, data []byte, off int64) error // Truncate modifies the file at the given node, by either // shrinking or extending its size to match the given size, if the // logged-in user has write permission to the top-level folder. 
// If extending the file, it pads the new data with 0s. Truncates // on an unlinked file may or may not succeed as no-ops, depending // on whether or not the necessary blocks have been locally // cached. This is a remote-access operation. Truncate(ctx context.Context, file Node, size uint64) error // SetEx turns on or off the executable bit on the file // represented by a given node, if the logged-in user has write // permissions to the top-level folder. This is a remote-sync // operation. SetEx(ctx context.Context, file Node, ex bool) error // SetMtime sets the modification time on the file represented by // a given node, if the logged-in user has write permissions to // the top-level folder. If mtime is nil, it is a noop. This is // a remote-sync operation. SetMtime(ctx context.Context, file Node, mtime *time.Time) error // SyncAll flushes all outstanding writes and truncates for any // dirty files to the KBFS servers within the given folder, if the // logged-in user has write permissions to the top-level folder. // If done through a file system interface, this may include // modifications done via multiple file handles. This is a // remote-sync operation. SyncAll(ctx context.Context, folderBranch FolderBranch) error // FolderStatus returns the status of a particular folder/branch, along // with a channel that will be closed when the status has been // updated (to eliminate the need for polling this method). FolderStatus(ctx context.Context, folderBranch FolderBranch) ( FolderBranchStatus, <-chan StatusUpdate, error) // Status returns the status of KBFS, along with a channel that will be // closed when the status has been updated (to eliminate the need for // polling this method). Note that this channel only applies to // connection status changes. // // KBFSStatus can be non-empty even if there is an error. Status(ctx context.Context) ( KBFSStatus, <-chan StatusUpdate, error) // UnstageForTesting clears out this device's staged state, if // any, and fast-forwards to the current head of this // folder-branch. UnstageForTesting(ctx context.Context, folderBranch FolderBranch) error // RequestRekey requests to rekey this folder. Note that this asynchronously // requests a rekey, so canceling ctx doesn't cancel the rekey. RequestRekey(ctx context.Context, id tlf.ID) // SyncFromServer blocks until the local client has contacted the // server and guaranteed that all known updates for the given // top-level folder have been applied locally (and notifications // sent out to any observers). It returns an error if this // folder-branch is currently unmerged or dirty locally. If // lockBeforeGet is non-nil, it blocks on idempotently taking the // lock from server at the time it gets any metadata. SyncFromServer(ctx context.Context, folderBranch FolderBranch, lockBeforeGet *keybase1.LockID) error // GetUpdateHistory returns a complete history of all the merged // updates of the given folder, in a data structure that's // suitable for encoding directly into JSON. This is an expensive // operation, and should only be used for ocassional debugging. // Note that the history does not include any unmerged changes or // outstanding writes from the local device. GetUpdateHistory(ctx context.Context, folderBranch FolderBranch) ( history TLFUpdateHistory, err error) // GetEditHistory returns the edit history of the TLF, clustered // by writer. 
GetEditHistory(ctx context.Context, folderBranch FolderBranch) ( tlfHistory keybase1.FSFolderEditHistory, err error) // GetNodeMetadata gets metadata associated with a Node. GetNodeMetadata(ctx context.Context, node Node) (NodeMetadata, error) // Shutdown is called to clean up any resources associated with // this KBFSOps instance. Shutdown(ctx context.Context) error // PushConnectionStatusChange updates the status of a service for // human readable connection status tracking. PushConnectionStatusChange(service string, newStatus error) // PushStatusChange causes Status listeners to be notified via closing // the status channel. PushStatusChange() // ClearPrivateFolderMD clears any cached private folder metadata, // e.g. on a logout. ClearPrivateFolderMD(ctx context.Context) // ForceFastForward forwards the nodes of all folders that have // been previously cleared with `ClearPrivateFolderMD` to their // newest version. It works asynchronously, so no error is // returned. ForceFastForward(ctx context.Context) // InvalidateNodeAndChildren sends invalidation messages for the // given node and all of its children that are currently in the // NodeCache. It's useful if the caller has outside knowledge of // data changes to that node or its children that didn't come // through the usual MD update channels (e.g., autogit nodes need // invalidation when the corresponding git repo is updated). InvalidateNodeAndChildren(ctx context.Context, node Node) error // TeamNameChanged indicates that a team has changed its name, and // we should clean up any outstanding handle info associated with // the team ID. TeamNameChanged(ctx context.Context, tid keybase1.TeamID) // TeamAbandoned indicates that a team has been abandoned, and // shouldn't be referred to by its previous name anymore. TeamAbandoned(ctx context.Context, tid keybase1.TeamID) // MigrateToImplicitTeam migrates the given folder from a private- // or public-keyed folder, to a team-keyed folder. If it's // already a private/public team-keyed folder, nil is returned. MigrateToImplicitTeam(ctx context.Context, id tlf.ID) error // KickoffAllOutstandingRekeys kicks off all outstanding rekeys. It does // nothing to folders that have not scheduled a rekey. This should be // called when we receive an event of "paper key cached" from service. KickoffAllOutstandingRekeys() error // NewNotificationChannel is called to notify any existing TLF // matching `handle` that a new kbfs-edits channel is available. NewNotificationChannel( ctx context.Context, handle *TlfHandle, convID chat1.ConversationID, channelName string) } type merkleRootGetter interface { // GetCurrentMerkleRoot returns the current root of the global // Keybase Merkle tree. GetCurrentMerkleRoot(ctx context.Context) ( keybase1.MerkleRootV2, time.Time, error) // VerifyMerkleRoot checks that the specified merkle root // contains the given KBFS root; if not, it returns an error. VerifyMerkleRoot( ctx context.Context, root keybase1.MerkleRootV2, kbfsRoot keybase1.KBFSRoot) error } type gitMetadataPutter interface { PutGitMetadata(ctx context.Context, folder keybase1.Folder, repoID keybase1.RepoID, metadata keybase1.GitLocalMetadata) error } // KeybaseService is an interface for communicating with the keybase // service. type KeybaseService interface { merkleRootGetter gitMetadataPutter // Resolve, given an assertion, resolves it to a username/UID // pair. The username <-> UID mapping is trusted and // immutable, so it can be cached. 
If the assertion is just // the username or a UID assertion, then the resolution can // also be trusted. If the returned pair is equal to that of // the current session, then it can also be // trusted. Otherwise, Identify() needs to be called on the // assertion before the assertion -> (username, UID) mapping // can be trusted. Resolve(ctx context.Context, assertion string) ( kbname.NormalizedUsername, keybase1.UserOrTeamID, error) // Identify, given an assertion, returns a UserInfo struct // with the user that matches that assertion, or an error // otherwise. The reason string is displayed on any tracker // popups spawned. Identify(ctx context.Context, assertion, reason string) ( kbname.NormalizedUsername, keybase1.UserOrTeamID, error) // NormalizeSocialAssertion creates a SocialAssertion from its input and // normalizes it. The service name will be lowercased. If the service is // case-insensitive, then the username will also be lowercased. Colon // assertions (twitter:user) will be transformed to the user@twitter // format. Only registered services are allowed. NormalizeSocialAssertion( ctx context.Context, assertion string) (keybase1.SocialAssertion, error) // ResolveIdentifyImplicitTeam resolves, and optionally // identifies, an implicit team. If the implicit team doesn't yet // exist, and doIdentifies is true, one is created. ResolveIdentifyImplicitTeam( ctx context.Context, assertions, suffix string, tlfType tlf.Type, doIdentifies bool, reason string) (ImplicitTeamInfo, error) // ResolveImplicitTeamByID resolves an implicit team to a team // name, given a team ID. ResolveImplicitTeamByID( ctx context.Context, teamID keybase1.TeamID) (string, error) // CreateTeamTLF associates the given TLF ID with the team ID in // the team's sigchain. If the team already has a TLF ID // associated with it, this overwrites it. CreateTeamTLF( ctx context.Context, teamID keybase1.TeamID, tlfID tlf.ID) error // GetTeamSettings returns the KBFS settings for the given team. GetTeamSettings(ctx context.Context, teamID keybase1.TeamID) ( keybase1.KBFSTeamSettings, error) // LoadUserPlusKeys returns a UserInfo struct for a // user with the specified UID. // If you have the UID for a user and don't require Identify to // validate an assertion or the identity of a user, use this to // get UserInfo structs as it is much cheaper than Identify. // // pollForKID, if non empty, causes `PollForKID` field to be populated, which // causes the service to poll for the given KID. This is useful during // provisioning where the provisioner needs to get the MD revision that the // provisionee has set the rekey bit on. LoadUserPlusKeys(ctx context.Context, uid keybase1.UID, pollForKID keybase1.KID) (UserInfo, error) // LoadTeamPlusKeys returns a TeamInfo struct for a team with the // specified TeamID. The caller can specify `desiredKeyGen` to // force a server check if that particular key gen isn't yet // known; it may be set to UnspecifiedKeyGen if no server check is // required. The caller can specify `desiredUID` and // `desiredRole` to force a server check if that particular UID // isn't a member of the team yet according to local caches; it // may be set to "" if no server check is required. 
LoadTeamPlusKeys(ctx context.Context, tid keybase1.TeamID, tlfType tlf.Type, desiredKeyGen kbfsmd.KeyGen, desiredUser keybase1.UserVersion, desiredKey kbfscrypto.VerifyingKey, desiredRole keybase1.TeamRole) (TeamInfo, error) // CurrentSession returns a SessionInfo struct with all the // information for the current session, or an error otherwise. CurrentSession(ctx context.Context, sessionID int) (SessionInfo, error) // FavoriteAdd adds the given folder to the list of favorites. FavoriteAdd(ctx context.Context, folder keybase1.Folder) error // FavoriteAdd removes the given folder from the list of // favorites. FavoriteDelete(ctx context.Context, folder keybase1.Folder) error // FavoriteList returns the current list of favorites. FavoriteList(ctx context.Context, sessionID int) ([]keybase1.Folder, error) // Notify sends a filesystem notification. Notify(ctx context.Context, notification *keybase1.FSNotification) error // NotifyPathUpdated sends a path updated notification. NotifyPathUpdated(ctx context.Context, path string) error // NotifySyncStatus sends a sync status notification. NotifySyncStatus(ctx context.Context, status *keybase1.FSPathSyncStatus) error // FlushUserFromLocalCache instructs this layer to clear any // KBFS-side, locally-cached information about the given user. // This does NOT involve communication with the daemon, this is // just to force future calls loading this user to fall through to // the daemon itself, rather than being served from the cache. FlushUserFromLocalCache(ctx context.Context, uid keybase1.UID) // TODO: Add CryptoClient methods, too. // EstablishMountDir asks the service for the current mount path // and sets it if not established. EstablishMountDir(ctx context.Context) (string, error) // Shutdown frees any resources associated with this // instance. No other methods may be called after this is // called. Shutdown() } // KeybaseServiceCn defines methods needed to construct KeybaseService // and Crypto implementations. type KeybaseServiceCn interface { NewKeybaseService( config Config, params InitParams, ctx Context, log logger.Logger) ( KeybaseService, error) NewCrypto( config Config, params InitParams, ctx Context, log logger.Logger) ( Crypto, error) NewChat( config Config, params InitParams, ctx Context, log logger.Logger) ( Chat, error) } type resolver interface { // Resolve, given an assertion, resolves it to a username/UID // pair. The username <-> UID mapping is trusted and // immutable, so it can be cached. If the assertion is just // the username or a UID assertion, then the resolution can // also be trusted. If the returned pair is equal to that of // the current session, then it can also be // trusted. Otherwise, Identify() needs to be called on the // assertion before the assertion -> (username, UserOrTeamID) mapping // can be trusted. // // TODO: some of the above assumptions on cacheability aren't // right for subteams, which can change their name, so this may // need updating. Resolve(ctx context.Context, assertion string) ( kbname.NormalizedUsername, keybase1.UserOrTeamID, error) // ResolveImplicitTeam resolves the given implicit team. ResolveImplicitTeam( ctx context.Context, assertions, suffix string, tlfType tlf.Type) ( ImplicitTeamInfo, error) // ResolveImplicitTeamByID resolves the given implicit team, given // a team ID. 
ResolveImplicitTeamByID( ctx context.Context, teamID keybase1.TeamID, tlfType tlf.Type) ( ImplicitTeamInfo, error) // ResolveTeamTLFID returns the TLF ID associated with a given // team ID, or tlf.NullID if no ID is yet associated with that // team. ResolveTeamTLFID(ctx context.Context, teamID keybase1.TeamID) ( tlf.ID, error) // NormalizeSocialAssertion creates a SocialAssertion from its input and // normalizes it. The service name will be lowercased. If the service is // case-insensitive, then the username will also be lowercased. Colon // assertions (twitter:user) will be transformed to the user@twitter // format. Only registered services are allowed. NormalizeSocialAssertion( ctx context.Context, assertion string) (keybase1.SocialAssertion, error) } type identifier interface { // Identify resolves an assertion (which could also be a // username) to a UserInfo struct, spawning tracker popups if // necessary. The reason string is displayed on any tracker // popups spawned. Identify(ctx context.Context, assertion, reason string) ( kbname.NormalizedUsername, keybase1.UserOrTeamID, error) // IdentifyImplicitTeam identifies (and creates if necessary) the // given implicit team. IdentifyImplicitTeam( ctx context.Context, assertions, suffix string, tlfType tlf.Type, reason string) (ImplicitTeamInfo, error) } type normalizedUsernameGetter interface { // GetNormalizedUsername returns the normalized username // corresponding to the given UID. GetNormalizedUsername(ctx context.Context, id keybase1.UserOrTeamID) ( kbname.NormalizedUsername, error) } // CurrentSessionGetter is an interface for objects that can return // session info. type CurrentSessionGetter interface { // GetCurrentSession gets the current session info. GetCurrentSession(ctx context.Context) (SessionInfo, error) } // teamMembershipChecker is a copy of kbfsmd.TeamMembershipChecker for // embedding in KBPKI. Unfortunately, this is necessary since mockgen // can't handle embedded interfaces living in other packages. type teamMembershipChecker interface { // IsTeamWriter is a copy of // kbfsmd.TeamMembershipChecker.IsTeamWriter. IsTeamWriter(ctx context.Context, tid keybase1.TeamID, uid keybase1.UID, verifyingKey kbfscrypto.VerifyingKey) (bool, error) // NoLongerTeamWriter returns the global Merkle root of the // most-recent time the given user (with the given device key, // which implies an eldest seqno) transitioned from being a writer // to not being a writer on the given team. If the user was never // a writer of the team, it returns an error. NoLongerTeamWriter( ctx context.Context, tid keybase1.TeamID, tlfType tlf.Type, uid keybase1.UID, verifyingKey kbfscrypto.VerifyingKey) ( keybase1.MerkleRootV2, error) // IsTeamReader is a copy of // kbfsmd.TeamMembershipChecker.IsTeamWriter. IsTeamReader(ctx context.Context, tid keybase1.TeamID, uid keybase1.UID) ( bool, error) } type teamKeysGetter interface { // GetTeamTLFCryptKeys gets all of a team's secret crypt keys, by // generation, as well as the latest key generation number for the // team. The caller can specify `desiredKeyGen` to force a server // check if that particular key gen isn't yet known; it may be set // to UnspecifiedKeyGen if no server check is required. GetTeamTLFCryptKeys(ctx context.Context, tid keybase1.TeamID, desiredKeyGen kbfsmd.KeyGen) ( map[kbfsmd.KeyGen]kbfscrypto.TLFCryptKey, kbfsmd.KeyGen, error) } type teamRootIDGetter interface { // GetTeamRootID returns the root team ID for the given (sub)team // ID. 
GetTeamRootID(ctx context.Context, tid keybase1.TeamID) ( keybase1.TeamID, error) } // KBPKI interacts with the Keybase daemon to fetch user info. type KBPKI interface { CurrentSessionGetter resolver identifier normalizedUsernameGetter merkleRootGetter teamMembershipChecker teamKeysGetter teamRootIDGetter gitMetadataPutter // HasVerifyingKey returns nil if the given user has the given // VerifyingKey, and an error otherwise. If the revoked key was // valid according to the untrusted server timestamps, a special // error type `RevokedDeviceVerificationError` is returned, which // includes information the caller can use to verify the key using // the merkle tree. HasVerifyingKey(ctx context.Context, uid keybase1.UID, verifyingKey kbfscrypto.VerifyingKey, atServerTime time.Time) error // GetCryptPublicKeys gets all of a user's crypt public keys (including // paper keys). GetCryptPublicKeys(ctx context.Context, uid keybase1.UID) ( []kbfscrypto.CryptPublicKey, error) // TODO: Split the methods below off into a separate // FavoriteOps interface. // FavoriteAdd adds folder to the list of the logged in user's // favorite folders. It is idempotent. FavoriteAdd(ctx context.Context, folder keybase1.Folder) error // FavoriteDelete deletes folder from the list of the logged in user's // favorite folders. It is idempotent. FavoriteDelete(ctx context.Context, folder keybase1.Folder) error // FavoriteList returns the list of all favorite folders for // the logged in user. FavoriteList(ctx context.Context) ([]keybase1.Folder, error) // CreateTeamTLF associates the given TLF ID with the team ID in // the team's sigchain. If the team already has a TLF ID // associated with it, this overwrites it. CreateTeamTLF( ctx context.Context, teamID keybase1.TeamID, tlfID tlf.ID) error // Notify sends a filesystem notification. Notify(ctx context.Context, notification *keybase1.FSNotification) error // NotifyPathUpdated sends a path updated notification. NotifyPathUpdated(ctx context.Context, path string) error } // KeyMetadata is an interface for something that holds key // information. This is usually implemented by RootMetadata. type KeyMetadata interface { // TlfID returns the ID of the TLF for which this object holds // key info. TlfID() tlf.ID // TypeForKeying returns the keying type for this MD. TypeForKeying() tlf.KeyingType // LatestKeyGeneration returns the most recent key generation // with key data in this object, or PublicKeyGen if this TLF // is public. LatestKeyGeneration() kbfsmd.KeyGen // GetTlfHandle returns the handle for the TLF. It must not // return nil. // // TODO: Remove the need for this function in this interface, // so that kbfsmd.RootMetadata can implement this interface // fully. GetTlfHandle() *TlfHandle // IsWriter checks that the given user is a valid writer of the TLF // right now. IsWriter( ctx context.Context, checker kbfsmd.TeamMembershipChecker, uid keybase1.UID, verifyingKey kbfscrypto.VerifyingKey) ( bool, error) // HasKeyForUser returns whether or not the given user has // keys for at least one device. Returns an error if the TLF // is public. HasKeyForUser(user keybase1.UID) (bool, error) // GetTLFCryptKeyParams returns all the necessary info to // construct the TLF crypt key for the given key generation, // user, and device (identified by its crypt public key), or // false if not found. This returns an error if the TLF is // public. 
GetTLFCryptKeyParams( keyGen kbfsmd.KeyGen, user keybase1.UID, key kbfscrypto.CryptPublicKey) ( kbfscrypto.TLFEphemeralPublicKey, kbfscrypto.EncryptedTLFCryptKeyClientHalf, kbfscrypto.TLFCryptKeyServerHalfID, bool, error) // StoresHistoricTLFCryptKeys returns whether or not history keys are // symmetrically encrypted; if not, they're encrypted per-device. StoresHistoricTLFCryptKeys() bool // GetHistoricTLFCryptKey attempts to symmetrically decrypt the key at the given // generation using the current generation's TLFCryptKey. GetHistoricTLFCryptKey(codec kbfscodec.Codec, keyGen kbfsmd.KeyGen, currentKey kbfscrypto.TLFCryptKey) ( kbfscrypto.TLFCryptKey, error) } // KeyMetadataWithRootDirEntry is like KeyMetadata, but can also // return the root dir entry for the associated MD update. type KeyMetadataWithRootDirEntry interface { KeyMetadata // GetRootDirEntry returns the root directory entry for the // associated MD. GetRootDirEntry() DirEntry } type encryptionKeyGetter interface { // GetTLFCryptKeyForEncryption gets the crypt key to use for // encryption (i.e., with the latest key generation) for the // TLF with the given metadata. GetTLFCryptKeyForEncryption(ctx context.Context, kmd KeyMetadata) ( kbfscrypto.TLFCryptKey, error) } type mdDecryptionKeyGetter interface { // GetTLFCryptKeyForMDDecryption gets the crypt key to use for the // TLF with the given metadata to decrypt the private portion of // the metadata. It finds the appropriate key from mdWithKeys // (which in most cases is the same as mdToDecrypt) if it's not // already cached. GetTLFCryptKeyForMDDecryption(ctx context.Context, kmdToDecrypt, kmdWithKeys KeyMetadata) ( kbfscrypto.TLFCryptKey, error) } type blockDecryptionKeyGetter interface { // GetTLFCryptKeyForBlockDecryption gets the crypt key to use // for the TLF with the given metadata to decrypt the block // pointed to by the given pointer. GetTLFCryptKeyForBlockDecryption(ctx context.Context, kmd KeyMetadata, blockPtr BlockPointer) (kbfscrypto.TLFCryptKey, error) } type blockKeyGetter interface { encryptionKeyGetter blockDecryptionKeyGetter } // KeyManager fetches and constructs the keys needed for KBFS file // operations. type KeyManager interface { blockKeyGetter mdDecryptionKeyGetter // GetTLFCryptKeyOfAllGenerations gets the crypt keys of all generations // for current devices. keys contains crypt keys from all generations, in // order, starting from FirstValidKeyGen. GetTLFCryptKeyOfAllGenerations(ctx context.Context, kmd KeyMetadata) ( keys []kbfscrypto.TLFCryptKey, err error) // Rekey checks the given MD object, if it is a private TLF, // against the current set of device keys for all valid // readers and writers. If there are any new devices, it // updates all existing key generations to include the new // devices. If there are devices that have been removed, it // creates a new epoch of keys for the TLF. If there was an // error, or the RootMetadata wasn't changed, it returns false. // Otherwise, it returns true. If a new key generation is // added the second return value points to this new key. This // is to allow for caching of the TLF crypt key only after a // successful merged write of the metadata. Otherwise we could // prematurely pollute the key cache. // // If the given MD object is a public TLF, it simply updates // the TLF's handle with any newly-resolved writers. // // If promptPaper is set, prompts for any unlocked paper keys. // promptPaper shouldn't be set if md is for a public TLF. 
Rekey(ctx context.Context, md *RootMetadata, promptPaper bool) ( bool, *kbfscrypto.TLFCryptKey, error) } // Reporter exports events (asynchronously) to any number of sinks type Reporter interface { // ReportErr records that a given error happened. ReportErr(ctx context.Context, tlfName tlf.CanonicalName, t tlf.Type, mode ErrorModeType, err error) // AllKnownErrors returns all errors known to this Reporter. AllKnownErrors() []ReportedError // Notify sends the given notification to any sink. Notify(ctx context.Context, notification *keybase1.FSNotification) // NotifyPathUpdated sends the given notification to any sink. NotifyPathUpdated(ctx context.Context, path string) // NotifySyncStatus sends the given path sync status to any sink. NotifySyncStatus(ctx context.Context, status *keybase1.FSPathSyncStatus) // Shutdown frees any resources allocated by a Reporter. Shutdown() } // MDCache gets and puts plaintext top-level metadata into the cache. type MDCache interface { // Get gets the metadata object associated with the given TLF ID, // revision number, and branch ID (kbfsmd.NullBranchID for merged MD). Get(tlf tlf.ID, rev kbfsmd.Revision, bid kbfsmd.BranchID) (ImmutableRootMetadata, error) // Put stores the metadata object, only if an MD matching that TLF // ID, revision number, and branch ID isn't already cached. If // there is already a matching item in the cache, we require that // caller manages the cache explicitly by deleting or replacing it // explicitly. This should be used when putting existing MDs // being fetched from the server. Put(md ImmutableRootMetadata) error // Delete removes the given metadata object from the cache if it exists. Delete(tlf tlf.ID, rev kbfsmd.Revision, bid kbfsmd.BranchID) // Replace replaces the entry matching the md under the old branch // ID with the new one. If the old entry doesn't exist, this is // equivalent to a Put, except that it overrides anything else // that's already in the cache. This should be used when putting // new MDs created locally. Replace(newRmd ImmutableRootMetadata, oldBID kbfsmd.BranchID) error // MarkPutToServer sets `PutToServer` to true for the specified // MD, if it already exists in the cache. MarkPutToServer(tlf tlf.ID, rev kbfsmd.Revision, bid kbfsmd.BranchID) // GetIDForHandle retrieves a cached, trusted TLF ID for the given // handle, if one exists. GetIDForHandle(handle *TlfHandle) (tlf.ID, error) // PutIDForHandle caches a trusted TLF ID for the given handle. PutIDForHandle(handle *TlfHandle, id tlf.ID) error // ChangeHandleForID moves an ID to be under a new handle, if the // ID is cached already. ChangeHandleForID(oldHandle *TlfHandle, newHandle *TlfHandle) } // KeyCache handles caching for both TLFCryptKeys and BlockCryptKeys. type KeyCache interface { // GetTLFCryptKey gets the crypt key for the given TLF. GetTLFCryptKey(tlf.ID, kbfsmd.KeyGen) (kbfscrypto.TLFCryptKey, error) // PutTLFCryptKey stores the crypt key for the given TLF. PutTLFCryptKey(tlf.ID, kbfsmd.KeyGen, kbfscrypto.TLFCryptKey) error } // BlockCacheLifetime denotes the lifetime of an entry in BlockCache. type BlockCacheLifetime int func (l BlockCacheLifetime) String() string { switch l { case NoCacheEntry: return "NoCacheEntry" case TransientEntry: return "TransientEntry" case PermanentEntry: return "PermanentEntry" } return "Unknown" } const ( // NoCacheEntry means that the entry will not be cached. NoCacheEntry BlockCacheLifetime = iota // TransientEntry means that the cache entry may be evicted at // any time. 
TransientEntry // PermanentEntry means that the cache entry must remain until // explicitly removed from the cache. PermanentEntry ) // BlockCacheSimple gets and puts plaintext dir blocks and file blocks into // a cache. These blocks are immutable and identified by their // content hash. type BlockCacheSimple interface { // Get gets the block associated with the given block ID. Get(ptr BlockPointer) (Block, error) // Put stores the final (content-addressable) block associated // with the given block ID. If lifetime is TransientEntry, // then it is assumed that the block exists on the server and // the entry may be evicted from the cache at any time. If // lifetime is PermanentEntry, then it is assumed that the // block doesn't exist on the server and must remain in the // cache until explicitly removed. As an intermediary state, // as when a block is being sent to the server, the block may // be put into the cache both with TransientEntry and // PermanentEntry -- these are two separate entries. This is // fine, since the block should be the same. Put(ptr BlockPointer, tlf tlf.ID, block Block, lifetime BlockCacheLifetime) error } // BlockCache specifies the interface of BlockCacheSimple, and also more // advanced and internal methods. type BlockCache interface { BlockCacheSimple // CheckForKnownPtr sees whether this cache has a transient // entry for the given file block, which must be a direct file // block containing data). Returns the full BlockPointer // associated with that ID, including key and data versions. // If no ID is known, return an uninitialized BlockPointer and // a nil error. CheckForKnownPtr(tlf tlf.ID, block *FileBlock) (BlockPointer, error) // DeleteTransient removes the transient entry for the given // pointer from the cache, as well as any cached IDs so the block // won't be reused. DeleteTransient(ptr BlockPointer, tlf tlf.ID) error // Delete removes the permanent entry for the non-dirty block // associated with the given block ID from the cache. No // error is returned if no block exists for the given ID. DeletePermanent(id kbfsblock.ID) error // DeleteKnownPtr removes the cached ID for the given file // block. It does not remove the block itself. DeleteKnownPtr(tlf tlf.ID, block *FileBlock) error // GetWithPrefetch retrieves a block from the cache, along with the block's // prefetch status. GetWithPrefetch(ptr BlockPointer) (block Block, prefetchStatus PrefetchStatus, lifetime BlockCacheLifetime, err error) // PutWithPrefetch puts a block into the cache, along with whether or not // it has triggered or finished a prefetch. PutWithPrefetch(ptr BlockPointer, tlf tlf.ID, block Block, lifetime BlockCacheLifetime, prefetchStatus PrefetchStatus) error // SetCleanBytesCapacity atomically sets clean bytes capacity for block // cache. SetCleanBytesCapacity(capacity uint64) // GetCleanBytesCapacity atomically gets clean bytes capacity for block // cache. GetCleanBytesCapacity() (capacity uint64) } // DirtyPermChan is a channel that gets closed when the holder has // permission to write. We are forced to define it as a type due to a // bug in mockgen that can't handle return values with a chan // struct{}. type DirtyPermChan <-chan struct{} type isDirtyProvider interface { // IsDirty states whether or not the block associated with the // given block pointer and branch name is dirty in this cache. 
IsDirty(tlfID tlf.ID, ptr BlockPointer, branch BranchName) bool } // DirtyBlockCache gets and puts plaintext dir blocks and file blocks // into a cache, which have been modified by the application and not // yet committed on the KBFS servers. They are identified by a // (potentially random) ID that may not have any relationship with // their context, along with a Branch in case the same TLF is being // modified via multiple branches. Dirty blocks are never evicted, // they must be deleted explicitly. type DirtyBlockCache interface { isDirtyProvider // Get gets the block associated with the given block ID. Returns // the dirty block for the given ID, if one exists. Get(tlfID tlf.ID, ptr BlockPointer, branch BranchName) (Block, error) // Put stores a dirty block currently identified by the // given block pointer and branch name. Put(tlfID tlf.ID, ptr BlockPointer, branch BranchName, block Block) error // Delete removes the dirty block associated with the given block // pointer and branch from the cache. No error is returned if no // block exists for the given ID. Delete(tlfID tlf.ID, ptr BlockPointer, branch BranchName) error // IsAnyDirty returns whether there are any dirty blocks in the // cache. tlfID may be ignored. IsAnyDirty(tlfID tlf.ID) bool // RequestPermissionToDirty is called whenever a user wants to // write data to a file. The caller provides an estimated number // of bytes that will become dirty -- this is difficult to know // exactly without pre-fetching all the blocks involved, but in // practice we can just use the number of bytes sent in via the // Write. It returns a channel that blocks until the cache is // ready to receive more dirty data, at which point the channel is // closed. The user must call // `UpdateUnsyncedBytes(-estimatedDirtyBytes)` once it has // completed its write and called `UpdateUnsyncedBytes` for all // the exact dirty block sizes. RequestPermissionToDirty(ctx context.Context, tlfID tlf.ID, estimatedDirtyBytes int64) (DirtyPermChan, error) // UpdateUnsyncedBytes is called by a user, who has already been // granted permission to write, with the delta in block sizes that // were dirtied as part of the write. So for example, if a // newly-dirtied block of 20 bytes was extended by 5 bytes, they // should send 25. If on the next write (before any syncs), bytes // 10-15 of that same block were overwritten, they should send 0 // over the channel because there were no new bytes. If an // already-dirtied block is truncated, or if previously requested // bytes have now been updated more accurately in previous // requests, newUnsyncedBytes may be negative. wasSyncing should // be true if `BlockSyncStarted` has already been called for this // block. UpdateUnsyncedBytes(tlfID tlf.ID, newUnsyncedBytes int64, wasSyncing bool) // UpdateSyncingBytes is called when a particular block has // started syncing, or with a negative number when a block is no // longer syncing due to an error (and BlockSyncFinished will // never be called). UpdateSyncingBytes(tlfID tlf.ID, size int64) // BlockSyncFinished is called when a particular block has // finished syncing, though the overall sync might not yet be // complete. This lets the cache know it might be able to grant // more permission to writers. BlockSyncFinished(tlfID tlf.ID, size int64) // SyncFinished is called when a complete sync has completed and // its dirty blocks have been removed from the cache. This lets // the cache know it might be able to grant more permission to // writers. 
SyncFinished(tlfID tlf.ID, size int64) // ShouldForceSync returns true if the sync buffer is full enough // to force all callers to sync their data immediately. ShouldForceSync(tlfID tlf.ID) bool // Shutdown frees any resources associated with this instance. It // returns an error if there are any unsynced blocks. Shutdown() error } // DiskBlockCache caches blocks to the disk. type DiskBlockCache interface { // Get gets a block from the disk cache. Get(ctx context.Context, tlfID tlf.ID, blockID kbfsblock.ID) ( buf []byte, serverHalf kbfscrypto.BlockCryptKeyServerHalf, prefetchStatus PrefetchStatus, err error) // Put puts a block to the disk cache. Returns after it has updated the // metadata but before it has finished writing the block. Put(ctx context.Context, tlfID tlf.ID, blockID kbfsblock.ID, buf []byte, serverHalf kbfscrypto.BlockCryptKeyServerHalf) error // Delete deletes some blocks from the disk cache. Delete(ctx context.Context, blockIDs []kbfsblock.ID) (numRemoved int, sizeRemoved int64, err error) // UpdateMetadata updates metadata for a given block in the disk cache. UpdateMetadata(ctx context.Context, blockID kbfsblock.ID, prefetchStatus PrefetchStatus) error // Status returns the current status of the disk cache. Status(ctx context.Context) map[string]DiskBlockCacheStatus // Shutdown cleanly shuts down the disk block cache. Shutdown(ctx context.Context) } // cryptoPure contains all methods of Crypto that don't depend on // implicit state, i.e. they're pure functions of the input. type cryptoPure interface { // MakeRandomTlfID generates a dir ID using a CSPRNG. MakeRandomTlfID(t tlf.Type) (tlf.ID, error) // MakeRandomBranchID generates a per-device branch ID using a // CSPRNG. It will not return LocalSquashBranchID or // kbfsmd.NullBranchID. MakeRandomBranchID() (kbfsmd.BranchID, error) // MakeTemporaryBlockID generates a temporary block ID using a // CSPRNG. This is used for indirect blocks before they're // committed to the server. MakeTemporaryBlockID() (kbfsblock.ID, error) // MakeRefNonce generates a block reference nonce using a // CSPRNG. This is used for distinguishing different references to // the same BlockID. MakeBlockRefNonce() (kbfsblock.RefNonce, error) // MakeRandomTLFEphemeralKeys generates ephemeral keys using a // CSPRNG for a TLF. These keys can then be used to key/rekey // the TLF. MakeRandomTLFEphemeralKeys() (kbfscrypto.TLFEphemeralPublicKey, kbfscrypto.TLFEphemeralPrivateKey, error) // MakeRandomTLFKeys generates keys using a CSPRNG for a // single key generation of a TLF. MakeRandomTLFKeys() (kbfscrypto.TLFPublicKey, kbfscrypto.TLFPrivateKey, kbfscrypto.TLFCryptKey, error) // MakeRandomBlockCryptKeyServerHalf generates the server-side of // a block crypt key. MakeRandomBlockCryptKeyServerHalf() ( kbfscrypto.BlockCryptKeyServerHalf, error) // EncryptPrivateMetadata encrypts a PrivateMetadata object. EncryptPrivateMetadata( pmd PrivateMetadata, key kbfscrypto.TLFCryptKey) ( kbfscrypto.EncryptedPrivateMetadata, error) // DecryptPrivateMetadata decrypts a PrivateMetadata object. DecryptPrivateMetadata( encryptedPMD kbfscrypto.EncryptedPrivateMetadata, key kbfscrypto.TLFCryptKey) (PrivateMetadata, error) // EncryptBlocks encrypts a block. plainSize is the size of the encoded // block; EncryptBlock() must guarantee that plainSize <= // len(encryptedBlock). EncryptBlock(block Block, key kbfscrypto.BlockCryptKey) ( plainSize int, encryptedBlock kbfscrypto.EncryptedBlock, err error) // DecryptBlock decrypts a block. 
Similar to EncryptBlock(), // DecryptBlock() must guarantee that (size of the decrypted // block) <= len(encryptedBlock). DecryptBlock(encryptedBlock kbfscrypto.EncryptedBlock, key kbfscrypto.BlockCryptKey, block Block) error } // Crypto signs, verifies, encrypts, and decrypts stuff. type Crypto interface { cryptoPure // Duplicate kbfscrypto.Signer here to work around gomock's // limitations. Sign(context.Context, []byte) (kbfscrypto.SignatureInfo, error) SignForKBFS(context.Context, []byte) (kbfscrypto.SignatureInfo, error) SignToString(context.Context, []byte) (string, error) // DecryptTLFCryptKeyClientHalf decrypts a // kbfscrypto.TLFCryptKeyClientHalf using the current device's // private key and the TLF's ephemeral public key. DecryptTLFCryptKeyClientHalf(ctx context.Context, publicKey kbfscrypto.TLFEphemeralPublicKey, encryptedClientHalf kbfscrypto.EncryptedTLFCryptKeyClientHalf) ( kbfscrypto.TLFCryptKeyClientHalf, error) // DecryptTLFCryptKeyClientHalfAny decrypts one of the // kbfscrypto.TLFCryptKeyClientHalf using the available // private keys and the ephemeral public key. If promptPaper // is true, the service will prompt the user for any unlocked // paper keys. DecryptTLFCryptKeyClientHalfAny(ctx context.Context, keys []EncryptedTLFCryptKeyClientAndEphemeral, promptPaper bool) ( kbfscrypto.TLFCryptKeyClientHalf, int, error) // DecryptTeamMerkleLeaf decrypts a team-encrypted Merkle leaf // using some team key generation greater than `minKeyGen`, and // the provided ephemeral public key. DecryptTeamMerkleLeaf(ctx context.Context, teamID keybase1.TeamID, publicKey kbfscrypto.TLFEphemeralPublicKey, encryptedMerkleLeaf kbfscrypto.EncryptedMerkleLeaf, minKeyGen keybase1.PerTeamKeyGeneration) ([]byte, error) // Shutdown frees any resources associated with this instance. Shutdown() } type tlfIDGetter interface { // GetIDForHandle returns the tlf.ID associated with the given // handle, if the logged-in user has read permission on the // folder. It may or may not create the folder if it doesn't // exist yet, and it may return `tlf.NullID` with a `nil` error if // it doesn't create a missing folder. GetIDForHandle(ctx context.Context, handle *TlfHandle) (tlf.ID, error) // ValidateLatestHandleForTLF returns true if the TLF ID contained // in `h` does not currently map to a finalized TLF. ValidateLatestHandleNotFinal(ctx context.Context, h *TlfHandle) ( bool, error) } // MDOps gets and puts root metadata to an MDServer. On a get, it // verifies the metadata is signed by the metadata's signing key. type MDOps interface { tlfIDGetter // GetForTLF returns the current metadata object // corresponding to the given top-level folder, if the logged-in // user has read permission on the folder. // // If lockBeforeGet is not nil, it causes mdserver to take the lock on the // lock ID before the get. GetForTLF(ctx context.Context, id tlf.ID, lockBeforeGet *keybase1.LockID) ( ImmutableRootMetadata, error) // GetForTLFByTime returns the newest merged MD update with a // server timestamp less than or equal to `serverTime`. GetForTLFByTime(ctx context.Context, id tlf.ID, serverTime time.Time) ( ImmutableRootMetadata, error) // GetUnmergedForTLF is the same as the above but for unmerged // metadata. GetUnmergedForTLF(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID) ( ImmutableRootMetadata, error) // GetRange returns a range of metadata objects corresponding to // the passed revision numbers (inclusive). 
// // If lockBeforeGet is not nil, it causes mdserver to take the lock on the // lock ID before the get. GetRange(ctx context.Context, id tlf.ID, start, stop kbfsmd.Revision, lockID *keybase1.LockID) ([]ImmutableRootMetadata, error) // GetUnmergedRange is the same as the above but for unmerged // metadata history (inclusive). GetUnmergedRange(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID, start, stop kbfsmd.Revision) ([]ImmutableRootMetadata, error) // Put stores the metadata object for the given top-level folder. // This also adds the resulting ImmutableRootMetadata object to // the mdcache, if the Put is successful. Note that constructing // the ImmutableRootMetadata requires knowing the verifying key, // which might not be the same as the local user's verifying key // if the MD has been copied from a previous update. // // If lockContext is not nil, it causes the mdserver to check a lockID at // the time of the put, and optionally (if specified in lockContext) // releases the lock on the lock ID if the put is successful. Releasing the // lock in mdserver is idempotent. Note that journalMDOps doesn't support // lockContext for now. If journaling is enabled, use FinishSingleOp to // require locks. // // The priority parameter specifies the priority of this particular MD put // operation. When conflict happens, mdserver tries to prioritize writes // with higher priorities. Caller should use pre-defined (or define new) // constants in keybase1 package, such as keybase1.MDPriorityNormal. Note // that journalMDOps doesn't support any priority other than // MDPriorityNormal for now. If journaling is enabled, use FinishSingleOp // to override priority. Put(ctx context.Context, rmd *RootMetadata, verifyingKey kbfscrypto.VerifyingKey, lockContext *keybase1.LockContext, priority keybase1.MDPriority) ( ImmutableRootMetadata, error) // PutUnmerged is the same as the above but for unmerged metadata // history. This also adds the resulting ImmutableRootMetadata // object to the mdcache, if the PutUnmerged is successful. Note // that constructing the ImmutableRootMetadata requires knowing // the verifying key, which might not be the same as the local // user's verifying key if the MD has been copied from a previous // update. PutUnmerged(ctx context.Context, rmd *RootMetadata, verifyingKey kbfscrypto.VerifyingKey) (ImmutableRootMetadata, error) // PruneBranch prunes all unmerged history for the given TLF // branch. PruneBranch(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID) error // ResolveBranch prunes all unmerged history for the given TLF // branch, and also deletes any blocks in `blocksToDelete` that // are still in the local journal. In addition, it appends the // given MD to the journal. This also adds the resulting // ImmutableRootMetadata object to the mdcache, if the // ResolveBranch is successful. Note that constructing the // ImmutableRootMetadata requires knowing the verifying key, which // might not be the same as the local user's verifying key if the // MD has been copied from a previous update. ResolveBranch(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID, blocksToDelete []kbfsblock.ID, rmd *RootMetadata, verifyingKey kbfscrypto.VerifyingKey) (ImmutableRootMetadata, error) // GetLatestHandleForTLF returns the server's idea of the latest // handle for the TLF, which may not yet be reflected in the MD if // the TLF hasn't been rekeyed since it entered into a conflicting // state.
GetLatestHandleForTLF(ctx context.Context, id tlf.ID) (tlf.Handle, error) } // KeyOps fetches server-side key halves from the key server. type KeyOps interface { // GetTLFCryptKeyServerHalf gets a server-side key half for a // device given the key half ID. GetTLFCryptKeyServerHalf(ctx context.Context, serverHalfID kbfscrypto.TLFCryptKeyServerHalfID, cryptPublicKey kbfscrypto.CryptPublicKey) ( kbfscrypto.TLFCryptKeyServerHalf, error) // PutTLFCryptKeyServerHalves stores a server-side key halves for a // set of users and devices. PutTLFCryptKeyServerHalves(ctx context.Context, keyServerHalves kbfsmd.UserDeviceKeyServerHalves) error // DeleteTLFCryptKeyServerHalf deletes a server-side key half for a // device given the key half ID. DeleteTLFCryptKeyServerHalf(ctx context.Context, uid keybase1.UID, key kbfscrypto.CryptPublicKey, serverHalfID kbfscrypto.TLFCryptKeyServerHalfID) error } // Prefetcher is an interface to a block prefetcher. type Prefetcher interface { // ProcessBlockForPrefetch potentially triggers and monitors a prefetch. ProcessBlockForPrefetch(ctx context.Context, ptr BlockPointer, block Block, kmd KeyMetadata, priority int, lifetime BlockCacheLifetime, prefetchStatus PrefetchStatus) // CancelPrefetch notifies the prefetcher that a prefetch should be // canceled. CancelPrefetch(kbfsblock.ID) // Shutdown shuts down the prefetcher idempotently. Future calls to // the various Prefetch* methods will return io.EOF. The returned channel // allows upstream components to block until all pending prefetches are // complete. This feature is mainly used for testing, but also to toggle // the prefetcher on and off. Shutdown() <-chan struct{} } // BlockOps gets and puts data blocks to a BlockServer. It performs // the necessary crypto operations on each block. type BlockOps interface { blockRetrieverGetter // Get gets the block associated with the given block pointer // (which belongs to the TLF with the given key metadata), // decrypts it if necessary, and fills in the provided block // object with its contents, if the logged-in user has read // permission for that block. cacheLifetime controls the behavior of the // write-through cache once a Get completes. Get(ctx context.Context, kmd KeyMetadata, blockPtr BlockPointer, block Block, cacheLifetime BlockCacheLifetime) error // GetEncodedSize gets the encoded size of the block associated // with the given block pointer (which belongs to the TLF with the // given key metadata). GetEncodedSize(ctx context.Context, kmd KeyMetadata, blockPtr BlockPointer) (uint32, keybase1.BlockStatus, error) // Ready turns the given block (which belongs to the TLF with // the given key metadata) into encoded (and encrypted) data, // and calculates its ID and size, so that we can do a bunch // of block puts in parallel for every write. Ready() must // guarantee that plainSize <= readyBlockData.QuotaSize(). Ready(ctx context.Context, kmd KeyMetadata, block Block) ( id kbfsblock.ID, plainSize int, readyBlockData ReadyBlockData, err error) // Delete instructs the server to delete the given block references. // It returns the number of not-yet deleted references to // each block reference Delete(ctx context.Context, tlfID tlf.ID, ptrs []BlockPointer) ( liveCounts map[kbfsblock.ID]int, err error) // Archive instructs the server to mark the given block references // as "archived"; that is, they are not being used in the current // view of the folder, and shouldn't be served to anyone other // than folder writers. 
Archive(ctx context.Context, tlfID tlf.ID, ptrs []BlockPointer) error // TogglePrefetcher activates or deactivates the prefetcher. TogglePrefetcher(enable bool) <-chan struct{} // Prefetcher retrieves this BlockOps' Prefetcher. Prefetcher() Prefetcher // Shutdown shuts down all the workers performing Get operations Shutdown() } // Duplicate kbfscrypto.AuthTokenRefreshHandler here to work around // gomock's limitations. type authTokenRefreshHandler interface { RefreshAuthToken(context.Context) } // MDServer gets and puts metadata for each top-level directory. The // instantiation should be able to fetch session/user details via KBPKI. On a // put, the server is responsible for 1) ensuring the user has appropriate // permissions for whatever modifications were made; 2) ensuring that // LastModifyingWriter and LastModifyingUser are updated appropriately; and 3) // detecting conflicting writes based on the previous root block ID (i.e., when // it supports strict consistency). On a get, it verifies the logged-in user // has read permissions. // // TODO: Add interface for searching by time type MDServer interface { authTokenRefreshHandler // GetForHandle returns the current (signed/encrypted) metadata // object corresponding to the given top-level folder's handle, if // the logged-in user has read permission on the folder. It // creates the folder if one doesn't exist yet, and the logged-in // user has permission to do so. // // If lockBeforeGet is not nil, it takes a lock on the lock ID before // trying to get anything. If taking the lock fails, an error is returned. // Note that taking a lock from the mdserver is idempotent. // // If there is no returned error, then the returned ID must // always be non-null. A nil *RootMetadataSigned may be // returned, but if it is non-nil, then its ID must match the // returned ID. GetForHandle(ctx context.Context, handle tlf.Handle, mStatus kbfsmd.MergeStatus, lockBeforeGet *keybase1.LockID) ( tlf.ID, *RootMetadataSigned, error) // GetForTLF returns the current (signed/encrypted) metadata object // corresponding to the given top-level folder, if the logged-in // user has read permission on the folder. // // If lockBeforeGet is not nil, it takes a lock on the lock ID before // trying to get anything. If taking the lock fails, an error is returned. // Note that taking a lock from the mdserver is idempotent. GetForTLF(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID, mStatus kbfsmd.MergeStatus, lockBeforeGet *keybase1.LockID) (*RootMetadataSigned, error) // GetForTLFByTime returns the earliest merged MD update with a // server timestamp equal or greater to `serverTime`. GetForTLFByTime(ctx context.Context, id tlf.ID, serverTime time.Time) ( *RootMetadataSigned, error) // GetRange returns a range of (signed/encrypted) metadata objects // corresponding to the passed revision numbers (inclusive). // // If lockBeforeGet is not nil, it takes a lock on the lock ID before // trying to get anything. If taking the lock fails, an error is returned. // Note that taking a lock from the mdserver is idempotent. GetRange(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID, mStatus kbfsmd.MergeStatus, start, stop kbfsmd.Revision, lockBeforeGet *keybase1.LockID) ( []*RootMetadataSigned, error) // Put stores the (signed/encrypted) metadata object for the given // top-level folder. Note: If the unmerged bit is set in the metadata // block's flags bitmask it will be appended to the unmerged per-device // history. 
// // If lockContext is not nil, it causes the mdserver to check a lockID at // the time of the put, and optionally (if specified in lockContext) // releases the lock on the lock ID if the put is successful. Releasing the // lock in mdserver is idempotent. Put(ctx context.Context, rmds *RootMetadataSigned, extra kbfsmd.ExtraMetadata, lockContext *keybase1.LockContext, priority keybase1.MDPriority) error // Lock ensures lockID for tlfID is taken by this session, i.e., // idempotently takes the lock. If the lock is already taken by *another* // session, mdserver returns a throttle error, causing the RPC layer at the client // to retry. So the caller of this method should observe a behavior similar to a // blocking call, which upon successful return, makes sure the lock is // taken on the server. Note that the lock expires after a certain time, so // it's important to make writes contingent on the lock by requiring the // lockID in Put. Lock(ctx context.Context, tlfID tlf.ID, lockID keybase1.LockID) error // ReleaseLock ensures lockID for tlfID is not taken by this session, i.e., // idempotently releases the lock. If the lock is already released or // expired, this is a no-op. ReleaseLock(ctx context.Context, tlfID tlf.ID, lockID keybase1.LockID) error // StartImplicitTeamMigration tells mdserver to put an implicit team // migration lock on id, which prevents any rekey MD writes from going // in. Normal classic MD updates can still happen after implicit team // migration has started, until an iTeam-style MD is written. StartImplicitTeamMigration(ctx context.Context, id tlf.ID) (err error) // PruneBranch prunes all unmerged history for the given TLF branch. PruneBranch(ctx context.Context, id tlf.ID, bid kbfsmd.BranchID) error // RegisterForUpdate tells the MD server to inform the caller when // there is a merged update with a revision number greater than // currHead, which did NOT originate from this same MD server // session. This method returns a chan which can receive only a // single error before it's closed. If the received err is nil, // then there is updated MD ready to fetch which didn't originate // locally; if it is non-nil, then the previous registration // cannot send the next notification (e.g., the connection to the // MD server may have failed). In either case, the caller must // re-register to get a new chan that can receive future update // notifications. RegisterForUpdate(ctx context.Context, id tlf.ID, currHead kbfsmd.Revision) (<-chan error, error) // CancelRegistration lets the local MDServer instance know that // we are no longer interested in updates for the specified // folder. It does not necessarily forward this cancellation to // remote servers. CancelRegistration(ctx context.Context, id tlf.ID) // CheckForRekeys initiates the rekey checking process on the // server. The server is allowed to delay this request, and so it // returns a channel for returning the error. Actual rekey // requests are expected to come in asynchronously. CheckForRekeys(ctx context.Context) <-chan error // TruncateLock attempts to take the history truncation lock for // this folder, for a TTL defined by the server. Returns true if // the lock was successfully taken. TruncateLock(ctx context.Context, id tlf.ID) (bool, error) // TruncateUnlock attempts to release the history truncation lock // for this folder. Returns true if the lock was successfully // released.
TruncateUnlock(ctx context.Context, id tlf.ID) (bool, error) // DisableRekeyUpdatesForTesting disables processing rekey updates // received from the mdserver while testing. DisableRekeyUpdatesForTesting() // Shutdown is called to shutdown an MDServer connection. Shutdown() // IsConnected returns whether the MDServer is connected. IsConnected() bool // GetLatestHandleForTLF returns the server's idea of the latest handle for the TLF, // which may not yet be reflected in the MD if the TLF hasn't been rekeyed since it // entered into a conflicting state. For the highest level of confidence, the caller // should verify the mapping with a Merkle tree lookup. GetLatestHandleForTLF(ctx context.Context, id tlf.ID) (tlf.Handle, error) // OffsetFromServerTime is the current estimate for how off our // local clock is from the mdserver clock. Add this to any // mdserver-provided timestamps to get the "local" time of the // corresponding event. If the returned bool is false, then we // don't have a current estimate for the offset. OffsetFromServerTime() (time.Duration, bool) // GetKeyBundles looks up the key bundles for the given key // bundle IDs. tlfID must be non-zero but either or both wkbID // and rkbID can be zero, in which case nil will be returned // for the respective bundle. If a bundle cannot be found, an // error is returned and nils are returned for both bundles. GetKeyBundles(ctx context.Context, tlfID tlf.ID, wkbID kbfsmd.TLFWriterKeyBundleID, rkbID kbfsmd.TLFReaderKeyBundleID) ( *kbfsmd.TLFWriterKeyBundleV3, *kbfsmd.TLFReaderKeyBundleV3, error) // CheckReachability is called when the Keybase service sends a notification // that network connectivity has changed. CheckReachability(ctx context.Context) // FastForwardBackoff fast forwards any existing backoff timer for // reconnects. If MD server is connected at the time this is called, it's // essentially a no-op. FastForwardBackoff() // FindNextMD finds the serialized (and possibly encrypted) root // metadata object from the leaf node of the second KBFS merkle // tree to be produced after a given Keybase global merkle tree // sequence number `rootSeqno` (and all merkle nodes between it // and the root, and the root itself). It also returns the global // merkle tree sequence number of the root that first included the // returned metadata object. FindNextMD(ctx context.Context, tlfID tlf.ID, rootSeqno keybase1.Seqno) ( nextKbfsRoot *kbfsmd.MerkleRoot, nextMerkleNodes [][]byte, nextRootSeqno keybase1.Seqno, err error) // GetMerkleRootLatest returns the latest KBFS merkle root for the // given tree ID. GetMerkleRootLatest(ctx context.Context, treeID keybase1.MerkleTreeID) ( root *kbfsmd.MerkleRoot, err error) } type mdServerLocal interface { MDServer addNewAssertionForTest( uid keybase1.UID, newAssertion keybase1.SocialAssertion) error getCurrentMergedHeadRevision(ctx context.Context, id tlf.ID) ( rev kbfsmd.Revision, err error) isShutdown() bool copy(config mdServerLocalConfig) mdServerLocal enableImplicitTeams() setKbfsMerkleRoot(treeID keybase1.MerkleTreeID, root *kbfsmd.MerkleRoot) } // BlockServer gets and puts opaque data blocks. The instantiation // should be able to fetch session/user details via KBPKI. On a // put/delete, the server is reponsible for: 1) checking that the ID // matches the hash of the buffer; and 2) enforcing writer quotas. 
type BlockServer interface { authTokenRefreshHandler // Get gets the (encrypted) block data associated with the given // block ID and context, uses the provided block key to decrypt // the block, and fills in the provided block object with its // contents, if the logged-in user has read permission for that // block. Get(ctx context.Context, tlfID tlf.ID, id kbfsblock.ID, context kbfsblock.Context) ( []byte, kbfscrypto.BlockCryptKeyServerHalf, error) // GetEncodedSize gets the encoded size of the block associated // with the given block pointer (which belongs to the TLF with the // given key metadata). GetEncodedSize( ctx context.Context, tlfID tlf.ID, id kbfsblock.ID, context kbfsblock.Context) (uint32, keybase1.BlockStatus, error) // Put stores the (encrypted) block data under the given ID // and context on the server, along with the server half of // the block key. context should contain a kbfsblock.RefNonce // of zero. There will be an initial reference for this block // for the given context. // // Put should be idempotent, although it should also return an // error if, for a given ID, any of the other arguments differ // from previous Put calls with the same ID. // // If this returns a kbfsblock.ServerErrorOverQuota, with // Throttled=false, the caller can treat it as informational // and otherwise ignore the error. Put(ctx context.Context, tlfID tlf.ID, id kbfsblock.ID, context kbfsblock.Context, buf []byte, serverHalf kbfscrypto.BlockCryptKeyServerHalf) error // PutAgain re-stores a previously deleted block under the same ID // with the same data. PutAgain(ctx context.Context, tlfID tlf.ID, id kbfsblock.ID, context kbfsblock.Context, buf []byte, serverHalf kbfscrypto.BlockCryptKeyServerHalf) error // AddBlockReference adds a new reference to the given block, // defined by the given context (which should contain a // non-zero kbfsblock.RefNonce). (Contexts with a // kbfsblock.RefNonce of zero should be used when putting the // block for the first time via Put().) Returns a // kbfsblock.ServerErrorBlockNonExistent if id is unknown within this // folder. // // AddBlockReference should be idempotent, although it should // also return an error if, for a given ID and refnonce, any // of the other fields of context differ from previous // AddBlockReference calls with the same ID and refnonce. // // If this returns a kbfsblock.ServerErrorOverQuota, with // Throttled=false, the caller can treat it as informational // and otherwise ignore the error. AddBlockReference(ctx context.Context, tlfID tlf.ID, id kbfsblock.ID, context kbfsblock.Context) error // RemoveBlockReferences removes the references to the given block // ID defined by the given contexts. If no references to the block // remain after this call, the server is allowed to delete the // corresponding block permanently. If the reference defined by // the count has already been removed, the call is a no-op. // It returns the number of remaining not-yet-deleted references after this // reference has been removed RemoveBlockReferences(ctx context.Context, tlfID tlf.ID, contexts kbfsblock.ContextMap) (liveCounts map[kbfsblock.ID]int, err error) // ArchiveBlockReferences marks the given block references as // "archived"; that is, they are not being used in the current // view of the folder, and shouldn't be served to anyone other // than folder writers. 
// // For a given ID/refnonce pair, ArchiveBlockReferences should // be idempotent, although it should also return an error if // any of the other fields of the context differ from previous // calls with the same ID/refnonce pair. ArchiveBlockReferences(ctx context.Context, tlfID tlf.ID, contexts kbfsblock.ContextMap) error // IsUnflushed returns whether a given block is being queued // locally for later flushing to another block server. If the // block is currently being flushed to the server, this should // return `true`, so that the caller will try to clean it up from // the server if it's no longer needed. IsUnflushed(ctx context.Context, tlfID tlf.ID, id kbfsblock.ID) ( bool, error) // Shutdown is called to shutdown a BlockServer connection. Shutdown(ctx context.Context) // GetUserQuotaInfo returns the quota for the logged-in user. GetUserQuotaInfo(ctx context.Context) (info *kbfsblock.QuotaInfo, err error) // GetTeamQuotaInfo returns the quota for a team. GetTeamQuotaInfo(ctx context.Context, tid keybase1.TeamID) ( info *kbfsblock.QuotaInfo, err error) } // blockServerLocal is the interface for BlockServer implementations // that store data locally. type blockServerLocal interface { BlockServer // getAllRefsForTest returns all the known block references // for the given TLF, and should only be used during testing. getAllRefsForTest(ctx context.Context, tlfID tlf.ID) ( map[kbfsblock.ID]blockRefMap, error) } // BlockSplitter decides when a file block needs to be split type BlockSplitter interface { // CopyUntilSplit copies data into the block until we reach the // point where we should split, but only if writing to the end of // the last block. If this is writing into the middle of a file, // just copy everything that will fit into the block, and assume // that block boundaries will be fixed later. Return how much was // copied. CopyUntilSplit( block *FileBlock, lastBlock bool, data []byte, off int64) int64 // CheckSplit, given a block, figures out whether it ends at the // right place. If so, return 0. If not, return either the // offset in the block where it should be split, or -1 if more // bytes from the next block should be appended. CheckSplit(block *FileBlock) int64 // MaxPtrsPerBlock describes the number of indirect pointers we // can fit into one indirect block. MaxPtrsPerBlock() int // ShouldEmbedBlockChanges decides whether we should keep the // block changes embedded in the MD or not. ShouldEmbedBlockChanges(bc *BlockChanges) bool // SplitDirIfNeeded splits a direct DirBlock into multiple blocks // if needed. It may modify `block`. If a split isn't needed, it // returns a one-element slice containing `block`. If a split is // needed, it returns a non-nil offset for the new block. SplitDirIfNeeded(block *DirBlock) ([]*DirBlock, *StringOffset) } // KeyServer fetches/writes server-side key halves from/to the key server. type KeyServer interface { // GetTLFCryptKeyServerHalf gets a server-side key half for a // device given the key half ID. GetTLFCryptKeyServerHalf(ctx context.Context, serverHalfID kbfscrypto.TLFCryptKeyServerHalfID, cryptPublicKey kbfscrypto.CryptPublicKey) ( kbfscrypto.TLFCryptKeyServerHalf, error) // PutTLFCryptKeyServerHalves stores a server-side key halves for a // set of users and devices. PutTLFCryptKeyServerHalves(ctx context.Context, keyServerHalves kbfsmd.UserDeviceKeyServerHalves) error // DeleteTLFCryptKeyServerHalf deletes a server-side key half for a // device given the key half ID. 
DeleteTLFCryptKeyServerHalf(ctx context.Context, uid keybase1.UID, key kbfscrypto.CryptPublicKey, serverHalfID kbfscrypto.TLFCryptKeyServerHalfID) error // Shutdown is called to free any KeyServer resources. Shutdown() } // NodeChange represents a change made to a node as part of an atomic // file system operation. type NodeChange struct { Node Node // Basenames of entries added/removed. DirUpdated []string FileUpdated []WriteRange } // Observer can be notified that there is an available update for a // given directory. The notification callbacks should not block, or // make any calls to the Notifier interface. Nodes passed to the // observer should not be held past the end of the notification // callback. type Observer interface { // LocalChange announces that the file at this Node has been // updated locally, but not yet saved at the server. LocalChange(ctx context.Context, node Node, write WriteRange) // BatchChanges announces that the nodes have all been updated // together atomically. Each NodeChange in `changes` affects the // same top-level folder and branch. `allAffectedNodeIDs` is a // list of all the nodes that had their underlying data changed, // even if it wasn't an user-visible change (e.g., if a // subdirectory was updated, the directory block for the TLF root // is updated but that wouldn't be visible to a user). BatchChanges(ctx context.Context, changes []NodeChange, allAffectedNodeIDs []NodeID) // TlfHandleChange announces that the handle of the corresponding // folder branch has changed, likely due to previously-unresolved // assertions becoming resolved. This indicates that the listener // should switch over any cached paths for this folder-branch to // the new name. Nodes that were acquired under the old name will // still continue to work, but new lookups on the old name may // either encounter alias errors or entirely new TLFs (in the case // of conflicts). TlfHandleChange(ctx context.Context, newHandle *TlfHandle) } // Notifier notifies registrants of directory changes type Notifier interface { // RegisterForChanges declares that the given Observer wants to // subscribe to updates for the given top-level folders. RegisterForChanges(folderBranches []FolderBranch, obs Observer) error // UnregisterFromChanges declares that the given Observer no // longer wants to subscribe to updates for the given top-level // folders. UnregisterFromChanges(folderBranches []FolderBranch, obs Observer) error } // Clock is an interface for getting the current time type Clock interface { // Now returns the current time. Now() time.Time } // ConflictRenamer deals with names for conflicting directory entries. type ConflictRenamer interface { // ConflictRename returns the appropriately modified filename. ConflictRename(ctx context.Context, op op, original string) ( string, error) } // Tracer maybe adds traces to contexts. type Tracer interface { // MaybeStartTrace, if tracing is on, returns a new context // based on the given one with an attached trace made with the // given family and title. Otherwise, it returns the given // context unchanged. MaybeStartTrace(ctx context.Context, family, title string) context.Context // MaybeFinishTrace, finishes the trace attached to the given // context, if any. MaybeFinishTrace(ctx context.Context, err error) } // InitMode encapsulates mode differences. type InitMode interface { // Type returns the InitModeType of this mode. Type() InitModeType // IsTestMode returns whether we are running a test. 
IsTestMode() bool // BlockWorkers returns the number of block workers to run. BlockWorkers() int // PrefetchWorkers returns the number of prefetch workers to run. PrefetchWorkers() int // RekeyWorkers returns the number of rekey workers to run. RekeyWorkers() int // RekeyQueueSize returns the size of the rekey queue. RekeyQueueSize() int // DirtyBlockCacheEnabled indicates if we should run a dirty block // cache. DirtyBlockCacheEnabled() bool // BackgroundFlushesEnabled indicates if we should periodically be // flushing unsynced dirty writes to the server or journal. BackgroundFlushesEnabled() bool // MetricsEnabled indicates if we should be collecting metrics. MetricsEnabled() bool // ConflictResolutionEnabled indicates if we should be running // the conflict resolution background process. ConflictResolutionEnabled() bool // BlockManagementEnabled indicates whether we should be running // the block archive/delete background process, and whether we // should be re-embedding block change blocks in MDs. BlockManagementEnabled() bool // QuotaReclamationEnabled indicates whether we should be running // the quota reclamation background process. QuotaReclamationEnabled() bool // QuotaReclamationPeriod indicates how often each TLF // should check for quota to reclaim. If the Duration.Seconds() // == 0, quota reclamation should not run automatically. QuotaReclamationPeriod() time.Duration // QuotaReclamationMinUnrefAge indicates the minimum time a block // must have been unreferenced before it can be reclaimed. QuotaReclamationMinUnrefAge() time.Duration // QuotaReclamationMinHeadAge indicates the minimum age of the // most recently merged MD update before we can run reclamation, // to avoid conflicting with a currently active writer. QuotaReclamationMinHeadAge() time.Duration // NodeCacheEnabled indicates whether we should be caching data nodes. NodeCacheEnabled() bool // TLFUpdatesEnabled indicates whether we should be registering // ourselves with the mdserver for TLF updates. TLFUpdatesEnabled() bool // KBFSServiceEnabled indicates whether we should launch a local // service for answering incoming KBFS-related RPCs. KBFSServiceEnabled() bool // JournalEnabled indicates whether this mode supports a journal. JournalEnabled() bool // UnmergedTLFsEnabled indicates whether it's possible for a // device in this mode to have unmerged TLFs. UnmergedTLFsEnabled() bool // ServiceKeepaliveEnabled indicates whether we need to send // keepalive probes to the Keybase service daemon. ServiceKeepaliveEnabled() bool // TLFEditHistoryEnabled indicates whether we should be running // the background TLF edit history process. TLFEditHistoryEnabled() bool // SendEditNotificationsEnabled indicates whether we should send // edit notifications on FS writes. SendEditNotificationsEnabled() bool // ClientType indicates the type we should advertise to the // Keybase service. ClientType() keybase1.ClientType } type initModeGetter interface { // Mode indicates how KBFS is configured to run. Mode() InitMode // IsTestMode() indicates whether KBFS is running in a test. IsTestMode() bool } // Config collects all the singleton instance instantiations needed to // run KBFS in one place. The methods below are self-explanatory and // do not require comments.
type Config interface { dataVersioner logMaker blockCacher blockServerGetter codecGetter cryptoPureGetter keyGetterGetter cryptoGetter chatGetter signerGetter currentSessionGetterGetter diskBlockCacheGetter diskBlockCacheSetter clockGetter diskLimiterGetter syncedTlfGetterSetter initModeGetter Tracer KBFSOps() KBFSOps SetKBFSOps(KBFSOps) KBPKI() KBPKI SetKBPKI(KBPKI) KeyManager() KeyManager SetKeyManager(KeyManager) Reporter() Reporter SetReporter(Reporter) MDCache() MDCache SetMDCache(MDCache) KeyCache() KeyCache SetKeyBundleCache(kbfsmd.KeyBundleCache) KeyBundleCache() kbfsmd.KeyBundleCache SetKeyCache(KeyCache) SetBlockCache(BlockCache) DirtyBlockCache() DirtyBlockCache SetDirtyBlockCache(DirtyBlockCache) SetCrypto(Crypto) SetChat(Chat) SetCodec(kbfscodec.Codec) MDOps() MDOps SetMDOps(MDOps) KeyOps() KeyOps SetKeyOps(KeyOps) BlockOps() BlockOps SetBlockOps(BlockOps) MDServer() MDServer SetMDServer(MDServer) SetBlockServer(BlockServer) KeyServer() KeyServer SetKeyServer(KeyServer) KeybaseService() KeybaseService SetKeybaseService(KeybaseService) BlockSplitter() BlockSplitter SetBlockSplitter(BlockSplitter) Notifier() Notifier SetNotifier(Notifier) SetClock(Clock) ConflictRenamer() ConflictRenamer SetConflictRenamer(ConflictRenamer) UserHistory() *kbfsedits.UserHistory SetUserHistory(*kbfsedits.UserHistory) MetadataVersion() kbfsmd.MetadataVer SetMetadataVersion(kbfsmd.MetadataVer) DefaultBlockType() keybase1.BlockType SetDefaultBlockType(blockType keybase1.BlockType) RekeyQueue() RekeyQueue SetRekeyQueue(RekeyQueue) // ReqsBufSize indicates the number of read or write operations // that can be buffered per folder ReqsBufSize() int // MaxNameBytes indicates the maximum supported size of a // directory entry name in bytes. MaxNameBytes() uint32 // DoBackgroundFlushes says whether we should periodically try to // flush dirty files, even without a sync from the user. Should // be true except for during some testing. DoBackgroundFlushes() bool SetDoBackgroundFlushes(bool) // RekeyWithPromptWaitTime indicates how long to wait, after // setting the rekey bit, before prompting for a paper key. RekeyWithPromptWaitTime() time.Duration SetRekeyWithPromptWaitTime(time.Duration) // PrefetchStatus returns the prefetch status of a block. PrefetchStatus(context.Context, tlf.ID, BlockPointer) PrefetchStatus // GracePeriod specifies a grace period for which a delayed cancellation // waits before actual cancels the context. This is useful for giving // critical portion of a slow remote operation some extra time to finish as // an effort to avoid conflicting. Example include an O_EXCL Create call // interrupted by ALRM signal actually makes it to the server, while // application assumes not since EINTR is returned. A delayed cancellation // allows us to distinguish between successful cancel (where remote operation // didn't make to server) or failed cancel (where remote operation made to // the server). However, the optimal value of this depends on the network // conditions. A long grace period for really good network condition would // just unnecessarily slow down Ctrl-C. // // TODO: make this adaptive and self-change over time based on network // conditions. DelayedCancellationGracePeriod() time.Duration SetDelayedCancellationGracePeriod(time.Duration) // ResetCaches clears and re-initializes all data and key caches. ResetCaches() // StorageRoot returns the path to the storage root for this config. 
StorageRoot() string // MetricsRegistry may be nil, which should be interpreted as // not using metrics at all. (i.e., as if UseNilMetrics were // set). This differs from how go-metrics treats nil Registry // objects, which is to use the default registry. MetricsRegistry() metrics.Registry SetMetricsRegistry(metrics.Registry) // SetTraceOptions set the options for tracing (via x/net/trace). SetTraceOptions(enabled bool) // TLFValidDuration is the time TLFs are valid before identification needs to be redone. TLFValidDuration() time.Duration // SetTLFValidDuration sets TLFValidDuration. SetTLFValidDuration(time.Duration) // BGFlushDirOpBatchSize returns the directory op batch size for // background flushes. BGFlushDirOpBatchSize() int // SetBGFlushDirOpBatchSize sets the directory op batch size for // background flushes. SetBGFlushDirOpBatchSize(s int) // BGFlushPeriod returns how long to wait for a batch to fill up // before syncing a set of changes to the servers. BGFlushPeriod() time.Duration // SetBGFlushPeriod sets how long to wait for a batch to fill up // before syncing a set of changes to the servers. SetBGFlushPeriod(p time.Duration) // Shutdown is called to free config resources. Shutdown(context.Context) error // CheckStateOnShutdown tells the caller whether or not it is safe // to check the state of the system on shutdown. CheckStateOnShutdown() bool // GetRekeyFSMLimiter returns the global rekey FSM limiter. GetRekeyFSMLimiter() *OngoingWorkLimiter // RootNodeWrappers returns the set of root node wrapper functions // that will be applied to each newly-created root node. RootNodeWrappers() []func(Node) Node // AddRootNodeWrapper adds a new wrapper function that will be // applied whenever a root Node is created. This will only apply // to TLFs that are first accessed after `AddRootNodeWrapper` is // called. AddRootNodeWrapper(func(Node) Node) } // NodeCache holds Nodes, and allows libkbfs to update them when // things change about the underlying KBFS blocks. It is probably // most useful to instantiate this on a per-folder-branch basis, so // that it can create a Path with the correct DirId and Branch name. type NodeCache interface { // GetOrCreate either makes a new Node for the given // BlockPointer, or returns an existing one. TODO: If we ever // support hard links, we will have to revisit the "name" and // "parent" parameters here. name must not be empty. Returns // an error if parent cannot be found. GetOrCreate(ptr BlockPointer, name string, parent Node) (Node, error) // Get returns the Node associated with the given ptr if one // already exists. Otherwise, it returns nil. Get(ref BlockRef) Node // UpdatePointer updates the BlockPointer for the corresponding // Node. NodeCache ignores this call when oldRef is not cached in // any Node. Returns whether the ID of the node that was updated, // or `nil` if nothing was updated. UpdatePointer(oldRef BlockRef, newPtr BlockPointer) NodeID // Move swaps the parent node for the corresponding Node, and // updates the node's name. NodeCache ignores the call when ptr // is not cached. If newParent is nil, it treats the ptr's // corresponding node as being unlinked from the old parent // completely. If successful, it returns a function that can be // called to undo the effect of the move (or `nil` if nothing // needs to be done); if newParent cannot be found, it returns an // error and a `nil` undo function. 
Move(ref BlockRef, newParent Node, newName string) ( undoFn func(), err error) // Unlink sets the corresponding node's parent to nil and caches // the provided path in case the node is still open. NodeCache // ignores the call when ptr is not cached. The path is required // because the caller may have made changes to the parent nodes // already that shouldn't be reflected in the cached path. It // returns a function that can be called to undo the effect of the // unlink (or `nil` if nothing needs to be done). Unlink(ref BlockRef, oldPath path, oldDe DirEntry) (undoFn func()) // IsUnlinked returns whether `Unlink` has been called for the // reference behind this node. IsUnlinked(node Node) bool // UnlinkedDirEntry returns a directory entry if `Unlink` has been // called for the reference behind this node. UnlinkedDirEntry(node Node) DirEntry // UpdateUnlinkedDirEntry modifies a cached directory entry for a // node that has already been unlinked. UpdateUnlinkedDirEntry(node Node, newDe DirEntry) // PathFromNode creates the path up to a given Node. PathFromNode(node Node) path // AllNodes returns the complete set of nodes currently in the // cache. The returned Nodes are not wrapped, and shouldn't be // used for data access. AllNodes() []Node // AllNodeChildren returns the complete set of nodes currently in // the cache, for which the given node `n` is a parent (direct or // indirect). The returned slice does not include `n` itself. // The returned Nodes are not wrapped, and shouldn't be used for // data access. AllNodeChildren(n Node) []Node // AddRootWrapper adds a new wrapper function that will be applied // whenever a root Node is created. AddRootWrapper(func(Node) Node) } // fileBlockDeepCopier fetches a file block, makes a deep copy of it // (duplicating pointers for any indirect blocks) and generates a new // random temporary block ID for it. It returns the new BlockPointer, // and internally saves the block for future uses. type fileBlockDeepCopier func(context.Context, string, BlockPointer) ( BlockPointer, error) // crAction represents a specific action to take as part of the // conflict resolution process. type crAction interface { // swapUnmergedBlock should be called before do(), and if it // returns true, the caller must use the merged block // corresponding to the returned BlockPointer instead of // unmergedBlock when calling do(). If BlockPointer{} is zeroPtr // (and true is returned), just swap in the regular mergedBlock. swapUnmergedBlock( ctx context.Context, unmergedChains, mergedChains *crChains, unmergedDir *dirData) (bool, BlockPointer, error) // do modifies the given merged `dirData` in place to resolve the // conflict, and potentially uses the provided // `fileBlockDeepCopier`s to obtain copies of other blocks (along // with new BlockPointers) when requiring a block copy. It // returns a set of block infos that need to be unreferenced as // part of this conflict resolution. do( ctx context.Context, unmergedCopier, mergedCopier fileBlockDeepCopier, unmergedDir, mergedDir *dirData) (unrefs []BlockInfo, err error) // updateOps potentially modifies, in place, the slices of // unmerged and merged operations stored in the corresponding // crChains for the given unmerged and merged most recent // pointers. Eventually, the "unmerged" ops will be pushed as // part of an MD update, and so should contain any necessary // operations to fully merge the unmerged data, including any // conflict resolution.
The "merged" ops will be played through // locally, to notify any caches about the newly-obtained merged // data (and any changes to local data that were required as part // of conflict resolution, such as renames). A few things to note: // * A particular action's updateOps method may be called more than // once for different sets of chains, however it should only add // new directory operations (like create/rm/rename) into directory // chains. // * updateOps doesn't necessarily result in correct BlockPointers within // each of those ops; that must happen in a later phase. // * mergedDir can be nil if the chain is for a file. updateOps( ctx context.Context, unmergedMostRecent, mergedMostRecent BlockPointer, unmergedDir, mergedDir *dirData, unmergedChains, mergedChains *crChains) error // String returns a string representation for this crAction, used // for debugging. String() string } // RekeyQueue is a managed queue of folders needing some rekey action taken // upon them by the current client. type RekeyQueue interface { // Enqueue enqueues a folder for rekey action. If the TLF is already in the // rekey queue, the error channel of the existing one is returned. Enqueue(tlf.ID) // IsRekeyPending returns true if the given folder is in the rekey queue. // Note that an ongoing rekey doesn't count as "pending". IsRekeyPending(tlf.ID) bool // Shutdown cancels all pending rekey actions and clears the queue. It // doesn't cancel ongoing rekeys. After Shutdown() is called, the same // RekeyQueue shouldn't be used anymore. Shutdown() } // RekeyFSM is a Finite State Machine (FSM) for housekeeping rekey states for a // FolderBranch. Each FolderBranch has its own FSM for rekeys. // // See rekey_fsm.go for implementation details. // // TODO: report FSM status in FolderBranchStatus? type RekeyFSM interface { // Event sends an event to the FSM. Event(event RekeyEvent) // Shutdown shuts down the FSM. No new event should be sent into the FSM // after this method is called. Shutdown() // listenOnEvent adds a listener (callback) to the FSM so that when // event happens, callback is called with the received event. If repeatedly // is set to false, callback is called only once. Otherwise it's called every // time event happens. // // Currently this is only used in tests and for RekeyFile. See comment for // RequestRekeyAndWaitForOneFinishEvent for more details. listenOnEvent( event rekeyEventType, callback func(RekeyEvent), repeatedly bool) } // BlockRetriever specifies how to retrieve blocks. type BlockRetriever interface { // Request retrieves blocks asynchronously. Request(ctx context.Context, priority int, kmd KeyMetadata, ptr BlockPointer, block Block, lifetime BlockCacheLifetime) <-chan error // RequestNoPrefetch retrieves blocks asynchronously, but doesn't trigger a // prefetch unless the block had to be retrieved from the server. RequestNoPrefetch(ctx context.Context, priority int, kmd KeyMetadata, ptr BlockPointer, block Block, lifetime BlockCacheLifetime) <-chan error // PutInCaches puts the block into the in-memory cache, and ensures that // the disk cache metadata is updated. PutInCaches(ctx context.Context, ptr BlockPointer, tlfID tlf.ID, block Block, lifetime BlockCacheLifetime, prefetchStatus PrefetchStatus) error // TogglePrefetcher creates a new prefetcher. TogglePrefetcher(enable bool, syncCh <-chan struct{}) <-chan struct{} } // ChatChannelNewMessageCB is a callback function that can be called // when there's a new message on a given conversation. 
type ChatChannelNewMessageCB func(convID chat1.ConversationID, body string) // Chat specifies a minimal interface for Keybase chatting. type Chat interface { // GetConversationID returns the chat conversation ID associated // with the given TLF name, type, chat type and channel name. GetConversationID( ctx context.Context, tlfName tlf.CanonicalName, tlfType tlf.Type, channelName string, chatType chat1.TopicType) ( chat1.ConversationID, error) // SendTextMessage (asynchronously) sends a text chat message to // the given conversation and channel. SendTextMessage( ctx context.Context, tlfName tlf.CanonicalName, tlfType tlf.Type, convID chat1.ConversationID, body string) error // GetGroupedInbox returns the TLFs with the most-recent chat // messages of the given type, up to `maxChats` of them. GetGroupedInbox( ctx context.Context, chatType chat1.TopicType, maxChats int) ( []*TlfHandle, error) // GetChannels returns a list of all the channels for a given // chat. The entries in `convIDs` and `channelNames` have a 1-to-1 // correspondence. GetChannels( ctx context.Context, tlfName tlf.CanonicalName, tlfType tlf.Type, chatType chat1.TopicType) ( convIDs []chat1.ConversationID, channelNames []string, err error) // ReadChannel returns a set of text messages from a channel, and // a `nextPage` pointer to the following set of messages. If the // given `startPage` is non-nil, it's used to specify the starting // point for the set of messages returned. ReadChannel( ctx context.Context, convID chat1.ConversationID, startPage []byte) ( messages []string, nextPage []byte, err error) // RegisterForMessages registers a callback that will be called // for each new messages that reaches convID. RegisterForMessages(convID chat1.ConversationID, cb ChatChannelNewMessageCB) // ClearCache is called to force this instance to forget // everything it might have cached, e.g. when a user logs out. ClearCache() }
1
20,324
This should go in `ConfigLocal` (and then re-generate the mocks).
keybase-kbfs
go
@@ -51,10 +51,10 @@ class releaseTestCase(SparkTestCase): if entry['release_name'] != '': expected[entry['user_name']].append({ 'release_name': entry['release_name'], - 'release_msid': entry['release_msid'], - 'release_mbid': entry['release_mbid'], + 'release_msid': entry['release_msid'] or None, + 'release_mbid': entry['release_mbid'] or None, 'artist_name': entry['artist_name'], - 'artist_msid': entry['artist_msid'], + 'artist_msid': entry['artist_msid'] or None, 'artist_mbids': entry['artist_mbids'], 'listen_count': entry['count'] })
1
import json import os from collections import defaultdict from datetime import datetime import listenbrainz_spark.stats.user.release as release_stats from listenbrainz_spark import utils from listenbrainz_spark.path import LISTENBRAINZ_DATA_DIRECTORY from listenbrainz_spark.tests import SparkTestCase from pyspark.sql import Row class releaseTestCase(SparkTestCase): # use path_ as prefix for all paths in this class. path_ = LISTENBRAINZ_DATA_DIRECTORY def tearDown(self): path_found = utils.path_exists(self.path_) if path_found: utils.delete_dir(self.path_, recursive=True) def save_dataframe(self, filename): now = datetime.now() with open(self.path_to_data_file(filename)) as f: data = json.load(f) df = None for entry in data: for idx in range(0, entry['count']): # Assign listened_at to each listen row = utils.create_dataframe(Row(user_name=entry['user_name'], release_name=entry['release_name'], release_msid=entry['release_msid'], release_mbid=entry['release_mbid'], artist_name=entry['artist_name'], artist_msid=entry['artist_msid'], artist_mbids=entry['artist_mbids']), schema=None) df = df.union(row) if df else row utils.save_parquet(df, os.path.join(self.path_, '{}/{}.parquet'.format(now.year, now.month))) def test_get_releases(self): self.save_dataframe('user_top_releases.json') df = utils.get_listens(datetime.now(), datetime.now(), self.path_) df.createOrReplaceTempView('test_view') expected = defaultdict(list) with open(self.path_to_data_file('user_top_releases.json')) as f: data = json.load(f) for entry in data: if entry['release_name'] != '': expected[entry['user_name']].append({ 'release_name': entry['release_name'], 'release_msid': entry['release_msid'], 'release_mbid': entry['release_mbid'], 'artist_name': entry['artist_name'], 'artist_msid': entry['artist_msid'], 'artist_mbids': entry['artist_mbids'], 'listen_count': entry['count'] }) # Sort in descending order w.r.t to listen_count for user_name, user_releases in expected.items(): user_releases.sort(key=lambda release: release['listen_count'], reverse=True) data = release_stats.get_releases('test_view') received = defaultdict(list) for entry in data: _dict = entry.asDict(recursive=True) received[_dict['user_name']] = _dict['releases'] self.assertDictEqual(received, expected) def test_get_releases_empty(self): self.save_dataframe('user_top_releases_empty.json') df = utils.get_listens(datetime.now(), datetime.now(), self.path_) df.createOrReplaceTempView('test_view') with open(self.path_to_data_file('user_top_releases.json')) as f: data = json.load(f) received = defaultdict(list) data = release_stats.get_releases('test_view') for entry in data: _dict = entry.asDict(recursive=True) received[_dict['user_name']] = _dict['releases'] self.assertDictEqual(received, {})
1
16,562
Nitty, but you could use `entry.get` for the same thing.
metabrainz-listenbrainz-server
py
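The note above suggests `entry.get` in place of the `entry['...'] or None` pattern in the patch. A minimal sketch of the difference, using a made-up entry dict rather than anything from the test data:

    # dict.get returns None when the key is missing entirely;
    # adding `or None` also maps empty strings (and other falsy values) to None,
    # which is what the `entry['release_msid'] or None` form in the patch does.
    entry = {'release_msid': '', 'artist_name': 'Some Artist'}
    release_msid = entry.get('release_msid') or None   # -> None (empty string)
    release_mbid = entry.get('release_mbid') or None   # -> None (key absent)
    artist_name = entry.get('artist_name') or None     # -> 'Some Artist'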
@@ -10,11 +10,12 @@ import ( "github.com/opencontainers/runc/libcontainer/logs" _ "github.com/opencontainers/runc/libcontainer/nsenter" "github.com/sirupsen/logrus" - "github.com/urfave/cli" ) func init() { if len(os.Args) > 1 && os.Args[1] == "init" { + // This is the golang entry point for runc init, executed + // before main() but after libcontainer/nsenter's nsexec(). runtime.GOMAXPROCS(1) runtime.LockOSThread()
1
package main import ( "fmt" "os" "runtime" "strconv" "github.com/opencontainers/runc/libcontainer" "github.com/opencontainers/runc/libcontainer/logs" _ "github.com/opencontainers/runc/libcontainer/nsenter" "github.com/sirupsen/logrus" "github.com/urfave/cli" ) func init() { if len(os.Args) > 1 && os.Args[1] == "init" { runtime.GOMAXPROCS(1) runtime.LockOSThread() level := os.Getenv("_LIBCONTAINER_LOGLEVEL") logLevel, err := logrus.ParseLevel(level) if err != nil { panic(fmt.Sprintf("libcontainer: failed to parse log level: %q: %v", level, err)) } logPipeFdStr := os.Getenv("_LIBCONTAINER_LOGPIPE") logPipeFd, err := strconv.Atoi(logPipeFdStr) if err != nil { panic(fmt.Sprintf("libcontainer: failed to convert environment variable _LIBCONTAINER_LOGPIPE=%s to int: %s", logPipeFdStr, err)) } err = logs.ConfigureLogging(logs.Config{ LogPipeFd: logPipeFd, LogFormat: "json", LogLevel: logLevel, }) if err != nil { panic(fmt.Sprintf("libcontainer: failed to configure logging: %v", err)) } logrus.Debug("child process in init()") } } var initCommand = cli.Command{ Name: "init", Usage: `initialize the namespaces and launch the process (do not call it outside of runc)`, Action: func(context *cli.Context) error { factory, _ := libcontainer.New("") if err := factory.StartInitialization(); err != nil { // as the error is sent back to the parent there is no need to log // or write it to stderr because the parent process will handle this os.Exit(1) } panic("libcontainer: container init failed to exec") }, }
1
24,353
Might not hurt to mention that the function never returns (since this all ends in `execve`), so `main` never actually runs.
opencontainers-runc
go
@@ -100,6 +100,11 @@ namespace Microsoft.AspNetCore.Server.Kestrel "ThreadCount must be positive."); } + if (!Constants.ECONNRESET.HasValue) + { + _logger.LogWarning("Unable to determine ECONNRESET value on this platform."); + } + if (!Constants.EADDRINUSE.HasValue) { _logger.LogWarning("Unable to determine EADDRINUSE value on this platform.");
1
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting.Server; using Microsoft.AspNetCore.Hosting.Server.Features; using Microsoft.AspNetCore.Http.Features; using Microsoft.AspNetCore.Server.Kestrel.Internal; using Microsoft.AspNetCore.Server.Kestrel.Internal.Http; using Microsoft.AspNetCore.Server.Kestrel.Internal.Infrastructure; using Microsoft.AspNetCore.Server.Kestrel.Internal.Networking; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; namespace Microsoft.AspNetCore.Server.Kestrel { public class KestrelServer : IServer { private Stack<IDisposable> _disposables; private readonly IApplicationLifetime _applicationLifetime; private readonly ILogger _logger; private readonly IServerAddressesFeature _serverAddresses; public KestrelServer(IOptions<KestrelServerOptions> options, IApplicationLifetime applicationLifetime, ILoggerFactory loggerFactory) { if (options == null) { throw new ArgumentNullException(nameof(options)); } if (applicationLifetime == null) { throw new ArgumentNullException(nameof(applicationLifetime)); } if (loggerFactory == null) { throw new ArgumentNullException(nameof(loggerFactory)); } Options = options.Value ?? new KestrelServerOptions(); _applicationLifetime = applicationLifetime; _logger = loggerFactory.CreateLogger(typeof(KestrelServer).GetTypeInfo().Namespace); Features = new FeatureCollection(); _serverAddresses = new ServerAddressesFeature(); Features.Set<IServerAddressesFeature>(_serverAddresses); } public IFeatureCollection Features { get; } public KestrelServerOptions Options { get; } public void Start<TContext>(IHttpApplication<TContext> application) { try { if (!BitConverter.IsLittleEndian) { throw new PlatformNotSupportedException("Kestrel does not support big-endian architectures."); } ValidateOptions(); if (_disposables != null) { // The server has already started and/or has not been cleaned up yet throw new InvalidOperationException("Server has already started."); } _disposables = new Stack<IDisposable>(); var dateHeaderValueManager = new DateHeaderValueManager(); var trace = new KestrelTrace(_logger); var engine = new KestrelEngine(new ServiceContext { FrameFactory = context => { return new Frame<TContext>(application, context); }, AppLifetime = _applicationLifetime, Log = trace, ThreadPool = new LoggingThreadPool(trace), DateHeaderValueManager = dateHeaderValueManager, ServerOptions = Options }); _disposables.Push(engine); _disposables.Push(dateHeaderValueManager); var threadCount = Options.ThreadCount; if (threadCount <= 0) { throw new ArgumentOutOfRangeException(nameof(threadCount), threadCount, "ThreadCount must be positive."); } if (!Constants.EADDRINUSE.HasValue) { _logger.LogWarning("Unable to determine EADDRINUSE value on this platform."); } engine.Start(threadCount); var atLeastOneListener = false; foreach (var address in _serverAddresses.Addresses.ToArray()) { var parsedAddress = ServerAddress.FromUrl(address); atLeastOneListener = true; if (!parsedAddress.Host.Equals("localhost", StringComparison.OrdinalIgnoreCase)) { try { _disposables.Push(engine.CreateServer(parsedAddress)); } catch (AggregateException ex) { if ((ex.InnerException as UvException)?.StatusCode == Constants.EADDRINUSE) { throw new IOException($"Failed to bind to 
address {parsedAddress}: address already in use.", ex); } throw; } } else { if (parsedAddress.Port == 0) { throw new InvalidOperationException("Dynamic port binding is not supported when binding to localhost. You must either bind to 127.0.0.1:0 or [::1]:0, or both."); } var ipv4Address = parsedAddress.WithHost("127.0.0.1"); var exceptions = new List<Exception>(); try { _disposables.Push(engine.CreateServer(ipv4Address)); } catch (AggregateException ex) when (ex.InnerException is UvException) { if ((ex.InnerException as UvException).StatusCode == Constants.EADDRINUSE) { throw new IOException($"Failed to bind to address {parsedAddress.ToString()} on the IPv4 loopback interface: port already in use.", ex); } else { _logger.LogWarning(0, ex, $"Unable to bind to {parsedAddress.ToString()} on the IPv4 loopback interface."); exceptions.Add(ex.InnerException); } } var ipv6Address = parsedAddress.WithHost("[::1]"); try { _disposables.Push(engine.CreateServer(ipv6Address)); } catch (AggregateException ex) when (ex.InnerException is UvException) { if ((ex.InnerException as UvException).StatusCode == Constants.EADDRINUSE) { throw new IOException($"Failed to bind to address {parsedAddress.ToString()} on the IPv6 loopback interface: port already in use.", ex); } else { _logger.LogWarning(0, ex, $"Unable to bind to {parsedAddress.ToString()} on the IPv6 loopback interface."); exceptions.Add(ex.InnerException); } } if (exceptions.Count == 2) { throw new IOException($"Failed to bind to address {parsedAddress.ToString()}.", new AggregateException(exceptions)); } } // If requested port was "0", replace with assigned dynamic port. _serverAddresses.Addresses.Remove(address); _serverAddresses.Addresses.Add(parsedAddress.ToString()); } if (!atLeastOneListener) { throw new InvalidOperationException("No recognized listening addresses were configured."); } } catch (Exception ex) { _logger.LogCritical(0, ex, "Unable to start Kestrel."); Dispose(); throw; } } public void Dispose() { if (_disposables != null) { while (_disposables.Count > 0) { _disposables.Pop().Dispose(); } _disposables = null; } } private void ValidateOptions() { if (Options.Limits.MaxRequestBufferSize.HasValue && Options.Limits.MaxRequestBufferSize < Options.Limits.MaxRequestLineSize) { throw new InvalidOperationException( $"Maximum request buffer size ({Options.Limits.MaxRequestBufferSize.Value}) must be greater than or equal to maximum request line size ({Options.Limits.MaxRequestLineSize})."); } } } }
1
10,759
nit: add a new line after this block
aspnet-KestrelHttpServer
.cs
@@ -1111,10 +1111,10 @@ Blockly.Css.CONTENT = [ '}', '.scratchCategoryMenu {', - 'width: 60px;', + 'width: 3.25rem;', 'background: $colour_toolbox;', 'color: $colour_toolboxText;', - 'font-size: .7em;', + 'font-size: .7rem;', 'user-select: none;', '-webkit-user-select: none;', '-moz-user-select: none;',
1
/** * @license * Visual Blocks Editor * * Copyright 2013 Google Inc. * https://developers.google.com/blockly/ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @fileoverview Inject Blockly's CSS synchronously. * @author [email protected] (Neil Fraser) */ 'use strict'; /** * @name Blockly.Css * @namespace */ goog.provide('Blockly.Css'); goog.require('Blockly.Colours'); goog.require('goog.userAgent'); /** * List of cursors. * @enum {string} */ Blockly.Css.Cursor = { OPEN: 'handopen', CLOSED: 'handclosed', DELETE: 'handdelete' }; /** * Current cursor (cached value). * @type {string} * @private */ Blockly.Css.currentCursor_ = ''; /** * Large stylesheet added by Blockly.Css.inject. * @type {Element} * @private */ Blockly.Css.styleSheet_ = null; /** * Path to media directory, with any trailing slash removed. * @type {string} * @private */ Blockly.Css.mediaPath_ = ''; /** * Inject the CSS into the DOM. This is preferable over using a regular CSS * file since: * a) It loads synchronously and doesn't force a redraw later. * b) It speeds up loading by not blocking on a separate HTTP transfer. * c) The CSS content may be made dynamic depending on init options. * @param {boolean} hasCss If false, don't inject CSS * (providing CSS becomes the document's responsibility). * @param {string} pathToMedia Path from page to the Blockly media directory. */ Blockly.Css.inject = function(hasCss, pathToMedia) { // Only inject the CSS once. if (Blockly.Css.styleSheet_) { return; } // Placeholder for cursor rule. Must be first rule (index 0). var text = '.blocklyDraggable {}\n'; if (hasCss) { text += Blockly.Css.CONTENT.join('\n'); if (Blockly.FieldDate) { text += Blockly.FieldDate.CSS.join('\n'); } } // Strip off any trailing slash (either Unix or Windows). Blockly.Css.mediaPath_ = pathToMedia.replace(/[\\\/]$/, ''); text = text.replace(/<<<PATH>>>/g, Blockly.Css.mediaPath_); // Dynamically replace colours in the CSS text, in case they have // been set at run-time injection. for (var colourProperty in Blockly.Colours) { if (Blockly.Colours.hasOwnProperty(colourProperty)) { // Replace all text = text.replace( new RegExp('\\$colour\\_' + colourProperty, 'g'), Blockly.Colours[colourProperty] ); } } // Inject CSS tag at start of head. var cssNode = document.createElement('style'); document.head.insertBefore(cssNode, document.head.firstChild); var cssTextNode = document.createTextNode(text); cssNode.appendChild(cssTextNode); Blockly.Css.styleSheet_ = cssNode.sheet; }; /** * Set the cursor to be displayed when over something draggable. * See See https://github.com/google/blockly/issues/981 for context. * @param {Blockly.Css.Cursor} cursor Enum. * @deprecated April 2017. */ Blockly.Css.setCursor = function(cursor) { console.warn('Deprecated call to Blockly.Css.setCursor.' + 'See https://github.com/google/blockly/issues/981 for context'); }; /** * Array making up the CSS content for Blockly. 
*/ Blockly.Css.CONTENT = [ '.blocklySvg {', 'background-color: $colour_workspace;', 'outline: none;', 'overflow: hidden;', /* IE overflows by default. */ 'position: absolute;', 'display: block;', '}', /* Necessary to position the drag surface */ '.blocklyRelativeWrapper {', 'position: relative;', 'width: 100%;', 'height: 100%;', '}', '.blocklyWidgetDiv {', 'display: none;', 'position: absolute;', 'z-index: 99999;', /* big value for bootstrap3 compatibility */ '}', '.injectionDiv {', 'height: 100%;', 'position: relative;', 'overflow: hidden;', /* So blocks in drag surface disappear at edges */ 'touch-action: none', '}', '.blocklyNonSelectable {', 'user-select: none;', '-moz-user-select: none;', '-webkit-user-select: none;', '-ms-user-select: none;', '}', '.blocklyWidgetDiv.fieldTextInput {', 'overflow: hidden;', 'border: 1px solid;', 'box-sizing: border-box;', 'transform-origin: 0 0;', '-ms-transform-origin: 0 0;', '-moz-transform-origin: 0 0;', '-webkit-transform-origin: 0 0;', '}', '.blocklyTextDropDownArrow {', 'position: absolute;', '}', '.blocklyNonSelectable {', 'user-select: none;', '-moz-user-select: none;', '-webkit-user-select: none;', '-ms-user-select: none;', '}', '.blocklyWsDragSurface {', 'display: none;', 'position: absolute;', 'top: 0;', 'left: 0;', '}', /* Added as a separate rule with multiple classes to make it more specific than a bootstrap rule that selects svg:root. See issue #1275 for context. */ '.blocklyWsDragSurface.blocklyOverflowVisible {', 'overflow: visible;', '}', '.blocklyBlockDragSurface {', 'display: none;', 'position: absolute;', 'top: 0;', 'left: 0;', 'right: 0;', 'bottom: 0;', 'overflow: visible !important;', 'z-index: 50;', /* Display above the toolbox */ '}', '.blocklyTooltipDiv {', 'background-color: #ffffc7;', 'border: 1px solid #ddc;', 'box-shadow: 4px 4px 20px 1px rgba(0,0,0,.15);', 'color: #000;', 'display: none;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 9pt;', 'opacity: 0.9;', 'padding: 2px;', 'position: absolute;', 'z-index: 100000;', /* big value for bootstrap3 compatibility */ '}', '.blocklyDropDownDiv {', 'position: fixed;', 'left: 0;', 'top: 0;', 'z-index: 1000;', 'display: none;', 'border: 1px solid;', 'border-radius: 4px;', 'box-shadow: 0px 0px 8px 1px ' + Blockly.Colours.dropDownShadow + ';', 'padding: 4px;', '-webkit-user-select: none;', '}', '.blocklyDropDownContent {', 'max-height: 300px;', // @todo: spec for maximum height. 
'overflow: auto;', '}', '.blocklyDropDownArrow {', 'position: absolute;', 'left: 0;', 'top: 0;', 'width: 16px;', 'height: 16px;', 'z-index: -1;', 'background-color: inherit;', 'border-color: inherit;', '}', '.blocklyDropDownButton {', 'display: inline-block;', 'float: left;', 'padding: 0;', 'margin: 4px;', 'border-radius: 4px;', 'outline: none;', 'border: 1px solid;', 'transition: box-shadow .1s;', 'cursor: pointer;', '}', '.blocklyDropDownButtonHover {', 'box-shadow: 0px 0px 0px 4px ' + Blockly.Colours.fieldShadow + ';', '}', '.blocklyDropDownButton:active {', 'box-shadow: 0px 0px 0px 6px ' + Blockly.Colours.fieldShadow + ';', '}', '.blocklyDropDownButton > img {', 'width: 80%;', 'height: 80%;', 'margin-top: 5%', '}', '.blocklyDropDownPlaceholder {', 'display: inline-block;', 'float: left;', 'padding: 0;', 'margin: 4px;', '}', '.blocklyNumPadButton {', 'display: inline-block;', 'float: left;', 'padding: 0;', 'width: 48px;', 'height: 48px;', 'margin: 4px;', 'border-radius: 4px;', 'background: $colour_numPadBackground;', 'color: $colour_numPadText;', 'outline: none;', 'border: 1px solid $colour_numPadBorder;', 'cursor: pointer;', 'font-weight: 600;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 12pt;', '-webkit-tap-highlight-color: rgba(0,0,0,0);', '}', '.blocklyNumPadButton > img {', 'margin-top: 10%;', 'width: 80%;', 'height: 80%;', '}', '.blocklyNumPadButton:active {', 'background: $colour_numPadActiveBackground;', '-webkit-tap-highlight-color: rgba(0,0,0,0);', '}', '.arrowTop {', 'border-top: 1px solid;', 'border-left: 1px solid;', 'border-top-left-radius: 4px;', 'border-color: inherit;', '}', '.arrowBottom {', 'border-bottom: 1px solid;', 'border-right: 1px solid;', 'border-bottom-right-radius: 4px;', 'border-color: inherit;', '}', '.valueReportBox {', 'min-width: 50px;', 'max-width: 300px;', 'max-height: 200px;', 'overflow: auto;', 'word-wrap: break-word;', 'text-align: center;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: .8em;', '}', '.blocklyResizeSE {', 'cursor: se-resize;', 'fill: #aaa;', '}', '.blocklyResizeSW {', 'cursor: sw-resize;', 'fill: #aaa;', '}', '.blocklyResizeLine {', 'stroke: #888;', 'stroke-width: 1;', '}', '.blocklyHighlightedConnectionPath {', 'fill: none;', 'stroke: #fc3;', 'stroke-width: 4px;', '}', '.blocklyPath {', 'stroke-width: 1px;', '}', '.blocklySelected>.blocklyPath {', // 'stroke: #fc3;', // 'stroke-width: 3px;', '}', '.blocklySelected>.blocklyPathLight {', 'display: none;', '}', '.blocklyDraggable {', /* backup for browsers (e.g. IE11) that don't support grab */ 'cursor: url("<<<PATH>>>/handopen.cur"), auto;', 'cursor: grab;', 'cursor: -webkit-grab;', 'cursor: -moz-grab;', '}', '.blocklyDragging {', /* backup for browsers (e.g. IE11) that don't support grabbing */ 'cursor: url("<<<PATH>>>/handclosed.cur"), auto;', 'cursor: grabbing;', 'cursor: -webkit-grabbing;', 'cursor: -moz-grabbing;', '}', /* Changes cursor on mouse down. Not effective in Firefox because of https://bugzilla.mozilla.org/show_bug.cgi?id=771241 */ '.blocklyDraggable:active {', /* backup for browsers (e.g. IE11) that don't support grabbing */ 'cursor: url("<<<PATH>>>/handclosed.cur"), auto;', 'cursor: grabbing;', 'cursor: -webkit-grabbing;', 'cursor: -moz-grabbing;', '}', /* Change the cursor on the whole drag surface in case the mouse gets ahead of block during a drag. This way the cursor is still a closed hand. */ '.blocklyBlockDragSurface .blocklyDraggable {', /* backup for browsers (e.g. 
IE11) that don't support grabbing */ 'cursor: url("<<<PATH>>>/handclosed.cur"), auto;', 'cursor: grabbing;', 'cursor: -webkit-grabbing;', 'cursor: -moz-grabbing;', '}', '.blocklyDragging.blocklyDraggingDelete {', 'cursor: url("<<<PATH>>>/handdelete.cur"), auto;', '}', '.blocklyToolboxDelete {', 'cursor: url("<<<PATH>>>/handdelete.cur"), auto;', '}', '.blocklyDragging>.blocklyPath,', '.blocklyDragging>.blocklyPathLight {', 'fill-opacity: 1.0;', 'stroke-opacity: 1.0;', '}', '.blocklyDragging>.blocklyPath {', '}', '.blocklyDisabled>.blocklyPath {', 'fill-opacity: .5;', 'stroke-opacity: .5;', '}', '.blocklyInsertionMarker>.blocklyPath {', 'stroke: none;', '}', '.blocklyText {', 'fill: #fff;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 12pt;', 'font-weight: 500;', '}', '.blocklyTextTruncated {', 'font-size: 11pt;', '}', '.blocklyNonEditableText>text {', 'pointer-events: none;', '}', '.blocklyNonEditableText>text,', '.blocklyEditableText>text {', 'fill: $colour_text;', '}', '.blocklyDropdownText {', 'fill: #fff !important;', '}', '.blocklyBubbleText {', 'fill: $colour_text;', '}', '.blocklyFlyout {', 'position: absolute;', 'z-index: 20;', '}', '.blocklyFlyout {', 'position: absolute;', 'z-index: 20;', '}', '.blocklyFlyoutButton {', 'fill: none;', '}', '.blocklyFlyoutButtonBackground {', 'stroke: #c6c6c6;', '}', '.blocklyFlyoutButton .blocklyText {', 'fill: $colour_text;', '}', '.blocklyFlyoutButtonShadow {', 'fill: none;', '}', '.blocklyFlyoutButton:hover {', 'fill: white;', 'cursor: pointer;', '}', '.blocklyFlyoutLabel {', 'cursor: default;', '}', '.blocklyFlyoutLabelBackground {', 'opacity: 0;', '}', '.blocklyFlyoutLabelText {', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 14pt;', 'fill: #575E75;', 'font-weight: bold;', '}', /* Don't allow users to select text. It gets annoying when trying to drag a block and selected text moves instead. */ '.blocklySvg text, .blocklyBlockDragSurface text, .blocklyFlyout text, .blocklyToolboxDiv text {', 'user-select: none;', '-moz-user-select: none;', '-webkit-user-select: none;', 'cursor: inherit;', '}', '.blocklyHidden {', 'display: none;', '}', '.blocklyFieldDropdown:not(.blocklyHidden) {', 'display: block;', '}', '.blocklyIconGroup {', 'cursor: default;', '}', '.blocklyIconGroup:not(:hover),', '.blocklyIconGroupReadonly {', 'opacity: .6;', '}', '.blocklyIconShape {', 'fill: #00f;', 'stroke: #fff;', 'stroke-width: 1px;', '}', '.blocklyIconSymbol {', 'fill: #fff;', '}', '.blocklyMinimalBody {', 'margin: 0;', 'padding: 0;', '}', '.blocklyCommentTextarea {', 'background-color: #ffc;', 'border: 0;', 'margin: 0;', 'padding: 2px;', 'resize: none;', '}', '.blocklyHtmlInput {', 'border: none;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 12pt;', 'height: 100%;', 'margin: 0;', 'outline: none;', 'box-sizing: border-box;', 'width: 100%;', 'text-align: center;', 'color: $colour_text;', 'font-weight: 500;', '}', '.blocklyMainBackground {', 'stroke-width: 1;', 'stroke: #c6c6c6;', /* Equates to #ddd due to border being off-pixel. 
*/ '}', '.blocklyMutatorBackground {', 'fill: #fff;', 'stroke: #ddd;', 'stroke-width: 1;', '}', '.blocklyFlyoutBackground {', 'fill: $colour_flyout;', 'fill-opacity: .8;', '}', '.blocklyMainWorkspaceScrollbar {', 'z-index: 20;', '}', '.blocklyFlyoutScrollbar {', 'z-index: 30;', '}', '.blocklyScrollbarHorizontal, .blocklyScrollbarVertical {', 'position: absolute;', 'outline: none;', '}', '.blocklyScrollbarBackground {', 'opacity: 0;', '}', '.blocklyScrollbarHandle {', 'fill: $colour_scrollbar;', '}', '.blocklyScrollbarBackground:hover+.blocklyScrollbarHandle,', '.blocklyScrollbarHandle:hover {', 'fill: $colour_scrollbarHover;', '}', '.blocklyZoom>image {', 'opacity: 1;', '}', /* Darken flyout scrollbars due to being on a grey background. */ /* By contrast, workspace scrollbars are on a white background. */ '.blocklyFlyout .blocklyScrollbarHandle {', 'fill: #bbb;', '}', '.blocklyFlyout .blocklyScrollbarBackground:hover+.blocklyScrollbarHandle,', '.blocklyFlyout .blocklyScrollbarHandle:hover {', 'fill: #aaa;', '}', '.blocklyInvalidInput {', 'background: #faa;', '}', '.blocklyAngleCircle {', 'stroke: ' + Blockly.Colours.motion.tertiary + ';', 'stroke-width: 1;', 'fill: ' + Blockly.Colours.motion.secondary + ';', '}', '.blocklyAngleCenterPoint {', 'stroke: #fff;', 'stroke-width: 1;', 'fill: #fff;', '}', '.blocklyAngleDragHandle {', 'stroke: #fff;', 'stroke-width: 5;', 'stroke-opacity: 0.25;', 'fill: #fff;', 'cursor: pointer;', '}', '.blocklyAngleMarks {', 'stroke: #fff;', 'stroke-width: 1;', 'stroke-opacity: 0.5;', '}', '.blocklyAngleGauge {', 'fill: #fff;', 'fill-opacity: 0.20;', '}', '.blocklyAngleLine {', 'stroke: #fff;', 'stroke-width: 1;', 'stroke-linecap: round;', 'pointer-events: none;', '}', '.blocklyContextMenu {', 'border-radius: 4px;', '}', '.blocklyDropdownMenu {', 'padding: 0 !important;', '}', '.blocklyDropDownNumPad {', 'background-color: $colour_numPadBackground;', '}', /* Override the default Closure URL. */ '.blocklyWidgetDiv .goog-option-selected .goog-menuitem-checkbox,', '.blocklyWidgetDiv .goog-option-selected .goog-menuitem-icon {', 'background: url(<<<PATH>>>/sprites.png) no-repeat -48px -16px !important;', '}', /* Category tree in Toolbox. 
*/ '.blocklyToolboxDiv {', 'background-color: $colour_toolbox;', 'color: $colour_toolboxText;', 'overflow-x: visible;', 'overflow-y: auto;', 'position: absolute;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'z-index: 40;', /* so blocks go over toolbox when dragging */ '-webkit-tap-highlight-color: transparent;', /* issue #1345 */ '}', '.blocklyTreeRoot {', 'padding: 4px 0;', '}', '.blocklyTreeRoot:focus {', 'outline: none;', '}', '.blocklyTreeRow {', 'height: 22px;', 'line-height: 22px;', 'margin-bottom: 3px;', 'padding-right: 8px;', 'white-space: nowrap;', '}', '.blocklyHorizontalTree {', 'float: left;', 'margin: 1px 5px 8px 0;', '}', '.blocklyHorizontalTreeRtl {', 'float: right;', 'margin: 1px 0 8px 5px;', '}', '.blocklyToolboxDiv[dir="RTL"] .blocklyTreeRow {', 'margin-left: 8px;', '}', '.blocklyTreeRow:not(.blocklyTreeSelected):hover {', 'background-color: #e4e4e4;', '}', '.blocklyTreeSeparator {', 'border-bottom: solid #e5e5e5 1px;', 'height: 0;', 'margin: 5px 0;', '}', '.blocklyTreeSeparatorHorizontal {', 'border-right: solid #e5e5e5 1px;', 'width: 0;', 'padding: 5px 0;', 'margin: 0 5px;', '}', '.blocklyTreeIcon {', 'background-image: url(<<<PATH>>>/sprites.png);', 'height: 16px;', 'vertical-align: middle;', 'width: 16px;', '}', '.blocklyTreeIconClosedLtr {', 'background-position: -32px -1px;', '}', '.blocklyTreeIconClosedRtl {', 'background-position: 0px -1px;', '}', '.blocklyTreeIconOpen {', 'background-position: -16px -1px;', '}', '.blocklyTreeSelected>.blocklyTreeIconClosedLtr {', 'background-position: -32px -17px;', '}', '.blocklyTreeSelected>.blocklyTreeIconClosedRtl {', 'background-position: 0px -17px;', '}', '.blocklyTreeSelected>.blocklyTreeIconOpen {', 'background-position: -16px -17px;', '}', '.blocklyTreeIconNone,', '.blocklyTreeSelected>.blocklyTreeIconNone {', 'background-position: -48px -1px;', '}', '.blocklyTreeLabel {', 'cursor: default;', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 16px;', 'padding: 0 3px;', 'vertical-align: middle;', '}', '.blocklyToolboxDelete .blocklyTreeLabel {', 'cursor: url("<<<PATH>>>/handdelete.cur"), auto;', '}', '.blocklyTreeSelected .blocklyTreeLabel {', 'color: #fff;', '}', '.blocklyDropDownDiv .goog-slider-horizontal {', 'margin: 8px;', 'height: 22px;', 'width: 150px;', 'position: relative;', 'outline: none;', 'border-radius: 11px;', 'margin-bottom: 20px;', '}', '.blocklyDropDownDiv .goog-slider-horizontal .goog-slider-thumb {', 'width: 26px;', 'height: 26px;', 'margin-top: -1px;', 'position: absolute;', 'background-color: white;', 'border-radius: 100%;', '-webkit-box-shadow: 0 0 0 4px rgba(0, 0, 0, 0.15);', '-moz-box-shadow: 0 0 0 4px rgba(0, 0, 0, 0.15);', 'box-shadow: 0 0 0 4px rgba(0, 0, 0, 0.15);', '}', '.scratchEyedropper {', 'background: none;', 'outline: none;', 'border: none;', 'width: 100%;', 'text-align: center;', 'border-top: 1px solid #ddd;', 'padding-top: 5px;', 'cursor: pointer;', '}', '.scratchColourPickerLabel {', 'font-family: "Helvetica Neue", Helvetica, sans-serif;', 'font-size: 0.65rem;', 'color: $colour_toolboxText;', 'margin: 8px;', '}', '.scratchColourPickerLabelText {', 'font-weight: bold;', '}', '.scratchColourPickerReadout {', 'margin-left: 10px;', '}', /* Copied from: goog/css/menu.css */ /* * Copyright 2009 The Closure Library Authors. All Rights Reserved. * * Use of this source code is governed by the Apache License, Version 2.0. * See the COPYING file for details. */ /** * Standard styling for menus created by goog.ui.MenuRenderer. 
* * @author [email protected] (Attila Bodis) */ '.blocklyWidgetDiv .goog-menu {', 'background: #fff;', 'border-color: #ccc #666 #666 #ccc;', 'border-style: solid;', 'border-width: 1px;', 'cursor: default;', 'font: normal 13px "Helvetica Neue", Helvetica, sans-serif;', 'margin: 0;', 'outline: none;', 'padding: 4px 0;', 'position: absolute;', 'overflow-y: auto;', 'overflow-x: hidden;', 'z-index: 20000;', /* Arbitrary, but some apps depend on it... */ '}', '.blocklyDropDownDiv .goog-menu {', 'cursor: default;', 'font: normal 13px "Helvetica Neue", Helvetica, sans-serif;', 'outline: none;', 'z-index: 20000;', /* Arbitrary, but some apps depend on it... */ '}', /* Copied from: goog/css/menuitem.css */ /* * Copyright 2009 The Closure Library Authors. All Rights Reserved. * * Use of this source code is governed by the Apache License, Version 2.0. * See the COPYING file for details. */ /** * Standard styling for menus created by goog.ui.MenuItemRenderer. * * @author [email protected] (Attila Bodis) */ /** * State: resting. * * NOTE(mleibman,chrishenry): * The RTL support in Closure is provided via two mechanisms -- "rtl" CSS * classes and BiDi flipping done by the CSS compiler. Closure supports RTL * with or without the use of the CSS compiler. In order for them not * to conflict with each other, the "rtl" CSS classes need to have the #noflip * annotation. The non-rtl counterparts should ideally have them as well, but, * since .goog-menuitem existed without .goog-menuitem-rtl for so long before * being added, there is a risk of people having templates where they are not * rendering the .goog-menuitem-rtl class when in RTL and instead rely solely * on the BiDi flipping by the CSS compiler. That's why we're not adding the * #noflip to .goog-menuitem. */ '.blocklyWidgetDiv .goog-menuitem {', 'color: #000;', 'font: normal 13px "Helvetica Neue", Helvetica, sans-serif;', 'list-style: none;', 'margin: 0;', /* 28px on the left for icon or checkbox; 7em on the right for shortcut. */ 'padding: 4px 7em 4px 28px;', 'white-space: nowrap;', '}', '.blocklyDropDownDiv .goog-menuitem {', 'color: #fff;', 'font: normal 13px "Helvetica Neue", Helvetica, sans-serif;', 'font-weight: bold;', 'list-style: none;', 'margin: 0;', /* 28px on the left for icon or checkbox; 7em on the right for shortcut. */ 'padding: 4px 7em 4px 28px;', 'white-space: nowrap;', '}', /* BiDi override for the resting state. */ /* #noflip */ '.blocklyWidgetDiv .goog-menuitem.goog-menuitem-rtl, ', '.blocklyDropDownDiv .goog-menuitem.goog-menuitem-rtl {', /* Flip left/right padding for BiDi. */ 'padding-left: 7em;', 'padding-right: 28px;', '}', /* If a menu doesn't have checkable items or items with icons, remove padding. */ '.blocklyWidgetDiv .goog-menu-nocheckbox .goog-menuitem,', '.blocklyWidgetDiv .goog-menu-noicon .goog-menuitem, ', '.blocklyDropDownDiv .goog-menu-nocheckbox .goog-menuitem,', '.blocklyDropDownDiv .goog-menu-noicon .goog-menuitem { ', 'padding-left: 12px;', '}', /* * If a menu doesn't have items with shortcuts, leave just enough room for * submenu arrows, if they are rendered. */ '.blocklyWidgetDiv .goog-menu-noaccel .goog-menuitem, ', '.blocklyDropDownDiv .goog-menu-noaccel .goog-menuitem {', 'padding-right: 20px;', '}', '.blocklyWidgetDiv .goog-menuitem-content ', '.blocklyDropDownDiv .goog-menuitem-content {', 'color: #000;', 'font: normal 13px "Helvetica Neue", Helvetica, sans-serif;', '}', /* State: disabled. 
*/ '.blocklyWidgetDiv .goog-menuitem-disabled .goog-menuitem-accel,', '.blocklyWidgetDiv .goog-menuitem-disabled .goog-menuitem-content, ', '.blocklyDropDownDiv .goog-menuitem-disabled .goog-menuitem-accel,', '.blocklyDropDownDiv .goog-menuitem-disabled .goog-menuitem-content {', 'color: #ccc !important;', '}', '.blocklyWidgetDiv .goog-menuitem-disabled .goog-menuitem-icon, ', '.blocklyDropDownDiv .goog-menuitem-disabled .goog-menuitem-icon {', 'opacity: 0.3;', '-moz-opacity: 0.3;', 'filter: alpha(opacity=30);', '}', /* State: hover. */ '.blocklyWidgetDiv .goog-menuitem-highlight,', '.blocklyWidgetDiv .goog-menuitem-hover {', 'background-color: #d6e9f8;', /* Use an explicit top and bottom border so that the selection is visible', * in high contrast mode. */ 'border-color: #d6e9f8;', 'border-style: dotted;', 'border-width: 1px 0;', 'padding-bottom: 3px;', 'padding-top: 3px;', '}', '.blocklyDropDownDiv .goog-menuitem-highlight,', '.blocklyDropDownDiv .goog-menuitem-hover {', 'background-color: rgba(0, 0, 0, 0.2);', '}', /* State: selected/checked. */ '.blocklyWidgetDiv .goog-menuitem-checkbox,', '.blocklyWidgetDiv .goog-menuitem-icon, ', '.blocklyDropDownDiv .goog-menuitem-checkbox,', '.blocklyDropDownDiv .goog-menuitem-icon {', 'background-repeat: no-repeat;', 'height: 16px;', 'left: 6px;', 'position: absolute;', 'right: auto;', 'vertical-align: middle;', 'width: 16px;', '}', '.blocklyWidgetDiv .goog-option-selected .goog-menuitem-checkbox,', '.blocklyWidgetDiv .goog-option-selected .goog-menuitem-icon,', '.blocklyDropDownDiv .goog-option-selected .goog-menuitem-checkbox,', '.blocklyDropDownDiv .goog-option-selected .goog-menuitem-icon {', /* Client apps may override the URL at which they serve the sprite. */ 'background: url(<<<PATH>>>/sprites.png) no-repeat -48px -16px !important;', 'position: static;', /* Scroll with the menu. */ 'float: left;', 'margin-left: -24px;', '}', /* BiDi override for the selected/checked state. */ /* #noflip */ '.blocklyWidgetDiv .goog-menuitem-rtl .goog-menuitem-checkbox,', '.blocklyWidgetDiv .goog-menuitem-rtl .goog-menuitem-icon,', '.blocklyDropDownDiv .goog-menuitem-rtl .goog-menuitem-checkbox,', '.blocklyDropDownDiv .goog-menuitem-rtl .goog-menuitem-icon {', /* Flip left/right positioning. */ 'float: right;', 'margin-left: 6px;', '}', /* Keyboard shortcut ("accelerator") style. */ '.blocklyWidgetDiv .goog-menuitem-accel, ', '.blocklyDropDownDiv .goog-menuitem-accel {', 'color: #999;', /* Keyboard shortcuts are untranslated; always left-to-right. */ /* #noflip */ 'direction: ltr;', 'left: auto;', 'padding: 0 6px;', 'position: absolute;', 'right: 0;', 'text-align: right;', '}', /* BiDi override for shortcut style. */ /* #noflip */ '.blocklyWidgetDiv .goog-menuitem-rtl .goog-menuitem-accel, ', '.blocklyDropDownDiv .goog-menuitem-rtl .goog-menuitem-accel {', /* Flip left/right positioning and text alignment. */ 'left: 0;', 'right: auto;', 'text-align: left;', '}', /* Mnemonic styles. */ '.blocklyWidgetDiv .goog-menuitem-mnemonic-hint, ', '.blocklyDropDownDiv .goog-menuitem-mnemonic-hint {', 'text-decoration: underline;', '}', '.blocklyWidgetDiv .goog-menuitem-mnemonic-separator, ', '.blocklyDropDownDiv .goog-menuitem-mnemonic-separator {', 'color: #999;', 'font-size: 12px;', 'padding-left: 4px;', '}', /* Copied from: goog/css/menuseparator.css */ /* * Copyright 2009 The Closure Library Authors. All Rights Reserved. * * Use of this source code is governed by the Apache License, Version 2.0. * See the COPYING file for details. 
*/ /** * Standard styling for menus created by goog.ui.MenuSeparatorRenderer. * * @author [email protected] (Attila Bodis) */ '.blocklyWidgetDiv .goog-menuseparator, ', '.blocklyDropDownDiv .goog-menuseparator {', 'border-top: 1px solid #ccc;', 'margin: 4px 0;', 'padding: 0;', '}', '.blocklyFlyoutCheckbox {', 'fill: white;', 'stroke: #c8c8c8;', '}', '.blocklyFlyoutCheckbox.checked {', 'fill: ' + Blockly.Colours.motion.primary + ';', 'stroke: ' + Blockly.Colours.motion.tertiary + ';', '}', '.blocklyFlyoutCheckboxPath {', 'stroke: white;', 'stroke-width: 3;', 'stroke-linecap: round;', 'stroke-linejoin: round;', '}', '.scratchCategoryMenu {', 'width: 60px;', 'background: $colour_toolbox;', 'color: $colour_toolboxText;', 'font-size: .7em;', 'user-select: none;', '-webkit-user-select: none;', '-moz-user-select: none;', '-ms-user-select: none;', '}', '.scratchCategoryMenuHorizontal {', 'width: 100%;', 'height: 50px;', 'background: $colour_toolbox;', 'color: $colour_toolboxText;', 'font-size: .7em;', 'user-select: none;', '-webkit-user-select: none;', '-moz-user-select: none;', '-ms-user-select: none;', '}', '.scratchCategoryMenuHorizontal .scratchCategoryMenuRow {', 'float: left;', 'margin: 3px;', '}', '.scratchCategoryMenuRow {', '}', '.scratchCategoryMenu .scratchCategoryMenuRow + .scratchCategoryMenuRow:before {', 'display: block;', 'border-top: 1px solid #ddd;', 'content: "";', 'width: 60%;', 'margin: 4px auto;', '}', '.scratchCategoryMenuItem {', 'padding: 6px 0px;', 'cursor: pointer;', 'margin: 0px 2px;', 'text-align: center;', '}', '.scratchCategoryMenuHorizontal .scratchCategoryMenuItem {', 'padding: 6px 5px;', '}', '.scratchCategoryMenuItem.categorySelected {', 'background: $colour_toolboxSelected;', 'border-radius: 6px;', '}', '.scratchCategoryItemBubble {', 'width: 16px;', 'height: 16px;', 'border: 1px solid;', 'border-radius: 100%;', 'margin: 0 auto 3px;', '}', '.scratchCategoryMenuItem:hover {', 'color: $colour_toolboxHover !important;', '}', '' ];
1
8,839
I believe there is some JavaScript that also uses this 60px number for calculations. I think I'd rather keep the number in px instead of rem to make that correspondence easier to see. If 3.25rem != 60px, can you also change the other place where `60` is used in the JS?
LLK-scratch-blocks
js
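The reviewer's question hinges on whether 3.25rem actually equals 60px. Assuming the browser-default root font size of 16px (an assumption; the effective value depends on the page), it does not:

    # rem units resolve against the root element's font size, 16px by default
    root_px = 16
    print(3.25 * root_px)  # 52.0 px, not 60 px
    print(60 / root_px)    # 3.75 -> the rem value that would match 60px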
@@ -20,10 +20,11 @@ import urllib2 import jinja2 -from retrying import retry import sendgrid from sendgrid.helpers import mail +from retrying import retry + from google.cloud.security.common.util import errors as util_errors from google.cloud.security.common.util import log_util from google.cloud.security.common.util import retryable_exceptions
1
# Copyright 2017 The Forseti Security Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Email utility module.""" import base64 import os import urllib2 import jinja2 from retrying import retry import sendgrid from sendgrid.helpers import mail from google.cloud.security.common.util import errors as util_errors from google.cloud.security.common.util import log_util from google.cloud.security.common.util import retryable_exceptions LOGGER = log_util.get_logger(__name__) class EmailUtil(object): """Utility for sending emails.""" def __init__(self, api_key): """Initialize the email util. Args: api_key (str): The SendGrid api key to auth email service. """ self.sendgrid = sendgrid.SendGridAPIClient(apikey=api_key) @retry(retry_on_exception=retryable_exceptions.is_retryable_exception, wait_exponential_multiplier=1000, wait_exponential_max=10000, stop_max_attempt_number=5) def _execute_send(self, email): """Executes the sending of the email. This needs to be a standalone method so that we can wrap it with retry, and the final exception can be gracefully handled upstream. Args: email (SendGrid): SendGrid mail object Returns: dict: urllib2 response object """ return self.sendgrid.client.mail.send.post(request_body=email.get()) @staticmethod def _add_recipients(email, email_recipients): """Add multiple recipients to the sendgrid email object. Args: email (SendGrid): SendGrid mail object email_recipients (Str): comma-separated text of the email recipients Returns: SendGrid: SendGrid mail object with mulitiple recipients. """ personalization = mail.Personalization() recipients = email_recipients.split(',') for recipient in recipients: personalization.add_to(mail.Email(recipient)) email.add_personalization(personalization) return email def send(self, email_sender=None, email_recipient=None, email_subject=None, email_content=None, content_type=None, attachment=None): """Send an email. This uses the SendGrid API. https://github.com/sendgrid/sendgrid-python The minimum required info to send email are: sender, recipient, subject, and content (the body) Args: email_sender (str): The email sender. email_recipient (str): The email recipient. email_subject (str): The email subject. email_content (str): The email content (aka, body). content_type (str): The email content type. attachment (Attachment): A SendGrid Attachment. Raises: EmailSendError: An error with sending email has occurred. 
""" if not email_sender or not email_recipient: LOGGER.warn('Unable to send email: sender=%s, recipient=%s', email_sender, email_recipient) raise util_errors.EmailSendError email = mail.Mail() email.from_email = mail.Email(email_sender) email.subject = email_subject email.add_content(mail.Content(content_type, email_content)) email = self._add_recipients(email, email_recipient) if attachment: email.add_attachment(attachment) try: response = self._execute_send(email) except urllib2.HTTPError as e: LOGGER.error('Unable to send email: %s %s', e.code, e.reason) raise util_errors.EmailSendError if response.status_code == 202: LOGGER.info('Email accepted for delivery:\n%s', email_subject) else: LOGGER.error('Unable to send email:\n%s\n%s\n%s\n%s', email_subject, response.status_code, response.body, response.headers) raise util_errors.EmailSendError @classmethod def render_from_template(cls, template_file, template_vars): """Fill out an email template with template variables. Args: template_file (str): The location of email template in filesystem. template_vars (dict): The template variables to fill into the template. Returns: str: The template content, rendered with the provided variables. """ template_searchpath = os.path.abspath( os.path.join(os.path.dirname(__file__), '../email_templates')) template_loader = jinja2.FileSystemLoader( searchpath=template_searchpath) template_env = jinja2.Environment(loader=template_loader) template = template_env.get_template(template_file) return template.render(template_vars) @classmethod def create_attachment(cls, file_location, content_type, filename, disposition='attachment', content_id=None): """Create a SendGrid attachment. SendGrid attachments file content must be base64 encoded. Args: file_location (str): The path of the file. content_type (str): The content type of the attachment. filename (str): The filename of attachment. disposition (str): Content disposition, defaults to "attachment". content_id (str): The content id. Returns: Attachment: A SendGrid Attachment. """ file_content = '' with open(file_location, 'rb') as f: file_content = f.read() content = base64.b64encode(file_content) attachment = mail.Attachment() attachment.set_content(content) attachment.set_type(content_type) attachment.set_filename(filename) attachment.set_disposition(disposition) attachment.set_content_id(content_id) return attachment
1
28,311
retrying is an installed 3rd party library just like jinja and sendgrid; perhaps try grouping all of them together?
forseti-security-forseti-security
py
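The grouping suggestion above corresponds to the conventional PEP 8 layout: standard library imports first, then third-party packages (which is where `retrying` belongs, alongside `jinja2` and `sendgrid`), then project-local imports. A sketch of how the top of the module might look with that grouping (ordering within groups is a judgment call, not something the reviewer specified):

    # standard library
    import base64
    import os
    import urllib2

    # third-party packages, grouped together
    import jinja2
    import sendgrid
    from retrying import retry
    from sendgrid.helpers import mail

    # project-local imports
    from google.cloud.security.common.util import errors as util_errors
    from google.cloud.security.common.util import log_util
    from google.cloud.security.common.util import retryable_exceptions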
@@ -133,12 +133,12 @@ public interface List<T> extends Seq<T>, Stack<T> { @SuppressWarnings("unchecked") final List<T> list = (List<T>) elements; return list; - } else if (elements instanceof ArrayList) { + } else if (elements instanceof ArrayList || elements instanceof Vector) { @SuppressWarnings("unchecked") - final ArrayList<T> arrayList = (ArrayList<T>) elements; + final java.util.List<T> indexedList = (java.util.List<T>) elements; List<T> result = Nil.instance(); - for (int i = arrayList.size() - 1; i >= 0; i--) { - final T element = arrayList.get(i); + for (int i = indexedList.size() - 1; i >= 0; i--) { + final T element = indexedList.get(i); result = result.prepend(element); } return result;
1
/* / \____ _ ______ _____ / \____ ____ _____ * / \__ \/ \ / \__ \ / __// \__ \ / \/ __ \ Javaslang * _/ // _\ \ \/ / _\ \\_ \/ // _\ \ /\ \__/ / Copyright 2014-2015 Daniel Dietrich * /___/ \_____/\____/\_____/____/\___\_____/_/ \_/____/ Licensed under the Apache License, Version 2.0 */ package javaslang.collection; import javaslang.Tuple; import javaslang.Tuple2; import javaslang.control.None; import javaslang.control.Option; import javaslang.control.Some; import java.io.*; import java.util.*; import java.util.function.*; import java.util.stream.Collector; /** * A {@code List} is an eager sequence of elements. Its immutability makes it suitable for concurrent programming. * <p> * A {@code List} is composed of a {@code head} element and a {@code tail} {@code List}. * <p> * There are two implementations of the {@code List} interface: * <ul> * <li>{@link Nil}, which represents the empty {@code List}.</li> * <li>{@link Cons}, which represents a {@code List} containing one or more elements.</li> * </ul> * Methods to obtain a {@code List}: * <pre> * <code> * // factory methods * List.nil() // = List.of() = Nil.instance() * List.of(x) // = new Cons&lt;&gt;(x, Nil.instance()) * List.of(Object...) // e.g. List.of(1, 2, 3) * List.ofAll(Iterable) // e.g. List.ofAll(Stream.of(1, 2, 3)) = 1, 2, 3 * * // int sequences * List.range(0, 3) // = 0, 1, 2 * List.rangeClosed(0, 3) // = 0, 1, 2, 3 * </code> * </pre> * * Note: A {@code List} is primary a {@code Seq} and extends {@code Stack} for technical reasons (so {@code Stack} does not need to wrap {@code List}). * <p> * See Okasaki, Chris: <em>Purely Functional Data Structures</em> (p. 7 ff.). Cambridge, 2003. * * @param <T> Component type of the List * @since 1.1.0 */ public interface List<T> extends Seq<T>, Stack<T> { /** * Returns a {@link java.util.stream.Collector} which may be used in conjunction with * {@link java.util.stream.Stream#collect(java.util.stream.Collector)} to obtain a {@link javaslang.collection.List} * . * * @param <T> Component type of the List. * @return A javaslang.collection.List Collector. */ static <T> Collector<T, ArrayList<T>, List<T>> collector() { final Supplier<ArrayList<T>> supplier = ArrayList::new; final BiConsumer<ArrayList<T>, T> accumulator = ArrayList::add; final BinaryOperator<ArrayList<T>> combiner = (left, right) -> { left.addAll(right); return left; }; final Function<ArrayList<T>, List<T>> finisher = List::ofAll; return Collector.of(supplier, accumulator, combiner, finisher); } /** * Returns the single instance of Nil. Convenience method for {@code Nil.instance()} . * <p> * Note: this method intentionally returns type {@code List} and not {@code Nil}. This comes handy when folding. * If you explicitly need type {@code Nil} use {@linkplain Nil#instance()}. * * @param <T> Component type of Nil, determined by type inference in the particular context. * @return The empty list. */ static <T> List<T> nil() { return Nil.instance(); } /** * Returns a singleton {@code List}, i.e. a {@code List} of one element. * * @param element An element. * @param <T> The component type * @return A new List instance containing the given element */ static <T> List<T> of(T element) { return new Cons<>(element, Nil.instance()); } /** * <p> * Creates a List of the given elements. * </p> * * <pre> * <code> List.of(1, 2, 3, 4) * = Nil.instance().prepend(4).prepend(3).prepend(2).prepend(1) * = new Cons(1, new Cons(2, new Cons(3, new Cons(4, Nil.instance()))))</code> * </pre> * * @param <T> Component type of the List. 
* @param elements Zero or more elements. * @return A list containing the given elements in the same order. * @throws NullPointerException if {@code elements} is null */ @SafeVarargs static <T> List<T> of(T... elements) { Objects.requireNonNull(elements, "elements is null"); List<T> result = Nil.<T>instance(); for (int i = elements.length - 1; i >= 0; i--) { result = result.prepend(elements[i]); } return result; } /** * Creates a List of the given elements. * * @param <T> Component type of the List. * @param elements An Iterable of elements. * @return A list containing the given elements in the same order. * @throws NullPointerException if {@code elements} is null */ static <T> List<T> ofAll(Iterable<? extends T> elements) { Objects.requireNonNull(elements, "elements is null"); if (elements instanceof List) { @SuppressWarnings("unchecked") final List<T> list = (List<T>) elements; return list; } else if (elements instanceof ArrayList) { @SuppressWarnings("unchecked") final ArrayList<T> arrayList = (ArrayList<T>) elements; List<T> result = Nil.instance(); for (int i = arrayList.size() - 1; i >= 0; i--) { final T element = arrayList.get(i); result = result.prepend(element); } return result; } else { List<T> result = Nil.instance(); for (T element : elements) { result = result.prepend(element); } return result.reverse(); } } /** * Creates a List of int numbers starting from {@code from}, extending to {@code toExclusive - 1}. * * @param from the first number * @param toExclusive the last number + 1 * @return a range of int values as specified or {@code Nil} if {@code from >= toExclusive} */ static List<Integer> range(int from, int toExclusive) { if (from >= toExclusive) { return Nil.instance(); } else { return List.rangeClosed(from, toExclusive - 1); } } /** * Creates a List of int numbers starting from {@code from}, extending to {@code toInclusive}. * * @param from the first number * @param toInclusive the last number * @return a range of int values as specified or {@code Nil} if {@code from > toInclusive} */ static List<Integer> rangeClosed(int from, int toInclusive) { if (from > toInclusive) { return Nil.instance(); } else if (toInclusive == Integer.MIN_VALUE) { return List.of(Integer.MIN_VALUE); } else { List<Integer> result = Nil.instance(); for (int i = toInclusive; i >= from; i--) { result = result.prepend(i); } return result; } } @Override default List<T> append(T element) { return foldRight(List.of(element), (x, xs) -> xs.prepend(x)); } @Override default List<T> appendAll(Iterable<? extends T> elements) { Objects.requireNonNull(elements, "elements is null"); return foldRight(List.ofAll(elements), (x, xs) -> xs.prepend(x)); } @Override default List<T> clear() { return Nil.instance(); } @Override default List<List<T>> combinations() { return List.rangeClosed(0, length()).map(this::combinations).flatten(Function.identity()); } @Override default List<List<T>> combinations(int k) { class Recursion { List<List<T>> combinations(List<T> elements, int k) { return (k == 0) ? List.of(List.nil()) : elements.zipWithIndex().flatMap(t -> combinations(elements.drop(t._2 + 1), (k - 1)) .map((List<T> c) -> c.prepend(t._1))); } } return new Recursion().combinations(this, Math.max(k, 0)); } @Override default List<T> distinct() { return distinct(Function.identity()); } @Override default <U> List<T> distinct(Function<? super T, ? 
extends U> keyExtractor) { Objects.requireNonNull(keyExtractor, "keyExtractor is null"); final java.util.Set<U> seen = new java.util.HashSet<>(); return filter(t -> seen.add(keyExtractor.apply(t))); } @Override default List<T> drop(int n) { List<T> list = this; for (int i = n; i > 0 && !list.isEmpty(); i--) { list = list.tail(); } return list; } @Override default List<T> dropRight(int n) { return reverse().drop(n).reverse(); } @Override default List<T> dropWhile(Predicate<? super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); List<T> list = this; while (!list.isEmpty() && predicate.test(list.head())) { list = list.tail(); } return list; } @Override default List<T> filter(Predicate<? super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); return isEmpty() ? this : foldLeft(List.<T>nil(), (xs, x) -> predicate.test(x) ? xs.prepend(x) : xs).reverse(); } @Override default List<T> findAll(Predicate<? super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); return filter(predicate); } @Override default <U> List<U> flatMap(Function<? super T, ? extends Iterable<U>> mapper) { Objects.requireNonNull(mapper, "mapper is null"); if (isEmpty()) { return nil(); } else { List<U> list = nil(); for (T t : this) { for (U u : mapper.apply(t)) { list = list.prepend(u); } } return list.reverse(); } } /** * Flattens a {@code List} using a function {@code f}. A common use case is to use the identity * {@code list.flatten(Function::identity)} to flatten a {@code List} of {@code List}s. * <p> * Examples: * <pre> * <code> * Match&lt;List&lt;U&gt;&gt; f = Match * .when((List&lt;U&gt; l) -&gt; l) * .when((U u) -&gt; List.of(u)); * List.of(1).flatten(f); // = List(1) * List.of(List.of(1)).flatten(f); // = List(1) * List.of(Nil.instance()).flatten(f); // = Nil * Nil.instance().flatten(f); // = Nil * </code> * </pre> * * @param <U> component type of the result {@code List} * @param f a function which maps elements of this {@code List} to {@code List}s * @return a new {@code List} * @throws NullPointerException if {@code f} is null */ @Override default <U> List<U> flatten(Function<? super T, ? extends Iterable<U>> f) { Objects.requireNonNull(f, "f is null"); return isEmpty() ? Nil.instance() : foldRight(nil(), (t, xs) -> xs.prependAll(f.apply(t))); } @Override default void forEach(Consumer<? super T> action) { Objects.requireNonNull(action, "action is null"); Stack.super.forEach(action); } @Override default boolean forAll(Predicate<? 
super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); return Stack.super.forAll(predicate); } @Override default T get(int index) { if (isEmpty()) { throw new IndexOutOfBoundsException("get(" + index + ") on Nil"); } if (index < 0) { throw new IndexOutOfBoundsException("get(" + index + ")"); } List<T> list = this; for (int i = index - 1; i >= 0; i--) { list = list.tail(); if (list.isEmpty()) { throw new IndexOutOfBoundsException(String.format("get(%s) on List of length %s", index, index - i)); } } return list.head(); } @Override default List<List<T>> grouped(int size) { return sliding(size, size); } @Override default int indexOf(T element) { int index = 0; for (List<T> list = this; !list.isEmpty(); list = list.tail(), index++) { if (Objects.equals(list.head(), element)) { return index; } } return -1; } @Override List<T> init(); @Override Option<List<T>> initOption(); @Override default List<T> insert(int index, T element) { if (index < 0) { throw new IndexOutOfBoundsException("insert(" + index + ", e)"); } List<T> preceding = Nil.instance(); List<T> tail = this; for (int i = index; i > 0; i--, tail = tail.tail()) { if (tail.isEmpty()) { throw new IndexOutOfBoundsException("insert(" + index + ", e) on List of length " + length()); } preceding = preceding.prepend(tail.head()); } List<T> result = tail.prepend(element); for (T next : preceding) { result = result.prepend(next); } return result; } @Override default List<T> insertAll(int index, Iterable<? extends T> elements) { Objects.requireNonNull(elements, "elements is null"); if (index < 0) { throw new IndexOutOfBoundsException("insertAll(" + index + ", elements)"); } List<T> preceding = Nil.instance(); List<T> tail = this; for (int i = index; i > 0; i--, tail = tail.tail()) { if (tail.isEmpty()) { throw new IndexOutOfBoundsException("insertAll(" + index + ", elements) on List of length " + length()); } preceding = preceding.prepend(tail.head()); } List<T> result = tail.prependAll(elements); for (T next : preceding) { result = result.prepend(next); } return result; } @Override default List<T> intersperse(T element) { return isEmpty() ? Nil.instance() : foldRight(nil(), (x, xs) -> xs.isEmpty() ? xs.prepend(x) : xs.prepend(element).prepend(x)); } @Override default int lastIndexOf(T element) { int result = -1, index = 0; for (List<T> list = this; !list.isEmpty(); list = list.tail(), index++) { if (Objects.equals(list.head(), element)) { result = index; } } return result; } @Override default <U> List<U> map(Function<? super T, ? extends U> mapper) { Objects.requireNonNull(mapper, "mapper is null"); List<U> list = nil(); for (T t : this) { list = list.prepend(mapper.apply(t)); } return list.reverse(); } @Override default Tuple2<List<T>, List<T>> partition(Predicate<? super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); final java.util.List<T> left = new ArrayList<>(), right = new ArrayList<>(); for (T t : this) { (predicate.test(t) ? left : right).add(t); } return Tuple.of(List.ofAll(left), List.ofAll(right)); } @Override default T peek() { return head(); } /** * Performs an action on the head element of this {@code List}. * * @param action A {@code Consumer} * @return this {@code List} */ @Override default List<T> peek(Consumer<? 
super T> action) { Objects.requireNonNull(action, "action is null"); if (!isEmpty()) { action.accept(head()); } return this; } @Override default List<List<T>> permutations() { if (isEmpty()) { return Nil.instance(); } else { final List<T> tail = tail(); if (tail.isEmpty()) { return List.of(this); } else { final List<List<T>> zero = Nil.instance(); // TODO: IntelliJ IDEA 14.1.1 needs a redundant cast here, jdk 1.8.0_40 compiles fine return distinct().foldLeft(zero, (xs, x) -> xs.appendAll(remove(x).permutations().map((Function<List<T>, List<T>>) l -> l.prepend(x)))); } } } @Override default List<T> pop() { return tail(); } @Override Option<List<T>> popOption(); @Override default Tuple2<T, List<T>> pop2() { return Tuple.of(head(), tail()); } @Override Option<Tuple2<T, List<T>>> pop2Option(); @Override default List<T> prepend(T element) { return new Cons<>(element, this); } @Override default List<T> prependAll(Iterable<? extends T> elements) { Objects.requireNonNull(elements, "elements is null"); return isEmpty() ? List.ofAll(elements) : List.ofAll(elements).reverse().foldLeft(this, List::prepend); } @Override default List<T> push(T element) { return new Cons<>(element, this); } @SuppressWarnings("unchecked") @Override default List<T> push(T... elements) { Objects.requireNonNull(elements, "elements is null"); List<T> result = Nil.<T>instance(); for (T element : elements) { result = result.prepend(element); } return result; } @Override default List<T> pushAll(Iterable<T> elements) { Objects.requireNonNull(elements, "elements is null"); List<T> result = Nil.<T>instance(); for (T element : elements) { result = result.prepend(element); } return result; } @Override default List<T> remove(T element) { List<T> preceding = Nil.instance(); List<T> tail = this; boolean found = false; while (!found && !tail.isEmpty()) { final T head = tail.head(); if (head.equals(element)) { found = true; } else { preceding = preceding.prepend(head); } tail = tail.tail(); } List<T> result = tail; for (T next : preceding) { result = result.prepend(next); } return result; } @Override default List<T> removeAll(T removed) { List<T> result = Nil.instance(); for (T element : this) { if (!element.equals(removed)) { result = result.prepend(element); } } return result.reverse(); } @Override default List<T> removeAll(Iterable<? extends T> elements) { Objects.requireNonNull(elements, "elements is null"); List<T> removed = List.ofAll(elements).distinct(); List<T> result = Nil.instance(); for (T element : this) { if (!removed.contains(element)) { result = result.prepend(element); } } return result.reverse(); } @Override default List<T> replace(T currentElement, T newElement) { List<T> preceding = Nil.instance(); List<T> tail = this; while (!tail.isEmpty() && !Objects.equals(tail.head(), currentElement)) { preceding = preceding.prepend(tail.head()); tail = tail.tail(); } if (tail.isEmpty()) { return this; } // skip the current head element because it is replaced List<T> result = tail.tail().prepend(newElement); for (T next : preceding) { result = result.prepend(next); } return result; } @Override default List<T> replaceAll(T currentElement, T newElement) { List<T> result = Nil.instance(); for (List<T> list = this; !list.isEmpty(); list = list.tail()) { final T head = list.head(); final T elem = Objects.equals(head, currentElement) ? 
newElement : head; result = result.prepend(elem); } return result.reverse(); } @Override default List<T> replaceAll(UnaryOperator<T> operator) { Objects.requireNonNull(operator, "operator is null"); List<T> result = Nil.instance(); for (T element : this) { result = result.prepend(operator.apply(element)); } return result.reverse(); } @Override default List<T> retainAll(Iterable<? extends T> elements) { Objects.requireNonNull(elements, "elements is null"); final List<T> keeped = List.ofAll(elements).distinct(); List<T> result = Nil.instance(); for (T element : this) { if (keeped.contains(element)) { result = result.prepend(element); } } return result.reverse(); } @Override default List<T> reverse() { return isEmpty() ? this : foldLeft(nil(), List::prepend); } @Override default List<T> set(int index, T element) { if (isEmpty()) { throw new IndexOutOfBoundsException("set(" + index + ", e) on Nil"); } if (index < 0) { throw new IndexOutOfBoundsException("set(" + index + ", e)"); } List<T> preceding = Nil.instance(); List<T> tail = this; for (int i = index; i > 0; i--, tail = tail.tail()) { if (tail.isEmpty()) { throw new IndexOutOfBoundsException("set(" + index + ", e) on List of length " + length()); } preceding = preceding.prepend(tail.head()); } if (tail.isEmpty()) { throw new IndexOutOfBoundsException("set(" + index + ", e) on List of length " + length()); } // skip the current head element because it is replaced List<T> result = tail.tail().prepend(element); for (T next : preceding) { result = result.prepend(next); } return result; } @Override default List<List<T>> sliding(int size) { return sliding(size, 1); } @Override default List<List<T>> sliding(int size, int step) { if (size <= 0 || step <= 0) { throw new IllegalArgumentException(String.format("size: %s or step: %s not positive", size, step)); } List<List<T>> result = Nil.instance(); List<T> list = this; while (!list.isEmpty()) { final Tuple2<List<T>, List<T>> split = list.splitAt(size); result = result.prepend(split._1); list = split._2.isEmpty() ? Nil.instance() : list.drop(step); } return result.reverse(); } @Override default List<T> sort() { return isEmpty() ? this : toJavaStream().sorted().collect(List.collector()); } @Override default List<T> sort(Comparator<? super T> comparator) { Objects.requireNonNull(comparator, "comparator is null"); return isEmpty() ? this : toJavaStream().sorted(comparator).collect(List.collector()); } @Override default Tuple2<List<T>, List<T>> span(Predicate<? 
super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); return Tuple.of(takeWhile(predicate), dropWhile(predicate)); } @Override default Tuple2<List<T>, List<T>> splitAt(int n) { return Tuple.of(take(n), drop(n)); } @Override default Spliterator<T> spliterator() { // the focus of the Stream API is on random-access collections of *known size* return Spliterators.spliterator(iterator(), length(), Spliterator.ORDERED | Spliterator.IMMUTABLE); } @Override default List<T> subsequence(int beginIndex) { if (beginIndex < 0) { throw new IndexOutOfBoundsException("subsequence(" + beginIndex + ")"); } List<T> result = this; for (int i = 0; i < beginIndex; i++, result = result.tail()) { if (result.isEmpty()) { throw new IndexOutOfBoundsException( String.format("subsequence(%s) on List of length %s", beginIndex, i)); } } return result; } @Override default List<T> subsequence(int beginIndex, int endIndex) { if (beginIndex < 0 || beginIndex > endIndex) { throw new IndexOutOfBoundsException( String.format("subsequence(%s, %s) on List of length %s", beginIndex, endIndex, length())); } List<T> result = Nil.instance(); List<T> list = this; for (int i = 0; i < endIndex; i++, list = list.tail()) { if (list.isEmpty()) { throw new IndexOutOfBoundsException( String.format("subsequence(%s, %s) on List of length %s", beginIndex, endIndex, i)); } if (i >= beginIndex) { result = result.prepend(list.head()); } } return result.reverse(); } @Override List<T> tail(); @Override Option<List<T>> tailOption(); @Override default List<T> take(int n) { List<T> result = Nil.instance(); List<T> list = this; for (int i = 0; i < n && !list.isEmpty(); i++, list = list.tail()) { result = result.prepend(list.head()); } return result.reverse(); } @Override default List<T> takeRight(int n) { return reverse().take(n).reverse(); } @Override default List<T> takeWhile(Predicate<? super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); List<T> result = Nil.instance(); for (List<T> list = this; !list.isEmpty() && predicate.test(list.head()); list = list.tail()) { result = result.prepend(list.head()); } return result.reverse(); } @Override default <T1, T2> Tuple2<List<T1>, List<T2>> unzip( Function<? super T, Tuple2<? extends T1, ? extends T2>> unzipper) { Objects.requireNonNull(unzipper, "unzipper is null"); List<T1> xs = Nil.instance(); List<T2> ys = Nil.instance(); for (T element : this) { final Tuple2<? extends T1, ? extends T2> t = unzipper.apply(element); xs = xs.prepend(t._1); ys = ys.prepend(t._2); } return Tuple.of(xs.reverse(), ys.reverse()); } @Override default <U> List<Tuple2<T, U>> zip(Iterable<U> that) { Objects.requireNonNull(that, "that is null"); List<Tuple2<T, U>> result = Nil.instance(); List<T> list1 = this; Iterator<U> list2 = that.iterator(); while (!list1.isEmpty() && list2.hasNext()) { result = result.prepend(Tuple.of(list1.head(), list2.next())); list1 = list1.tail(); } return result.reverse(); } @Override default <U> List<Tuple2<T, U>> zipAll(Iterable<U> that, T thisElem, U thatElem) { Objects.requireNonNull(that, "that is null"); List<Tuple2<T, U>> result = Nil.instance(); Iterator<T> list1 = this.iterator(); Iterator<U> list2 = that.iterator(); while (list1.hasNext() || list2.hasNext()) { final T elem1 = list1.hasNext() ? list1.next() : thisElem; final U elem2 = list2.hasNext() ? 
list2.next() : thatElem; result = result.prepend(Tuple.of(elem1, elem2)); } return result.reverse(); } @Override default List<Tuple2<T, Integer>> zipWithIndex() { List<Tuple2<T, Integer>> result = Nil.instance(); int index = 0; for (List<T> list = this; !list.isEmpty(); list = list.tail()) { result = result.prepend(Tuple.of(list.head(), index++)); } return result.reverse(); } /** * Non-empty {@code List}, consisting of a {@code head} and a {@code tail}. * * @param <T> Component type of the List. * @since 1.1.0 */ // DEV NOTE: class declared final because of serialization proxy pattern (see Effective Java, 2nd ed., p. 315) final class Cons<T> extends AbstractList<T> implements Serializable { private static final long serialVersionUID = 1L; private final T head; private final List<T> tail; /** * Creates a List consisting of a head value and a trailing List. * * @param head The head * @param tail The tail */ Cons(T head, List<T> tail) { this.head = head; this.tail = tail; } @Override public T head() { return head; } @Override public Some<T> headOption() { return new Some<>(head); } @Override public List<T> init() { return dropRight(1); } @Override public Some<List<T>> initOption() { return new Some<>(init()); } @Override public Some<T> peekOption() { return new Some<>(head()); } @Override public Some<List<T>> popOption() { return new Some<>(tail()); } @Override public Some<Tuple2<T, List<T>>> pop2Option() { return new Some<>(Tuple.of(head(), tail())); } @Override public List<T> tail() { return tail; } @Override public Some<List<T>> tailOption() { return new Some<>(tail); } @Override public boolean isEmpty() { return false; } /** * <p> * {@code writeReplace} method for the serialization proxy pattern. * </p> * <p> * The presence of this method causes the serialization system to emit a SerializationProxy instance instead of * an instance of the enclosing class. * </p> * * @return A SerialiationProxy for this enclosing class. */ private Object writeReplace() { return new SerializationProxy<>(this); } /** * <p> * {@code readObject} method for the serialization proxy pattern. * </p> * Guarantees that the serialization system will never generate a serialized instance of the enclosing class. * * @param stream An object serialization stream. * @throws java.io.InvalidObjectException This method will throw with the message "Proxy required". */ private void readObject(ObjectInputStream stream) throws InvalidObjectException { throw new InvalidObjectException("Proxy required"); } /** * A serialization proxy which, in this context, is used to deserialize immutable, linked Lists with final * instance fields. * * @param <T> The component type of the underlying list. */ // DEV NOTE: The serialization proxy pattern is not compatible with non-final, i.e. extendable, // classes. Also, it may not be compatible with circular object graphs. private static final class SerializationProxy<T> implements Serializable { private static final long serialVersionUID = 1L; // the instance to be serialized/deserialized private transient Cons<T> list; /** * Constructor for the case of serialization, called by {@link Cons#writeReplace()}. * <p/> * The constructor of a SerializationProxy takes an argument that concisely represents the logical state of * an instance of the enclosing class. * * @param list a Cons */ SerializationProxy(Cons<T> list) { this.list = list; } /** * Write an object to a serialization stream. * * @param s An object serialization stream. 
* @throws java.io.IOException If an error occurs writing to the stream. */ private void writeObject(ObjectOutputStream s) throws IOException { s.defaultWriteObject(); s.writeInt(list.length()); for (List<T> l = list; !l.isEmpty(); l = l.tail()) { s.writeObject(l.head()); } } /** * Read an object from a deserialization stream. * * @param s An object deserialization stream. * @throws ClassNotFoundException If the object's class read from the stream cannot be found. * @throws InvalidObjectException If the stream contains no list elements. * @throws IOException If an error occurs reading from the stream. */ private void readObject(ObjectInputStream s) throws ClassNotFoundException, IOException { s.defaultReadObject(); final int size = s.readInt(); if (size <= 0) { throw new InvalidObjectException("No elements"); } List<T> temp = Nil.instance(); for (int i = 0; i < size; i++) { @SuppressWarnings("unchecked") final T element = (T) s.readObject(); temp = temp.prepend(element); } list = (Cons<T>) temp.reverse(); } /** * <p> * {@code readResolve} method for the serialization proxy pattern. * </p> * Returns a logically equivalent instance of the enclosing class. The presence of this method causes the * serialization system to translate the serialization proxy back into an instance of the enclosing class * upon deserialization. * * @return A deserialized instance of the enclosing class. */ private Object readResolve() { return list; } } } /** * Representation of the singleton empty {@code List}. * * @param <T> Component type of the List. * @since 1.1.0 */ final class Nil<T> extends AbstractList<T> implements Serializable { private static final long serialVersionUID = 1L; private static final Nil<?> INSTANCE = new Nil<>(); // hidden private Nil() { } /** * Returns the singleton instance of the liked list. * * @param <T> Component type of the List * @return the singleton instance of the linked list. */ @SuppressWarnings("unchecked") public static <T> Nil<T> instance() { return (Nil<T>) INSTANCE; } @Override public T head() { throw new NoSuchElementException("head of empty list"); } @Override public None<T> headOption() { return None.instance(); } @Override public List<T> init() { throw new UnsupportedOperationException("init of empty list"); } @Override public None<List<T>> initOption() { return None.instance(); } @Override public None<T> peekOption() { return None.instance(); } @Override public None<List<T>> popOption() { return None.instance(); } @Override public None<Tuple2<T, List<T>>> pop2Option() { return None.instance(); } @Override public List<T> tail() { throw new UnsupportedOperationException("tail of empty list"); } @Override public None<List<T>> tailOption() { return None.instance(); } @Override public boolean isEmpty() { return true; } /** * Instance control for object serialization. * * @return The singleton instance of Nil. * @see java.io.Serializable */ private Object readResolve() { return INSTANCE; } } /** * <p> * This class is needed because the interface {@link List} cannot use default methods to override Object's non-final * methods equals, hashCode and toString. * </p> * See <a href="http://mail.openjdk.java.net/pipermail/lambda-dev/2013-March/008435.html">Allow default methods to * override Object's methods</a>. * * @param <T> Component type of the List. 
* @since 1.1.0 */ abstract class AbstractList<T> implements List<T> { @Override public boolean equals(Object o) { if (o == this) { return true; } else if (o instanceof List) { List<?> list1 = this; List<?> list2 = (List<?>) o; while (!list1.isEmpty() && !list2.isEmpty()) { final boolean isEqual = Objects.equals(list1.head(), list2.head()); if (!isEqual) { return false; } list1 = list1.tail(); list2 = list2.tail(); } return list1.isEmpty() && list2.isEmpty(); } else { return false; } } @Override public int hashCode() { int hashCode = 1; for (T element : this) { hashCode = 31 * hashCode + Objects.hashCode(element); } return hashCode; } @Override public String toString() { return map(String::valueOf).join(", ", "List(", ")"); } } }
1
5,996
You could always use List.listIterator() with hasPrevious() and previous() to traverse any kind of j.u.List backwards. No need for special cases for ArrayList and Vector.
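As a rough standalone sketch of that suggestion (not code from the PR; the class name and sample data below are made up for illustration), a ListIterator started at the end of the list walks any java.util.List backwards without an ArrayList-specific branch:

import java.util.Arrays;
import java.util.ListIterator;

public class BackwardsTraversal {
    public static void main(String[] args) {
        java.util.List<Integer> elements = Arrays.asList(1, 2, 3, 4);

        // Position the iterator one past the last element, then walk backwards.
        ListIterator<Integer> it = elements.listIterator(elements.size());
        while (it.hasPrevious()) {
            // In the reviewed ofAll(...) this is where each element would be
            // prepended to the result list; here we simply print it.
            System.out.println(it.previous());
        }
    }
}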
vavr-io-vavr
java
@@ -355,10 +355,17 @@ public class EditImageActivity extends EditBaseActivity implements View.OnClickL private void addToUndoList() { try{ + TODO:// implement a more efficient way, like storing only the difference of bitmaps or + // steps followed to edit + bitmapsForUndo.add(mainBitmap.copy(mainBitmap.getConfig(),true)); }catch (OutOfMemoryError error){ - //Snackbar.make(getWindow().getDecorView().getRootView(),"Out of Memory. steps = " + bitmapsForUndo.size(),Snackbar.LENGTH_LONG).show(); - + /** + * When outOfMemory exception throws then to make space, remove the last edited step + * from list and added the new operation in the end. + */ + bitmapsForUndo.remove(0); + bitmapsForUndo.add(mainBitmap.copy(mainBitmap.getConfig(),true)); } }
1
package org.fossasia.phimpme.editor.editimage; import android.app.Activity; import android.app.Dialog; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.graphics.Bitmap; import android.net.Uri; import android.os.AsyncTask; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v7.app.AlertDialog; import android.text.TextUtils; import android.util.DisplayMetrics; import android.view.MotionEvent; import android.view.View; import android.widget.Toast; import org.fossasia.phimpme.R; import org.fossasia.phimpme.editor.EditBaseActivity; import org.fossasia.phimpme.editor.editimage.fragment.AddTextFragment; import org.fossasia.phimpme.editor.editimage.fragment.CropFragment; import org.fossasia.phimpme.editor.editimage.fragment.MainMenuFragment; import org.fossasia.phimpme.editor.editimage.fragment.PaintFragment; import org.fossasia.phimpme.editor.editimage.fragment.RecyclerMenuFragment; import org.fossasia.phimpme.editor.editimage.fragment.RotateFragment; import org.fossasia.phimpme.editor.editimage.fragment.SliderFragment; import org.fossasia.phimpme.editor.editimage.fragment.StickersFragment; import org.fossasia.phimpme.editor.editimage.fragment.TwoItemFragment; import org.fossasia.phimpme.editor.editimage.utils.BitmapUtils; import org.fossasia.phimpme.editor.editimage.utils.FileUtil; import org.fossasia.phimpme.editor.editimage.view.CropImageView; import org.fossasia.phimpme.editor.editimage.view.CustomPaintView; import org.fossasia.phimpme.editor.editimage.view.RotateImageView; import org.fossasia.phimpme.editor.editimage.view.StickerView; import org.fossasia.phimpme.editor.editimage.view.TextStickerView; import org.fossasia.phimpme.editor.editimage.view.imagezoom.ImageViewTouch; import org.fossasia.phimpme.editor.editimage.view.imagezoom.ImageViewTouchBase; import org.fossasia.phimpme.leafpic.activities.SingleMediaActivity; import org.fossasia.phimpme.leafpic.util.ThemeHelper; import org.fossasia.phimpme.SharingActivity; import org.fossasia.phimpme.utilities.ActivitySwitchHelper; import java.io.File; import java.io.IOException; import java.util.ArrayList; /** * Called from SingleMediaActivity when the user selects the 'edit' option in the toolbar overflow menu. */ public class EditImageActivity extends EditBaseActivity implements View.OnClickListener, View.OnTouchListener { public static final String FILE_PATH = "file_path"; public static final String EXTRA_OUTPUT = "extra_output"; public static final String SAVE_FILE_PATH = "save_file_path"; public static final String IMAGE_IS_EDIT = "image_is_edit"; /** * Different edit modes. */ public static final int MODE_MAIN = 0; public static final int MODE_SLIDER = 1; public static final int MODE_FILTERS = 2; public static final int MODE_ENHANCE = 3; public static final int MODE_STICKER_TYPES = 4; public static final int MODE_STICKERS = 5; public static final int MODE_ADJUST = 6; public static final int MODE_CROP = 7; public static final int MODE_ROTATE = 8; public static final int MODE_WRITE = 9; public static final int MODE_TEXT = 10; public static final int MODE_PAINT = 11; public String filePath; public String saveFilePath; private int imageWidth, imageHeight; public static int mode; public static int effectType; /** * Number of times image has been edited. Indicates whether image has been edited or not. 
*/ protected int mOpTimes = 0; protected boolean isBeenSaved = false; LoadImageTask mLoadImageTask; private EditImageActivity mContext; public Bitmap mainBitmap; private Bitmap originalBitmap; public ImageViewTouch mainImage; private View cancel,save,bef_aft,undo; public StickerView mStickerView;// Texture layers View public CropImageView mCropPanel;// Cut operation control public RotateImageView mRotatePanel;//Rotation operation controls public TextStickerView mTextStickerView;//Text display map View public CustomPaintView mPaintView;//drawing paint private SaveImageTask mSaveImageTask; private int requestCode; final String REVIEW_ACTION = "com.android.camera.action.REVIEW"; public ArrayList<Bitmap> bitmapsForUndo; public ThemeHelper themeHelper; public MainMenuFragment mainMenuFragment; public RecyclerMenuFragment filterFragment, enhanceFragment,stickerTypesFragment; public StickersFragment stickersFragment; public SliderFragment sliderFragment; public TwoItemFragment writeFragment,adjustFragment; public AddTextFragment addTextFragment; public PaintFragment paintFragment; public CropFragment cropFragment; public RotateFragment rotateFragment; private static String stickerType; /** * @param context * @param editImagePath * @param outputPath * @param requestCode */ public static void start(Activity context, final String editImagePath, final String outputPath, final int requestCode) { if (TextUtils.isEmpty(editImagePath)) { Toast.makeText(context, R.string.no_choose, Toast.LENGTH_SHORT).show(); return; } Intent it = new Intent(context, EditImageActivity.class); it.putExtra(EditImageActivity.FILE_PATH, editImagePath); it.putExtra(EditImageActivity.EXTRA_OUTPUT, outputPath); it.putExtra("requestCode",requestCode); context.startActivityForResult(it, requestCode); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (getSupportActionBar() != null) getSupportActionBar().hide(); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); checkInitImageLoader(); setContentView(R.layout.activity_image_edit); initView(); getData(); requestCode = getIntent().getIntExtra("requestCode", 1); // setInitialFragments(); } /** * Called after onCreate() when the activity is first started. Loads the initial default fragments. */ private void setInitialFragments() { getSupportFragmentManager() .beginTransaction() .add(R.id.controls_container, mainMenuFragment) .commit(); getSupportFragmentManager() .beginTransaction() .add(R.id.preview_container, filterFragment) .commit(); } /** * Gets the image to be loaded from the intent and displays this image. */ private void getData() { filePath = getIntent().getStringExtra(FILE_PATH); saveFilePath = getIntent().getStringExtra(EXTRA_OUTPUT); loadImage(filePath); } /** * Called from onCreate(). * Initializes all view objects and fragments to be used. 
*/ private void initView() { mContext = this; DisplayMetrics metrics = getResources().getDisplayMetrics(); imageWidth = metrics.widthPixels / 2; imageHeight = metrics.heightPixels / 2; mainImage = (ImageViewTouch) findViewById(R.id.main_image); cancel = findViewById(R.id.edit_cancel); save = findViewById(R.id.edit_save); bef_aft = findViewById(R.id.edit_befaft); undo = findViewById(R.id.edit_undo); bitmapsForUndo = new ArrayList<>(); cancel.setOnClickListener(this); save.setOnClickListener(this); undo.setOnClickListener(this); bef_aft.setOnTouchListener(this); mStickerView = (StickerView) findViewById(R.id.sticker_panel); mCropPanel = (CropImageView) findViewById(R.id.crop_panel); mRotatePanel = (RotateImageView) findViewById(R.id.rotate_panel); mTextStickerView = (TextStickerView) findViewById(R.id.text_sticker_panel); mPaintView = (CustomPaintView) findViewById(R.id.custom_paint_view); mode = MODE_FILTERS; mainMenuFragment = MainMenuFragment.newInstance(); sliderFragment = SliderFragment.newInstance(); filterFragment = RecyclerMenuFragment.newInstance(MODE_FILTERS); enhanceFragment = RecyclerMenuFragment.newInstance(MODE_ENHANCE); stickerTypesFragment = RecyclerMenuFragment.newInstance(MODE_STICKER_TYPES); adjustFragment = TwoItemFragment.newInstance(MODE_ADJUST); writeFragment = TwoItemFragment.newInstance(MODE_WRITE); addTextFragment = AddTextFragment.newInstance(); paintFragment = PaintFragment.newInstance(); cropFragment = CropFragment.newInstance(); rotateFragment = RotateFragment.newInstance(); } /** * Called when the edit_save button is pressed. Used to share the image on social media. */ private void shareImage() { Intent shareIntent = new Intent(EditImageActivity.this, SharingActivity.class); if(mOpTimes>0) { shareIntent.putExtra(EXTRA_OUTPUT, saveFilePath); shareIntent.putExtra(IMAGE_IS_EDIT, mOpTimes > 0); } else { shareIntent.putExtra(EXTRA_OUTPUT, filePath); } FileUtil.ablumUpdate(this, saveFilePath); setResult(RESULT_OK, shareIntent); startActivity(shareIntent); finish(); } /** * Get current editing mode. * @return the editing mode. */ public int getMode(){ return mode; } /** * Get the fragment corresponding to current editing mode. * @param index integer corresponding to editing mode. * @return Fragment of current editing mode. */ public Fragment getFragment(int index){ switch (index){ case MODE_MAIN: return mainMenuFragment; case MODE_SLIDER: sliderFragment = SliderFragment.newInstance(); return sliderFragment; case MODE_FILTERS: return filterFragment; case MODE_ENHANCE: return enhanceFragment; case MODE_STICKER_TYPES: return stickerTypesFragment; case MODE_STICKERS: stickersFragment = StickersFragment.newInstance(addStickerImages(stickerType)); return stickersFragment; case MODE_WRITE: return writeFragment; case MODE_ADJUST: return adjustFragment; case MODE_TEXT: return addTextFragment; case MODE_PAINT: return paintFragment; case MODE_CROP: return cropFragment; case MODE_ROTATE: return rotateFragment; } return mainMenuFragment; } /** * Called when a particular option in the preview_container is selected. It reassigns * the controls_container. It displays options and tools for the selected editing mode. * @param index integer corresponding to the current editing mode. */ public void changeBottomFragment(int index){ getSupportFragmentManager() .beginTransaction() .replace(R.id.controls_container, getFragment(index)) .commit(); setButtonsVisibility(); } /** * Handles the visibility of the 'save' button. 
*/ private void setButtonsVisibility() { save.setVisibility(View.VISIBLE); bef_aft.setVisibility(View.VISIBLE); if (bitmapsForUndo.size() > 1) undo.setVisibility(View.VISIBLE); else undo.setVisibility(View.INVISIBLE); switch (mode){ case MODE_STICKERS: case MODE_CROP: case MODE_ROTATE: case MODE_TEXT: case MODE_PAINT: save.setVisibility(View.INVISIBLE); bef_aft.setVisibility(View.INVISIBLE); break; case MODE_SLIDER: save.setVisibility(View.INVISIBLE); break; } } public void setEffectType(int type, int mode){ effectType = 100 * mode + type; } /** * Is called when an editing mode is selected in the control_container. Reassigns the * preview_container according to the editing mode selected. * @param index integer representing selected editing mode */ public void changeMiddleFragment(int index){ getSupportFragmentManager() .beginTransaction() .replace(R.id.preview_container, getFragment(index)) .commit(); } public void changeMainBitmap(Bitmap newBit) { if (mainBitmap != null) { if (!mainBitmap.isRecycled()) { mainBitmap.recycle(); } } mainBitmap = newBit; mainImage.setImageBitmap(mainBitmap); mainImage.setDisplayType(ImageViewTouchBase.DisplayType.FIT_TO_SCREEN); addToUndoList(); setButtonsVisibility(); increaseOpTimes(); } private void addToUndoList() { try{ bitmapsForUndo.add(mainBitmap.copy(mainBitmap.getConfig(),true)); }catch (OutOfMemoryError error){ //Snackbar.make(getWindow().getDecorView().getRootView(),"Out of Memory. steps = " + bitmapsForUndo.size(),Snackbar.LENGTH_LONG).show(); } } private Bitmap getRecentFromUndoList(boolean fromLast){ if (bitmapsForUndo.size()>1) { if (fromLast) { Bitmap bitmap = bitmapsForUndo.get(bitmapsForUndo.size() - 2).copy(bitmapsForUndo.get(bitmapsForUndo.size() - 2).getConfig(), true); bitmapsForUndo.get(bitmapsForUndo.size() - 1).recycle(); bitmapsForUndo.remove(bitmapsForUndo.size() - 1); return bitmap; } else { Bitmap bitmap = bitmapsForUndo.get(0).copy(bitmapsForUndo.get(0).getConfig(), true); bitmapsForUndo.get(1).recycle(); bitmapsForUndo.remove(1); return bitmap; } }else return bitmapsForUndo.get(0); } private void onUndoPressed() { if (mainBitmap != null) { if (!mainBitmap.isRecycled()) { mainBitmap.recycle(); } } mainBitmap = getRecentFromUndoList(true); mainImage.setImageBitmap(mainBitmap); mainImage.setDisplayType(ImageViewTouchBase.DisplayType.FIT_TO_SCREEN); setButtonsVisibility(); } /** * Load the image from filepath into mainImage imageView. * @param filepath The image to be loaded. */ public void loadImage(String filepath) { if (mLoadImageTask != null) { mLoadImageTask.cancel(true); } mLoadImageTask = new LoadImageTask(); mLoadImageTask.execute(filepath); } protected void doSaveImage() { if (mOpTimes <= 0) shareImage(); if (mSaveImageTask != null) { mSaveImageTask.cancel(true); } mSaveImageTask = new SaveImageTask(); mSaveImageTask.execute(mainBitmap); } //Increment no. of times the image has been edited public void increaseOpTimes() { mOpTimes++; isBeenSaved = false; } public void resetOpTimes() { isBeenSaved = true; } /** * Allow exit only if image has not been modified or has been modified and saved. * @return true if can exit, false if cannot. 
*/ public boolean canAutoExit() { return isBeenSaved || mOpTimes == 0; } protected void onSaveTaskDone() { Intent returnIntent = new Intent(); returnIntent.putExtra(FILE_PATH, filePath); returnIntent.putExtra(EXTRA_OUTPUT, saveFilePath); returnIntent.putExtra(IMAGE_IS_EDIT, mOpTimes > 0); FileUtil.ablumUpdate(this, saveFilePath); setResult(RESULT_OK, returnIntent); if(requestCode == 1 && mOpTimes<=0) { //Checks if this Activity was started by PhotoActivity Intent intent = new Intent(REVIEW_ACTION, Uri.fromFile(new File(filePath))); intent.setClass(getApplicationContext(), SingleMediaActivity.class); shareImage(); } else if(mOpTimes>0) { Intent intent = new Intent(REVIEW_ACTION, Uri.fromFile(new File(saveFilePath))); intent.setClass(getApplicationContext(), SingleMediaActivity.class); shareImage(); } } private ArrayList<String> addStickerImages(String folderPath) { ArrayList<String> pathList = new ArrayList<>(); try { String[] files = getAssets().list(folderPath); for (String name : files) { pathList.add(folderPath + File.separator + name); } } catch (IOException e) { e.printStackTrace(); } return pathList; } public void setStickerType(String stickerType) { EditImageActivity.stickerType = stickerType; } public String getStickerType(){ return stickerType; } @Override public boolean onTouch(View v, MotionEvent event) { if (R.id.edit_befaft == v.getId()){ if (MotionEvent.ACTION_DOWN == event.getAction()){ switch (mode){ case MODE_SLIDER: mainImage.setImageBitmap(mainBitmap); break; default: mainImage.setImageBitmap(originalBitmap); } }else if (MotionEvent.ACTION_UP == event.getAction()){ switch (mode){ case MODE_SLIDER: mainImage.setImageBitmap(sliderFragment.filterBit); break; default: mainImage.setImageBitmap(mainBitmap); } } } return true; } private final class LoadImageTask extends AsyncTask<String, Void, Bitmap> { @Override protected Bitmap doInBackground(String... params) { return BitmapUtils.getSampledBitmap(params[0], imageWidth, imageHeight); } @Override protected void onPostExecute(Bitmap result) { super.onPostExecute(result); if (mainBitmap != null) { mainBitmap.recycle(); mainBitmap = null; System.gc(); } mainBitmap = result; mainImage.setImageBitmap(result); mainImage.setDisplayType(ImageViewTouchBase.DisplayType.FIT_TO_SCREEN); originalBitmap = mainBitmap.copy(mainBitmap.getConfig(),true); addToUndoList(); setInitialFragments(); } } private final class SaveImageTask extends AsyncTask<Bitmap, Void, Boolean> { private Dialog dialog; @Override protected Boolean doInBackground(Bitmap... 
params) { if (TextUtils.isEmpty(saveFilePath)) return false; return BitmapUtils.saveBitmap(params[0], saveFilePath); } @Override protected void onCancelled() { super.onCancelled(); dialog.dismiss(); } @Override protected void onCancelled(Boolean result) { super.onCancelled(result); dialog.dismiss(); } @Override protected void onPreExecute() { super.onPreExecute(); dialog = getLoadingDialog(mContext, R.string.saving_image, false); dialog.show(); } @Override protected void onPostExecute(Boolean result) { super.onPostExecute(result); dialog.dismiss(); if (result) { resetOpTimes(); onSaveTaskDone(); } else { Toast.makeText(mContext, R.string.save_error, Toast.LENGTH_SHORT).show(); } } } @Override public void onBackPressed() { switch (mode){ case MODE_SLIDER: sliderFragment.backToMain(); return; case MODE_STICKERS: stickersFragment.backToMain(); return; case MODE_CROP: cropFragment.backToMain(); return; case MODE_ROTATE: rotateFragment.backToMain(); return; case MODE_TEXT: addTextFragment.backToMain(); return; case MODE_PAINT: paintFragment.backToMain(); return; } //if the image has not been edited or has been edited and saved. if (canAutoExit()) { finish(); } else { AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this); alertDialogBuilder.setMessage(R.string.exit_without_save) .setCancelable(false).setPositiveButton(R.string.confirm, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { mContext.finish(); } }).setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); AlertDialog alertDialog = alertDialogBuilder.create(); alertDialog.show(); } } @Override protected void onDestroy() { super.onDestroy(); if (originalBitmap != null && !originalBitmap.isRecycled()) { originalBitmap.recycle(); originalBitmap = null; } if (mLoadImageTask != null) { mLoadImageTask.cancel(true); } if (mSaveImageTask != null) { mSaveImageTask.cancel(true); } } @Override protected void onResume() { super.onResume(); ActivitySwitchHelper.setContext(this); } @Override public void onClick(View v) { switch (v.getId()){ case R.id.edit_save: if (mOpTimes == 0) {//Does not modify the image shareImage(); } else { doSaveImage(); } break; case R.id.edit_cancel: onBackPressed(); break; case R.id.edit_undo: onUndoPressed(); } } }
1
11,259
Remove the bitmap at index 1 from the list, not the 0th one, because when we keep on undoing it is better to end up with the original image rather than some randomly edited one. I am also not sure that just removing the bitmap from the list clears its memory; I think you should call bitmap.recycle() before removing that bitmap from the list.
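A minimal sketch of what the reviewer asks for, written against the method shown in the patch above (this is an assumption about how the fix could look, not the project's actual change; bitmapsForUndo and mainBitmap are the fields from the reviewed file):

private void addToUndoList() {
    try {
        bitmapsForUndo.add(mainBitmap.copy(mainBitmap.getConfig(), true));
    } catch (OutOfMemoryError error) {
        // Keep the original image at index 0 so repeated undo ends on it;
        // drop the oldest *edited* step instead, and recycle it so the
        // native bitmap memory is actually released before removal.
        if (bitmapsForUndo.size() > 1) {
            bitmapsForUndo.get(1).recycle();
            bitmapsForUndo.remove(1);
        }
        bitmapsForUndo.add(mainBitmap.copy(mainBitmap.getConfig(), true));
    }
}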
fossasia-phimpme-android
java
@@ -1,4 +1,4 @@ -// Copyright 2019 Google Inc. All Rights Reserved. +// Copyright 2020 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License.
1
// Copyright 2019 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by MockGen. DO NOT EDIT. // Source: github.com/GoogleCloudPlatform/compute-image-tools/cli_tools/common/domain (interfaces: StorageClientInterface) // Package mocks is a generated GoMock package. package mocks import ( storage "cloud.google.com/go/storage" domain "github.com/GoogleCloudPlatform/compute-image-tools/cli_tools/common/domain" gomock "github.com/golang/mock/gomock" io "io" reflect "reflect" ) // MockStorageClientInterface is a mock of StorageClientInterface interface type MockStorageClientInterface struct { ctrl *gomock.Controller recorder *MockStorageClientInterfaceMockRecorder } // MockStorageClientInterfaceMockRecorder is the mock recorder for MockStorageClientInterface type MockStorageClientInterfaceMockRecorder struct { mock *MockStorageClientInterface } // NewMockStorageClientInterface creates a new mock instance func NewMockStorageClientInterface(ctrl *gomock.Controller) *MockStorageClientInterface { mock := &MockStorageClientInterface{ctrl: ctrl} mock.recorder = &MockStorageClientInterfaceMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use func (m *MockStorageClientInterface) EXPECT() *MockStorageClientInterfaceMockRecorder { return m.recorder } // Buckets mocks base method func (m *MockStorageClientInterface) Buckets(arg0 string) *storage.BucketIterator { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Buckets", arg0) ret0, _ := ret[0].(*storage.BucketIterator) return ret0 } // Buckets indicates an expected call of Buckets func (mr *MockStorageClientInterfaceMockRecorder) Buckets(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Buckets", reflect.TypeOf((*MockStorageClientInterface)(nil).Buckets), arg0) } // Close mocks base method func (m *MockStorageClientInterface) Close() error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Close") ret0, _ := ret[0].(error) return ret0 } // Close indicates an expected call of Close func (mr *MockStorageClientInterfaceMockRecorder) Close() *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockStorageClientInterface)(nil).Close)) } // CreateBucket mocks base method func (m *MockStorageClientInterface) CreateBucket(arg0, arg1 string, arg2 *storage.BucketAttrs) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "CreateBucket", arg0, arg1, arg2) ret0, _ := ret[0].(error) return ret0 } // CreateBucket indicates an expected call of CreateBucket func (mr *MockStorageClientInterfaceMockRecorder) CreateBucket(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateBucket", reflect.TypeOf((*MockStorageClientInterface)(nil).CreateBucket), arg0, arg1, arg2) } // DeleteGcsPath mocks base method func (m *MockStorageClientInterface) DeleteGcsPath(arg0 string) error { m.ctrl.T.Helper() ret := 
m.ctrl.Call(m, "DeleteGcsPath", arg0) ret0, _ := ret[0].(error) return ret0 } // DeleteGcsPath indicates an expected call of DeleteGcsPath func (mr *MockStorageClientInterfaceMockRecorder) DeleteGcsPath(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteGcsPath", reflect.TypeOf((*MockStorageClientInterface)(nil).DeleteGcsPath), arg0) } // DeleteObject mocks base method func (m *MockStorageClientInterface) DeleteObject(arg0, arg1 string) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DeleteObject", arg0, arg1) ret0, _ := ret[0].(error) return ret0 } // DeleteObject indicates an expected call of DeleteObject func (mr *MockStorageClientInterfaceMockRecorder) DeleteObject(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteObject", reflect.TypeOf((*MockStorageClientInterface)(nil).DeleteObject), arg0, arg1) } // FindGcsFile mocks base method func (m *MockStorageClientInterface) FindGcsFile(arg0, arg1 string) (*storage.ObjectHandle, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "FindGcsFile", arg0, arg1) ret0, _ := ret[0].(*storage.ObjectHandle) ret1, _ := ret[1].(error) return ret0, ret1 } // FindGcsFile indicates an expected call of FindGcsFile func (mr *MockStorageClientInterfaceMockRecorder) FindGcsFile(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindGcsFile", reflect.TypeOf((*MockStorageClientInterface)(nil).FindGcsFile), arg0, arg1) } // FindGcsFileDepthLimited mocks base method func (m *MockStorageClientInterface) FindGcsFileDepthLimited(arg0, arg1 string, arg2 int) (*storage.ObjectHandle, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "FindGcsFileDepthLimited", arg0, arg1, arg2) ret0, _ := ret[0].(*storage.ObjectHandle) ret1, _ := ret[1].(error) return ret0, ret1 } // FindGcsFileDepthLimited indicates an expected call of FindGcsFileDepthLimited func (mr *MockStorageClientInterfaceMockRecorder) FindGcsFileDepthLimited(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindGcsFileDepthLimited", reflect.TypeOf((*MockStorageClientInterface)(nil).FindGcsFileDepthLimited), arg0, arg1, arg2) } // GetBucket mocks base method func (m *MockStorageClientInterface) GetBucket(arg0 string) *storage.BucketHandle { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetBucket", arg0) ret0, _ := ret[0].(*storage.BucketHandle) return ret0 } // GetBucket indicates an expected call of GetBucket func (mr *MockStorageClientInterfaceMockRecorder) GetBucket(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBucket", reflect.TypeOf((*MockStorageClientInterface)(nil).GetBucket), arg0) } // GetBucketAttrs mocks base method func (m *MockStorageClientInterface) GetBucketAttrs(arg0 string) (*storage.BucketAttrs, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetBucketAttrs", arg0) ret0, _ := ret[0].(*storage.BucketAttrs) ret1, _ := ret[1].(error) return ret0, ret1 } // GetBucketAttrs indicates an expected call of GetBucketAttrs func (mr *MockStorageClientInterfaceMockRecorder) GetBucketAttrs(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBucketAttrs", reflect.TypeOf((*MockStorageClientInterface)(nil).GetBucketAttrs), arg0) } // GetGcsFileContent mocks base method func (m *MockStorageClientInterface) 
GetGcsFileContent(arg0 *storage.ObjectHandle) ([]byte, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetGcsFileContent", arg0) ret0, _ := ret[0].([]byte) ret1, _ := ret[1].(error) return ret0, ret1 } // GetGcsFileContent indicates an expected call of GetGcsFileContent func (mr *MockStorageClientInterfaceMockRecorder) GetGcsFileContent(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGcsFileContent", reflect.TypeOf((*MockStorageClientInterface)(nil).GetGcsFileContent), arg0) } // GetObjectReader mocks base method func (m *MockStorageClientInterface) GetObjectReader(arg0, arg1 string) (io.ReadCloser, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetObjectReader", arg0, arg1) ret0, _ := ret[0].(io.ReadCloser) ret1, _ := ret[1].(error) return ret0, ret1 } // GetObjectReader indicates an expected call of GetObjectReader func (mr *MockStorageClientInterfaceMockRecorder) GetObjectReader(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetObjectReader", reflect.TypeOf((*MockStorageClientInterface)(nil).GetObjectReader), arg0, arg1) } // GetObjects mocks base method func (m *MockStorageClientInterface) GetObjects(arg0, arg1 string) domain.ObjectIteratorInterface { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetObjects", arg0, arg1) ret0, _ := ret[0].(domain.ObjectIteratorInterface) return ret0 } // GetObjects indicates an expected call of GetObjects func (mr *MockStorageClientInterfaceMockRecorder) GetObjects(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetObjects", reflect.TypeOf((*MockStorageClientInterface)(nil).GetObjects), arg0, arg1) } // WriteToGCS mocks base method func (m *MockStorageClientInterface) WriteToGCS(arg0, arg1 string, arg2 io.Reader) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "WriteToGCS", arg0, arg1, arg2) ret0, _ := ret[0].(error) return ret0 } // WriteToGCS indicates an expected call of WriteToGCS func (mr *MockStorageClientInterfaceMockRecorder) WriteToGCS(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WriteToGCS", reflect.TypeOf((*MockStorageClientInterface)(nil).WriteToGCS), arg0, arg1, arg2) }
1
11,074
This is an existing file, so let's keep 2019.
GoogleCloudPlatform-compute-image-tools
go
@@ -42,7 +42,7 @@ public class RemoteNetworkConnection implements NetworkConnection { @Override public ConnectionType setNetworkConnection( ConnectionType type) { - Map<String, ConnectionType> mode = ImmutableMap.of("type", type); + Map<String, Integer> mode = ImmutableMap.of("type", type.getBitMask()); return new ConnectionType(((Number) executeMethod.execute(DriverCommand.SET_NETWORK_CONNECTION, ImmutableMap .of("parameters", mode)))
1
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.remote.mobile; import com.google.common.collect.ImmutableMap; import org.openqa.selenium.mobile.NetworkConnection; import org.openqa.selenium.remote.DriverCommand; import org.openqa.selenium.remote.ExecuteMethod; import java.util.Map; public class RemoteNetworkConnection implements NetworkConnection { private final ExecuteMethod executeMethod; public RemoteNetworkConnection(ExecuteMethod executeMethod) { this.executeMethod = executeMethod; } @Override public ConnectionType getNetworkConnection() { return new ConnectionType(((Number) executeMethod.execute(DriverCommand.GET_NETWORK_CONNECTION, null)).intValue()); } @Override public ConnectionType setNetworkConnection( ConnectionType type) { Map<String, ConnectionType> mode = ImmutableMap.of("type", type); return new ConnectionType(((Number) executeMethod.execute(DriverCommand.SET_NETWORK_CONNECTION, ImmutableMap .of("parameters", mode))) .intValue()); } }
1
13,854
Can you change this instead to just `type.toString()`? Then you wouldn't have to expose getBitMask() in the enum. (Alternatively you could have used `type.hashCode()`, but that doesn't feel as nice.)
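Roughly, the alternative the reviewer describes would look like the sketch below (an assumption based on the comment, not the merged change; it relies on ConnectionType.toString() producing the value the remote end expects, as the reviewer implies):

@Override
public ConnectionType setNetworkConnection(ConnectionType type) {
  // Serialize the connection type via toString() so getBitMask() need not be exposed.
  Map<String, String> mode = ImmutableMap.of("type", type.toString());
  return new ConnectionType(((Number) executeMethod.execute(
      DriverCommand.SET_NETWORK_CONNECTION,
      ImmutableMap.of("parameters", mode))).intValue());
}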
SeleniumHQ-selenium
java
@@ -257,7 +257,7 @@ namespace Nethermind.AuRa.Test.Transactions TransactionPermissionContractVersions = new LruCache<Keccak, UInt256>(PermissionBasedTxFilter.Cache.MaxCacheSize, nameof(TransactionPermissionContract)); - var trieStore = new ReadOnlyTrieStore(new TrieStore(DbProvider.StateDb, LimboLogs.Instance)); + var trieStore = new TrieStore(DbProvider.StateDb, LimboLogs.Instance).AsReadOnly(DbProvider.StateDb); IReadOnlyTxProcessorSource txProcessorSource = new ReadOnlyTxProcessingEnv( DbProvider, trieStore,
1
// Copyright (c) 2021 Demerzel Solutions Limited // This file is part of the Nethermind library. // // The Nethermind library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The Nethermind library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the Nethermind. If not, see <http://www.gnu.org/licenses/>. // using System; using System.Collections.Generic; using System.Globalization; using System.Threading.Tasks; using FluentAssertions; using Nethermind.Abi; using Nethermind.AuRa.Test.Contract; using Nethermind.Blockchain.Processing; using Nethermind.Blockchain.Rewards; using Nethermind.Blockchain.Validators; using Nethermind.Consensus.AuRa; using Nethermind.Consensus.AuRa.Contracts; using Nethermind.Consensus.AuRa.Transactions; using Nethermind.Core; using Nethermind.Core.Caching; using Nethermind.Core.Crypto; using Nethermind.Core.Extensions; using Nethermind.Core.Test.Builders; using Nethermind.Crypto; using Nethermind.Int256; using Nethermind.Evm; using Nethermind.Logging; using Nethermind.Specs.ChainSpecStyle; using Nethermind.State; using Nethermind.Trie.Pruning; using NSubstitute; using NUnit.Framework; namespace Nethermind.AuRa.Test.Transactions { public class TxPermissionFilterTest { private const string ContractAddress = "0xAB5b100cf7C8deFB3c8f3C48474223997A50fB13"; private static readonly Address _contractAddress = new Address(ContractAddress); private static readonly ITransactionPermissionContract.TxPermissions[] TxTypes = new[] { ITransactionPermissionContract.TxPermissions.Basic, ITransactionPermissionContract.TxPermissions.Call, ITransactionPermissionContract.TxPermissions.Create, }; public static IEnumerable<TestCaseData> V1Tests() { IList<Test> tests = new List<Test>() { new Test() {SenderKey = GetPrivateKey(1), ContractPermissions = ITransactionPermissionContract.TxPermissions.All}, new Test() {SenderKey = GetPrivateKey(2), ContractPermissions = ITransactionPermissionContract.TxPermissions.Basic | ITransactionPermissionContract.TxPermissions.Call}, new Test() {SenderKey = GetPrivateKey(3), ContractPermissions = ITransactionPermissionContract.TxPermissions.Basic, To = _contractAddress}, new Test() {SenderKey = GetPrivateKey(4), ContractPermissions = ITransactionPermissionContract.TxPermissions.None}, }; return GetTestCases(tests, nameof(V1), CreateV1Transaction); } private static TransactionBuilder<Transaction> CreateV1Transaction(Test test, ITransactionPermissionContract.TxPermissions txType) { var transactionBuilder = Build.A.Transaction.WithData(null).WithSenderAddress(test.Sender); switch (txType) { case ITransactionPermissionContract.TxPermissions.Call: transactionBuilder.WithData(Bytes.Zero32); transactionBuilder.To(test.To); break; case ITransactionPermissionContract.TxPermissions.Create: transactionBuilder.WithCode(Bytes.Zero32); break; } return transactionBuilder; } // Contract code: https://gist.github.com/arkpar/38a87cb50165b7e683585eec71acb05a [TestCaseSource(nameof(V1Tests))] public async Task<(bool IsAllowed, bool Cache)> V1(Func<Task<TestTxPermissionsBlockchain>> chainFactory, Transaction 
tx) => await ChainTest(chainFactory, tx, 1); public static IEnumerable<TestCaseData> V2Tests() { IList<Test> tests = new List<Test>() { new Test() {SenderKey = GetPrivateKey(1), ContractPermissions = ITransactionPermissionContract.TxPermissions.All, Cache = true}, new Test() {SenderKey = GetPrivateKey(2), ContractPermissions = ITransactionPermissionContract.TxPermissions.Basic | ITransactionPermissionContract.TxPermissions.Call, Cache = true}, new Test() {SenderKey = GetPrivateKey(3), ContractPermissions = ITransactionPermissionContract.TxPermissions.Basic, Cache = true, To = _contractAddress}, new Test() {SenderKey = GetPrivateKey(4), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = true}, new Test() {SenderKey = GetPrivateKey(5), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = true}, new Test() {SenderKey = GetPrivateKey(5), ContractPermissions = ITransactionPermissionContract.TxPermissions.All, Cache = false, Value = 0}, new Test() {SenderKey = GetPrivateKey(6), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = true}, new Test() {SenderKey = GetPrivateKey(6), ContractPermissions = ITransactionPermissionContract.TxPermissions.Basic, Cache = false, ToKey = GetPrivateKey(7)}, new Test() {SenderKey = GetPrivateKey(7), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = true}, new Test() {SenderKey = GetPrivateKey(7), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = true, Value = 0}, new Test() {SenderKey = GetPrivateKey(7), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = true, ToKey = GetPrivateKey(6)}, new Test() {SenderKey = GetPrivateKey(7), ContractPermissions = ITransactionPermissionContract.TxPermissions.Basic | ITransactionPermissionContract.TxPermissions.Call, Cache = false, ToKey = GetPrivateKey(6), Value = 0}, }; return GetTestCases(tests, nameof(V2), CreateV2Transaction); } private static TransactionBuilder<Transaction> CreateV2Transaction(Test test, ITransactionPermissionContract.TxPermissions txType) { var transactionBuilder = CreateV1Transaction(test, txType); transactionBuilder.To(test.To); switch (txType) { case ITransactionPermissionContract.TxPermissions.Basic: { if (test.To == _contractAddress) { transactionBuilder.To(Address.Zero); } break; } case ITransactionPermissionContract.TxPermissions.Call: if (test.Number == 6) { transactionBuilder.To(_contractAddress); test.Cache = true; } break; case ITransactionPermissionContract.TxPermissions.Create: if (test.Number == 6 || test.Number == 7) { test.Cache = true; } transactionBuilder.To(null); break; } transactionBuilder.WithValue(test.Value); return transactionBuilder; } // Contract code: https://gist.github.com/VladLupashevskyi/84f18eabb1e4afadf572cf92af3e7e7f [TestCaseSource(nameof(V2Tests))] public async Task<(bool IsAllowed, bool Cache)> V2(Func<Task<TestTxPermissionsBlockchain>> chainFactory, Transaction tx) => await ChainTest(chainFactory, tx, 2); public static IEnumerable<TestCaseData> V3Tests() { IList<Test> tests = new List<Test>() { new Test() {SenderKey = GetPrivateKey(1), ContractPermissions = ITransactionPermissionContract.TxPermissions.None, Cache = false}, new Test() {SenderKey = GetPrivateKey(1), ContractPermissions = ITransactionPermissionContract.TxPermissions.All, Cache = false, GasPrice = 1}, new Test() {SenderKey = GetPrivateKey(1), ContractPermissions = ITransactionPermissionContract.TxPermissions.All, Cache = false, 
Data = new byte[]{0, 1}}, new Test() {SenderKey = GetPrivateKey(1), ContractPermissions = ITransactionPermissionContract.TxPermissions.All, Cache = false, GasPrice = 5, Data = new byte[]{0, 2, 3}}, }; return GetTestCases(tests, nameof(V3), CreateV3Transaction); } private static TransactionBuilder<Transaction> CreateV3Transaction(Test test, ITransactionPermissionContract.TxPermissions txType) { var transactionBuilder = CreateV2Transaction(test, txType); transactionBuilder.WithData(test.Data); transactionBuilder.WithGasPrice(test.GasPrice); return transactionBuilder; } [TestCaseSource(nameof(V3Tests))] public async Task<(bool IsAllowed, bool Cache)> V3(Func<Task<TestTxPermissionsBlockchain>> chainFactory, Transaction tx) => await ChainTest(chainFactory, tx, 3); private static async Task<(bool IsAllowed, bool Cache)> ChainTest(Func<Task<TestTxPermissionsBlockchain>> chainFactory, Transaction tx, UInt256 version) { using var chain = await chainFactory(); var head = chain.BlockTree.Head; var isAllowed = chain.PermissionBasedTxFilter.IsAllowed(tx, head.Header); chain.TransactionPermissionContractVersions.Get(head.Header.Hash).Should().Be(version); return (isAllowed.Allowed, chain.TxPermissionFilterCache.Permissions.Contains((head.Hash, tx.SenderAddress))); } private static IEnumerable<TestCaseData> GetTestCases(IEnumerable<Test> tests, string testsName, Func<Test, ITransactionPermissionContract.TxPermissions, TransactionBuilder<Transaction>> transactionBuilder) { TestCaseData GetTestCase( Func<Task<TestTxPermissionsBlockchain>> chainFactory, Test test, ITransactionPermissionContract.TxPermissions txType) { var result = (test.ContractPermissions & txType) != ITransactionPermissionContract.TxPermissions.None; return new TestCaseData(chainFactory, transactionBuilder(test, txType).TestObject) .SetName($"{testsName} - {test.Number}: Expected {test.ContractPermissions}, check {txType} is {result}") .SetCategory(testsName + "Tests") .Returns((result, test.Cache ?? 
true)); } var chainTask = TestContractBlockchain.ForTest<TestTxPermissionsBlockchain, TxPermissionFilterTest>(testsName); Func<Task<TestTxPermissionsBlockchain>> testFactory = async () => { var chain = await chainTask; chain.TxPermissionFilterCache.Permissions.Clear(); chain.TransactionPermissionContractVersions.Clear(); return chain; }; foreach (var test in tests) { foreach (var txType in TxTypes) { yield return GetTestCase(testFactory, test, txType); } } } private static PrivateKey GetPrivateKey(int key) => new PrivateKey(key.ToString("X64")); [TestCase(1, ExpectedResult = true)] [TestCase(3, ExpectedResult = true)] public bool allows_transactions_before_transitions(long blockNumber) { var transactionPermissionContract = new VersionedTransactionPermissionContract(new AbiEncoder(), TestItem.AddressA, 5, Substitute.For<IReadOnlyTxProcessorSource>(), new LruCache<Keccak, UInt256>(100, "TestCache"), LimboLogs.Instance); var filter = new PermissionBasedTxFilter(transactionPermissionContract, new PermissionBasedTxFilter.Cache(), LimboLogs.Instance); return filter.IsAllowed(Build.A.Transaction.WithSenderAddress(TestItem.AddressB).TestObject, Build.A.BlockHeader.WithNumber(blockNumber).TestObject).Allowed; } public class TestTxPermissionsBlockchain : TestContractBlockchain { public PermissionBasedTxFilter PermissionBasedTxFilter { get; private set; } public PermissionBasedTxFilter.Cache TxPermissionFilterCache { get; private set; } public ICache<Keccak, UInt256> TransactionPermissionContractVersions { get; private set; } protected override BlockProcessor CreateBlockProcessor() { var validator = new AuRaParameters.Validator() { Addresses = TestItem.Addresses, ValidatorType = AuRaParameters.ValidatorType.List }; TransactionPermissionContractVersions = new LruCache<Keccak, UInt256>(PermissionBasedTxFilter.Cache.MaxCacheSize, nameof(TransactionPermissionContract)); var trieStore = new ReadOnlyTrieStore(new TrieStore(DbProvider.StateDb, LimboLogs.Instance)); IReadOnlyTxProcessorSource txProcessorSource = new ReadOnlyTxProcessingEnv( DbProvider, trieStore, BlockTree, SpecProvider, LimboLogs.Instance); var transactionPermissionContract = new VersionedTransactionPermissionContract(new AbiEncoder(), _contractAddress, 1, new ReadOnlyTxProcessingEnv(DbProvider, trieStore, BlockTree, SpecProvider, LimboLogs.Instance), TransactionPermissionContractVersions, LimboLogs.Instance); TxPermissionFilterCache = new PermissionBasedTxFilter.Cache(); PermissionBasedTxFilter = new PermissionBasedTxFilter(transactionPermissionContract, TxPermissionFilterCache, LimboLogs.Instance); return new AuRaBlockProcessor( SpecProvider, Always.Valid, new RewardCalculator(SpecProvider), TxProcessor, State, Storage, TxPool, ReceiptStorage, LimboLogs.Instance, BlockTree, PermissionBasedTxFilter); } protected override Task AddBlocksOnStart() => Task.CompletedTask; } public class Test { private Address _to; public PrivateKey SenderKey { get; set; } public PrivateKey ToKey { get; set; } public UInt256 Value { get; set; } = 1; public byte[] Data { get; set; } = Bytes.Zero32; public UInt256 GasPrice { get; set; } = 0; public Address Sender => SenderKey.Address; public Address To { get => _to ?? ToKey?.Address ?? Address.Zero; set => _to = value; } public ITransactionPermissionContract.TxPermissions ContractPermissions { get; set; } public bool? Cache { get; set; } public int Number => int.Parse(SenderKey.KeyBytes.ToHexString(), NumberStyles.HexNumber); } } }
1
25,090
AsReadOnly() would be better here
NethermindEth-nethermind
cs
@@ -427,9 +427,12 @@ class Collection { * @param {function(collection, changes)} callback - A function to be called when changes occur. * The callback function is called with two arguments: * - `collection`: the collection instance that changed, - * - `changes`: a dictionary with keys `insertions`, `modifications` and `deletions`, - * each containing a list of indices that were inserted, updated or deleted respectively. If - * partial sync is enabled, an additional key `partial_sync` is added. + * - `changes`: a dictionary with keys `insertions`, `newModifications`, `oldModifications` + * and `deletions`, each containing a list of indices in the collection that were + * inserted, updated or deleted respectively. `deletions` and `oldModifications` are + * indices into the collection before the change happened, while `insertions` and + * `newModifications` are indices into the new version of the collection. If partial sync + * is enabled, an additional key `partial_sync` is added. * - `changes.partial_sync`: `error` indicates if an error has occurred, `old_state` is the previous * state, and `new_state` is the current state. * @throws {Error} If `callback` is not a function.
1
//////////////////////////////////////////////////////////////////////////// // // Copyright 2016 Realm Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////// /** * Abstract base class containing methods shared by {@link Realm.List} and {@link Realm.Results}. * * A Realm Collection is a homogenous sequence of values of any of the types * that can be stored as properties of Realm objects. A collection can be * accessed in any of the ways that a normal Javascript Array can, including * subscripting, enumerating with `for-of` and so on. * * A Collection always reflect the current state of the Realm. The one exception to this is * when using `for...in` or `for...of` enumeration, which will always enumerate over the * objects which matched the query when the enumeration is begun, even if some of them are * deleted or modified to be excluded by the filter during the enumeration. * * @memberof Realm * @since 0.11.0 */ class Collection { /** * The number of values in the collection. * @type {number} * @readonly */ get length() {} /** * The {@linkplain Realm~PropertyType type} of values in the collection. * @type {string} * @readonly * @since 2.0.0 */ get type() {} /** * Whether `null` is a valid value for the collection. * @type {boolean} * @readonly * @since 2.0.0 */ get optional() {} /** * Checks if this collection has not been deleted and is part of a valid Realm. * @returns {boolean} indicating if the collection can be safely accessed. * @since 0.14.0 */ isValid() {} /** * Checks if this collection is empty. * @returns {boolean} indicating if the collection is empty or not. * @since 2.7.0 */ isEmpty() {} /** * Returns new _Results_ that represent this collection being filtered by the provided query. * * @param {string} query - Query used to filter objects from the collection. * @param {...any} [arg] - Each subsequent argument is used by the placeholders * (e.g. `$0`, `$1`, `$2`, …) in the query. * @throws {Error} If the query or any other argument passed into this method is invalid. * @returns {Realm.Results<T>} filtered according to the provided query. * * This is currently only supported for collections of Realm Objects. * * See {@tutorial query-language} for details about the query language. * @example * let merlots = wines.filtered('variety == "Merlot" && vintage <= $0', maxYear); */ filtered(query, ...arg) {} /** * Returns new _Results_ that represent a sorted view of this collection. * * A collection of Realm Objects can be sorted on one or more properties of * those objects, or of properties of objects linked to by those objects. * To sort by a single property, simply pass the name of that property to * `sorted()`, optionally followed by a boolean indicating if the sort should be reversed. * For more than one property, you must pass an array of * {@linkplain Realm.Collection~SortDescriptor sort descriptors} which list * which properties to sort on. 
* * Collections of other types sort on the values themselves rather than * properties of the values, and so no property name or sort descriptors * should be supplied. * * @example * // Sort wines by age * wines.sorted('age') * @example * // Sort wines by price in descending order, then sort ties by age in * // ascending order * wines.sorted([['price', false], ['age']) * @example * // Sort a list of numbers in ascending order * let sortedPrices = wine.pricesSeen.sort() * @example * // Sort people by how expensive their favorite wine is * people.sort("favoriteWine.price") * * @param {string|Realm.Collection~SortDescriptor[]} [descriptor] - The property name(s) to sort the collection on. * @param {boolean} [reverse=false] - Sort in descending order rather than ascended. * May not be supplied if `descriptor` is an array of sort descriptors. * @throws {Error} If a specified property does not exist. * @returns {Realm.Results<T>} sorted according to the arguments passed in. */ sorted(descriptor, reverse) {} /** * Create a frozen snapshot of the collection. * * Values added to and removed from the original collection will not be * reflected in the _Results_ returned by this method, including if the * values of properties are changed to make them match or not match any * filters applied. * * This is **not** a _deep_ snapshot. Realm objects contained in this * snapshot will continue to update as changes are made to them, and if * they are deleted from the Realm they will be replaced by `null` at the * respective indices. * * @returns {Realm.Results<T>} which will **not** live update. */ snapshot() {} /** * Subscribe to a subset of objects matching the query of the collection. The Realm will only be * partially synced. Not all queries are currently supported. Once subscribed, it is highly recommended * to add a listener. * * @example * let wines = realm.objects('Wine').filtered('vintage <= $0', maxYear); * let subscription = wines.subscribe(); * wines.addListener((collection, changes) => { * if (subscription.state === Realm.Sync.SubscriptionState.Complete) { * // update UI * } * }); * * @param {string} subscriptionName - an optional name for the subscription. * @returns {Realm.Sync.Subscription} - the Realm.Sync.Subscription instance. * @throws {Error} if the partial sync is not enabled in the configuration or the query is not supported by Realm Object Server. * @since 2.3.0 */ subscribe(subscriptionName) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/entries Array.prototype.entries} * @returns {Realm.Collection~Iterator<T>} of each `[index, object]` pair in the collection * @since 0.11.0 */ entries() {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/keys Array.prototype.keys} * @returns {Realm.Collection~Iterator<T>} of each index in the collection * @since 0.11.0 */ keys() {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/values Array.prototype.values} * @returns {Realm.Collection~Iterator<T>} of each Realm object in the collection * @since 0.11.0 */ values() {} /** * This is the same method as the {@link Realm.Collection#values values()} method. 
* Its presence makes collections _iterable_, thus able to be used with ES6 * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for...of `for-of`} * loops, * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_operator `...`} * spread operators, and more. * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol/iterator Symbol.iterator} * and the {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#iterable iterable protocol} * @returns {Realm.Collection~Iterator<T>} of each Realm object in the collection * @since 0.11.0 * @example * for (let object of collection) { * // do something with each object * } */ [Symbol.iterator]() {} /** * Joins all objects in the collection into a string. * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/join Array.prototype.join} * @param {string} [separator=","] - A string to separate the return values of the * `toString()` method being called on each object in the collection. * @returns {string} * @since 0.11.0 */ join(separator) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/slice Array.prototype.slice} * @param {number} [start=0] - The start index. If negative, then the start index will be * counted from the end of the collection. * @param {number} [end] - The end index. The objects up to, but not including, the end * index will be include in the return value. If negative, then the end index will be * counted from the end of the collection. If omitted, then all objects from the start * index will be included in the return value. * @returns {T[]} containing the objects from the start index up to, but not * including, the end index. * @since 0.11.0 */ slice(start, end) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/find Array.prototype.find} * @param {function} callback - Function to execute on each object in the collection. * If this function returns `true`, then that object will be returned by this method. * This function takes three arguments: * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [thisArg] - The value of `this` when `callback` is called. * @returns {T|undefined} if the `callback` did not return `true` for any object * in the collection. * @since 0.11.0 */ find(callback, thisArg) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/findIndex Array.prototype.findIndex} * @param {function} callback - Function to execute on each object in the collection. * If this function returns `true`, then the index of that object will be returned * by this method. This function takes three arguments: * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [thisArg] - The value of `this` when `callback` is called. * @returns {number} representing the index where the `callback` returned `true`, or `-1` * if `true` was never returned. * @since 0.11.0 */ findIndex(callback, thisArg) {} /** Finds the index of the given object in the collection. 
* @param {T} object - The value to search for in the collection. * @throws {Error} If the argument is a {@link Realm.Object} that does not * belong to the same Realm as the collection. * @returns {number} representing the index where the value was found, or * `-1` if not in collection. * @since 1.8.2 */ indexOf(object) {} /** * Returns the minimum value of the values in the collection or of the * given property among all the objects in the collection, or `undefined` * if the collection is empty. * * Only supported for int, float, double and date properties. `null` values * are ignored entirely by this method and will not be returned. * * @param {string} [property] - For a collection of objects, the property to take the minimum of. * @throws {Error} If no property with the name exists or if property is not numeric/date. * @returns {number} the minimum value. * @since 1.12.1 */ min(property) {} /** * Returns the maximum value of the values in the collection or of the * given property among all the objects in the collection, or `undefined` * if the collection is empty. * * Only supported for int, float, double and date properties. `null` values * are ignored entirely by this method and will not be returned. * * @param {string} [property] - For a collection of objects, the property to take the maximum of. * @throws {Error} If no property with the name exists or if property is not numeric/date. * @returns {number} the maximum value. * @since 1.12.1 */ max(property) {} /** * Computes the sum of the values in the collection or of the given * property among all the objects in the collection, or 0 if the collection * is empty. * * Only supported for int, float and double properties. `null` values are * ignored entirely by this method. * @param {string} [property] - For a collection of objects, the property to take the sum of. * @throws {Error} If no property with the name exists or if property is not numeric. * @returns {number} the sum. * @since 1.12.1 */ sum(property) {} /** * Computes the average of the values in the collection or of the given * property among all the objects in the collection, or `undefined` if the collection * is empty. * * Only supported for int, float and double properties. `null` values are * ignored entirely by this method and will not be factored into the average. * @param {string} [property] - For a collection of objects, the property to take the average of. * @throws {Error} If no property with the name exists or if property is not numeric. * @returns {number} the sum. * @since 1.12.1 */ avg(property) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/forEach Array.prototype.forEach} * @param {function} callback - Function to execute on each object in the collection. * This function takes three arguments: * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [thisArg] - The value of `this` when `callback` is called. * @since 0.11.0 */ forEach(callback, thisArg) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/every Array.prototype.every} * @param {function} callback - Function to execute on each object in the collection. * If this function returns `true` for every object, then this method will return `true`. 
* This function takes three arguments: * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [thisArg] - The value of `this` when `callback` is called. * @returns {boolean} representing if `callback` returned `true` for every object in the * collection. * @since 0.11.0 */ every(callback, thisArg) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/some Array.prototype.some} * @param {function} callback - Function to execute on each object in the collection. * If this function ever returns `true`, then this method will return `true`. * This function takes three arguments: * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [thisArg] - The value of `this` when `callback` is called. * @returns {boolean} – `true` when `callback` returns `true` for an object in the collection, * otherwise `false`. * @since 0.11.0 */ some(callback, thisArg) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map Array.prototype.map} * @param {function} callback - Function to execute on each object in the collection. * This function takes three arguments: * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [thisArg] - The value of `this` when `callback` is called. * @returns {any[]} – the return values of `callback` after being called on every object * in the collection. * @since 0.11.0 */ map(callback, thisArg) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/reduce Array.prototype.reduce} * @param {function} callback - Function to execute on each object in the collection. * This function takes four arguments: * - `previousValue` – The value previously returned in the last invocation of the callback, * or `initialValue`, if supplied. * - `object` – The current object being processed in the collection. * - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [initialValue] - The value to use as the first argument to the first call * of the `callback`. * @throws {TypeError} If the collection is empty and no `initialValue` was supplied. * @returns {any} – the value returned by the final invocation of `callback`, _except_ for * the following special cases: * - If collection consists of a single object, and no `initalValue` was supplied, then * that object will be returned. * - If the collection is empty, then `initialValue` _must_ be supplied and will be returned. * @since 0.11.0 */ reduce(callback, initialValue) {} /** * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/reduceRight Array.prototype.reduceRight} * @param {function} callback - Function to execute on each object, from **right to left**, * in the collection. This function takes four arguments: * - `previousValue` – The value previously returned in the last invocation of the callback, * or `initialValue`, if supplied. * - `object` – The current object being processed in the collection. 
* - `index` – The index of the object being processed in the collection. * - `collection` – The collection itself. * @param {object} [initialValue] - The value to use as the first argument to the first call * of the `callback`. * @throws {TypeError} If the collection is empty and no `initialValue` was supplied. * @returns {any} – the value returned by the final invocation of `callback`, _except_ for * the following special cases: * - If collection consists of a single object, and no `initalValue` was supplied, then * that object will be returned. * - If the collection is empty, then `initialValue` _must_ be supplied and will be returned. * @since 0.11.0 */ reduceRight(callback, initialValue) {} /** * Add a listener `callback` which will be called when a **live** collection instance changes. * @param {function(collection, changes)} callback - A function to be called when changes occur. * The callback function is called with two arguments: * - `collection`: the collection instance that changed, * - `changes`: a dictionary with keys `insertions`, `modifications` and `deletions`, * each containing a list of indices that were inserted, updated or deleted respectively. If * partial sync is enabled, an additional key `partial_sync` is added. * - `changes.partial_sync`: `error` indicates if an error has occurred, `old_state` is the previous * state, and `new_state` is the current state. * @throws {Error} If `callback` is not a function. * @example * wines.addListener((collection, changes) => { * // collection === wines * if (changes.partial_sync.new_state == Realm.Sync.SubscriptionState.Initialized) { * console.log('Our subset is ready'); * console.log(`${changes.insertions.length} insertions`); * console.log(`${changes.modifications.length} modifications`); * console.log(`${changes.deletions.length} deletions`); * console.log(`new size of collection: ${collection.length}`); * } * }); */ addListener(callback) {} /** * Remove the listener `callback` from the collection instance. * @param {function(collection, changes)} callback - Callback function that was previously * added as a listener through the {@link Collection#addListener addListener} method. * @throws {Error} If `callback` is not a function. */ removeListener(callback) {} /** * Remove all `callback` listeners from the collection instance. */ removeAllListeners(name) {} } /** * This is an ES6 iterator. * @typedef Realm.Collection~Iterator * @memberof Realm.Collection * @property {function} next - Returns an object with two properties: * - `done` – `true` if the iterator is done iterating through items in the collection, * otherwise `false` * - `value` – the next item being iterated through in the collection, or `undefined` when * `done` is `true` * @property {function} Symbol.iterator - This method simply returns `this`, thus making this * iterator itself _iterable_ (i.e. usable in * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for...of `for-of`} * loops, with the * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_operator `...`} * spread operator, and more). * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#iterator iterator protocol} */ /** * A sort descriptor is either a string containing one or more property names * separate by dots, **or** an array with two items: `[propertyName, reverse]`. * * @typedef Realm.Collection~SortDescriptor * @memberof Realm.Collection * @type {string|Array} */
1
17,043
I suggest that we use `query_based_sync` instead of `partial_sync`.
realm-realm-js
js
@@ -15,6 +15,11 @@ // Package azureblob provides a blob implementation that uses Azure Storage’s // BlockBlob. Use OpenBucket to construct a *blob.Bucket. // +// NOTE: SignedURLs for PUT created with this package are not fully portable; +// they will not work unless the PUT request includes a "x-ms-blob-type" header +// set to "BlockBlob". +// See https://stackoverflow.com/questions/37824136/put-on-sas-blob-url-without-specifying-x-ms-blob-type-header. +// // URLs // // For blob.OpenBucket, azureblob registers for the scheme "azblob".
1
// Copyright 2018 The Go Cloud Development Kit Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package azureblob provides a blob implementation that uses Azure Storage’s // BlockBlob. Use OpenBucket to construct a *blob.Bucket. // // URLs // // For blob.OpenBucket, azureblob registers for the scheme "azblob". // The default URL opener will use credentials from the environment variables // AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, and AZURE_STORAGE_SAS_TOKEN. // AZURE_STORAGE_ACCOUNT is required, along with one of the other two. // To customize the URL opener, or for more details on the URL format, // see URLOpener. // See https://gocloud.dev/concepts/urls/ for background information. // // Escaping // // Go CDK supports all UTF-8 strings; to make this work with services lacking // full UTF-8 support, strings must be escaped (during writes) and unescaped // (during reads). The following escapes are performed for azureblob: // - Blob keys: ASCII characters 0-31, 92 ("\"), and 127 are escaped to // "__0x<hex>__". Additionally, the "/" in "../" and a trailing "/" in a // key (e.g., "foo/") are escaped in the same way. // - Metadata keys: Per https://docs.microsoft.com/en-us/azure/storage/blobs/storage-properties-metadata, // Azure only allows C# identifiers as metadata keys. Therefore, characters // other than "[a-z][A-z][0-9]_" are escaped using "__0x<hex>__". In addition, // characters "[0-9]" are escaped when they start the string. // URL encoding would not work since "%" is not valid. // - Metadata values: Escaped using URL encoding. // // As // // azureblob exposes the following types for As: // - Bucket: *azblob.ContainerURL // - Error: azblob.StorageError // - ListObject: azblob.BlobItem for objects, azblob.BlobPrefix for "directories" // - ListOptions.BeforeList: *azblob.ListBlobsSegmentOptions // - Reader: azblob.DownloadResponse // - Reader.BeforeRead: *azblob.BlockBlobURL, *azblob.BlobAccessConditions // - Attributes: azblob.BlobGetPropertiesResponse // - CopyOptions.BeforeCopy: azblob.Metadata, *azblob.ModifiedAccessConditions, *azblob.BlobAccessConditions // - WriterOptions.BeforeWrite: *azblob.UploadStreamToBlockBlobOptions package azureblob import ( "context" "errors" "fmt" "io" "net/http" "net/url" "os" "sort" "strconv" "strings" "sync" "time" "github.com/Azure/azure-pipeline-go/pipeline" "github.com/Azure/azure-storage-blob-go/azblob" "github.com/google/uuid" "github.com/google/wire" "gocloud.dev/blob" "gocloud.dev/blob/driver" "gocloud.dev/gcerrors" "gocloud.dev/internal/escape" "gocloud.dev/internal/useragent" ) // Options sets options for constructing a *blob.Bucket backed by Azure Block Blob. type Options struct { // Credential represents the authorizer for SignedURL. // Required to use SignedURL. Credential *azblob.SharedKeyCredential // SASToken can be provided along with anonymous credentials to use // delegated privileges. 
// See https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1#shared-access-signature-parameters. SASToken SASToken } const ( defaultMaxDownloadRetryRequests = 3 // download retry policy (Azure default is zero) defaultPageSize = 1000 // default page size for ListPaged (Azure default is 5000) defaultUploadBuffers = 5 // configure the number of rotating buffers that are used when uploading (for degree of parallelism) defaultUploadBlockSize = 8 * 1024 * 1024 // configure the upload buffer size ) func init() { blob.DefaultURLMux().RegisterBucket(Scheme, new(lazyCredsOpener)) } // Set holds Wire providers for this package. var Set = wire.NewSet( NewPipeline, wire.Struct(new(Options), "Credential", "SASToken"), wire.Struct(new(URLOpener), "AccountName", "Pipeline", "Options"), ) // lazyCredsOpener obtains credentials from the environment on the first call // to OpenBucketURL. type lazyCredsOpener struct { init sync.Once opener *URLOpener err error } func (o *lazyCredsOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket, error) { o.init.Do(func() { // Use default credential info from the environment. // Ignore errors, as we'll get errors from OpenBucket later. accountName, _ := DefaultAccountName() accountKey, _ := DefaultAccountKey() sasToken, _ := DefaultSASToken() o.opener, o.err = openerFromEnv(accountName, accountKey, sasToken) }) if o.err != nil { return nil, fmt.Errorf("open bucket %v: %v", u, o.err) } return o.opener.OpenBucketURL(ctx, u) } // Scheme is the URL scheme gcsblob registers its URLOpener under on // blob.DefaultMux. const Scheme = "azblob" // URLOpener opens Azure URLs like "azblob://mybucket". // // The URL host is used as the bucket name. // // No query parameters are supported. type URLOpener struct { // AccountName must be specified. AccountName AccountName // Pipeline must be set to a non-nil value. Pipeline pipeline.Pipeline // Options specifies the options to pass to OpenBucket. Options Options } func openerFromEnv(accountName AccountName, accountKey AccountKey, sasToken SASToken) (*URLOpener, error) { // azblob.Credential is an interface; we will use either a SharedKeyCredential // or anonymous credentials. If the former, we will also fill in // Options.Credential so that SignedURL will work. var credential azblob.Credential var sharedKeyCred *azblob.SharedKeyCredential if accountKey != "" { var err error sharedKeyCred, err = NewCredential(accountName, accountKey) if err != nil { return nil, fmt.Errorf("invalid credentials %s/%s: %v", accountName, accountKey, err) } credential = sharedKeyCred } else { credential = azblob.NewAnonymousCredential() } return &URLOpener{ AccountName: accountName, Pipeline: NewPipeline(credential, azblob.PipelineOptions{}), Options: Options{ Credential: sharedKeyCred, SASToken: sasToken, }, }, nil } // OpenBucketURL opens a blob.Bucket based on u. func (o *URLOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket, error) { for k := range u.Query() { return nil, fmt.Errorf("open bucket %v: invalid query parameter %q", u, k) } return OpenBucket(ctx, o.Pipeline, o.AccountName, u.Host, &o.Options) } // DefaultIdentity is a Wire provider set that provides an Azure storage // account name, key, and SharedKeyCredential from environment variables. 
var DefaultIdentity = wire.NewSet( DefaultAccountName, DefaultAccountKey, NewCredential, wire.Bind(new(azblob.Credential), new(*azblob.SharedKeyCredential)), wire.Value(azblob.PipelineOptions{}), ) // SASTokenIdentity is a Wire provider set that provides an Azure storage // account name, SASToken, and anonymous credential from environment variables. var SASTokenIdentity = wire.NewSet( DefaultAccountName, DefaultSASToken, azblob.NewAnonymousCredential, wire.Value(azblob.PipelineOptions{}), ) // AccountName is an Azure storage account name. type AccountName string // AccountKey is an Azure storage account key (primary or secondary). type AccountKey string // SASToken is an Azure shared access signature. // https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 type SASToken string // DefaultAccountName loads the Azure storage account name from the // AZURE_STORAGE_ACCOUNT environment variable. func DefaultAccountName() (AccountName, error) { s := os.Getenv("AZURE_STORAGE_ACCOUNT") if s == "" { return "", errors.New("azureblob: environment variable AZURE_STORAGE_ACCOUNT not set") } return AccountName(s), nil } // DefaultAccountKey loads the Azure storage account key (primary or secondary) // from the AZURE_STORAGE_KEY environment variable. func DefaultAccountKey() (AccountKey, error) { s := os.Getenv("AZURE_STORAGE_KEY") if s == "" { return "", errors.New("azureblob: environment variable AZURE_STORAGE_KEY not set") } return AccountKey(s), nil } // DefaultSASToken loads a Azure SAS token from the AZURE_STORAGE_SAS_TOKEN // environment variable. func DefaultSASToken() (SASToken, error) { s := os.Getenv("AZURE_STORAGE_SAS_TOKEN") if s == "" { return "", errors.New("azureblob: environment variable AZURE_STORAGE_SAS_TOKEN not set") } return SASToken(s), nil } // NewCredential creates a SharedKeyCredential. func NewCredential(accountName AccountName, accountKey AccountKey) (*azblob.SharedKeyCredential, error) { return azblob.NewSharedKeyCredential(string(accountName), string(accountKey)) } // NewPipeline creates a Pipeline for making HTTP requests to Azure. func NewPipeline(credential azblob.Credential, opts azblob.PipelineOptions) pipeline.Pipeline { opts.Telemetry.Value = useragent.AzureUserAgentPrefix("blob") + opts.Telemetry.Value return azblob.NewPipeline(credential, opts) } // bucket represents a Azure Storage Account Container, which handles read, // write and delete operations on objects within it. // See https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction. type bucket struct { name string pageMarkers map[string]azblob.Marker serviceURL *azblob.ServiceURL containerURL azblob.ContainerURL opts *Options } // OpenBucket returns a *blob.Bucket backed by Azure Storage Account. See the package // documentation for an example and // https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob // for more details. 
func OpenBucket(ctx context.Context, pipeline pipeline.Pipeline, accountName AccountName, containerName string, opts *Options) (*blob.Bucket, error) { b, err := openBucket(ctx, pipeline, accountName, containerName, opts) if err != nil { return nil, err } return blob.NewBucket(b), nil } func openBucket(ctx context.Context, pipeline pipeline.Pipeline, accountName AccountName, containerName string, opts *Options) (*bucket, error) { if pipeline == nil { return nil, errors.New("azureblob.OpenBucket: pipeline is required") } if accountName == "" { return nil, errors.New("azureblob.OpenBucket: accountName is required") } if containerName == "" { return nil, errors.New("azureblob.OpenBucket: containerName is required") } if opts == nil { opts = &Options{} } blobURL, err := url.Parse(fmt.Sprintf("https://%s.blob.core.windows.net", accountName)) if err != nil { return nil, err } if opts.SASToken != "" { // The Azure portal includes a leading "?" for the SASToken, which we // don't want here. blobURL.RawQuery = strings.TrimPrefix(string(opts.SASToken), "?") } serviceURL := azblob.NewServiceURL(*blobURL, pipeline) return &bucket{ name: containerName, pageMarkers: map[string]azblob.Marker{}, serviceURL: &serviceURL, containerURL: serviceURL.NewContainerURL(containerName), opts: opts, }, nil } // Close implements driver.Close. func (b *bucket) Close() error { return nil } // Copy implements driver.Copy. func (b *bucket) Copy(ctx context.Context, dstKey, srcKey string, opts *driver.CopyOptions) error { dstKey = escapeKey(dstKey, false) dstBlobURL := b.containerURL.NewBlobURL(dstKey) srcKey = escapeKey(srcKey, false) srcURL := b.containerURL.NewBlobURL(srcKey).URL() md := azblob.Metadata{} mac := azblob.ModifiedAccessConditions{} bac := azblob.BlobAccessConditions{} if opts.BeforeCopy != nil { asFunc := func(i interface{}) bool { switch v := i.(type) { case *azblob.Metadata: *v = md return true case **azblob.ModifiedAccessConditions: *v = &mac return true case **azblob.BlobAccessConditions: *v = &bac return true } return false } if err := opts.BeforeCopy(asFunc); err != nil { return err } } resp, err := dstBlobURL.StartCopyFromURL(ctx, srcURL, md, mac, bac) if err != nil { return err } copyStatus := resp.CopyStatus() nErrors := 0 for copyStatus == azblob.CopyStatusPending { // Poll until the copy is complete. time.Sleep(500 * time.Millisecond) propertiesResp, err := dstBlobURL.GetProperties(ctx, azblob.BlobAccessConditions{}) if err != nil { // A GetProperties failure may be transient, so allow a couple // of them before giving up. nErrors++ if ctx.Err() != nil || nErrors == 3 { return err } } copyStatus = propertiesResp.CopyStatus() } if copyStatus != azblob.CopyStatusSuccess { return fmt.Errorf("Copy failed with status: %s", copyStatus) } return nil } // Delete implements driver.Delete. func (b *bucket) Delete(ctx context.Context, key string) error { key = escapeKey(key, false) blockBlobURL := b.containerURL.NewBlockBlobURL(key) _, err := blockBlobURL.Delete(ctx, azblob.DeleteSnapshotsOptionInclude, azblob.BlobAccessConditions{}) return err } // reader reads an azblob. It implements io.ReadCloser. 
type reader struct { body io.ReadCloser attrs driver.ReaderAttributes raw *azblob.DownloadResponse } func (r *reader) Read(p []byte) (int, error) { return r.body.Read(p) } func (r *reader) Close() error { return r.body.Close() } func (r *reader) Attributes() *driver.ReaderAttributes { return &r.attrs } func (r *reader) As(i interface{}) bool { p, ok := i.(*azblob.DownloadResponse) if !ok { return false } *p = *r.raw return true } // NewRangeReader implements driver.NewRangeReader. func (b *bucket) NewRangeReader(ctx context.Context, key string, offset, length int64, opts *driver.ReaderOptions) (driver.Reader, error) { key = escapeKey(key, false) blockBlobURL := b.containerURL.NewBlockBlobURL(key) blockBlobURLp := &blockBlobURL accessConditions := &azblob.BlobAccessConditions{} end := length if end < 0 { end = azblob.CountToEnd } if opts.BeforeRead != nil { asFunc := func(i interface{}) bool { if p, ok := i.(**azblob.BlockBlobURL); ok { *p = blockBlobURLp return true } if p, ok := i.(**azblob.BlobAccessConditions); ok { *p = accessConditions return true } return false } if err := opts.BeforeRead(asFunc); err != nil { return nil, err } } blobDownloadResponse, err := blockBlobURLp.Download(ctx, offset, end, *accessConditions, false) if err != nil { return nil, err } attrs := driver.ReaderAttributes{ ContentType: blobDownloadResponse.ContentType(), Size: getSize(blobDownloadResponse.ContentLength(), blobDownloadResponse.ContentRange()), ModTime: blobDownloadResponse.LastModified(), } var body io.ReadCloser if length == 0 { body = http.NoBody } else { body = blobDownloadResponse.Body(azblob.RetryReaderOptions{MaxRetryRequests: defaultMaxDownloadRetryRequests}) } return &reader{ body: body, attrs: attrs, raw: blobDownloadResponse, }, nil } func getSize(contentLength int64, contentRange string) int64 { // Default size to ContentLength, but that's incorrect for partial-length reads, // where ContentLength refers to the size of the returned Body, not the entire // size of the blob. ContentRange has the full size. size := contentLength if contentRange != "" { // Sample: bytes 10-14/27 (where 27 is the full size). parts := strings.Split(contentRange, "/") if len(parts) == 2 { if i, err := strconv.ParseInt(parts[1], 10, 64); err == nil { size = i } } } return size } // As implements driver.As. func (b *bucket) As(i interface{}) bool { p, ok := i.(**azblob.ContainerURL) if !ok { return false } *p = &b.containerURL return true } // As implements driver.ErrorAs. func (b *bucket) ErrorAs(err error, i interface{}) bool { switch v := err.(type) { case azblob.StorageError: if p, ok := i.(*azblob.StorageError); ok { *p = v return true } } return false } func (b *bucket) ErrorCode(err error) gcerrors.ErrorCode { serr, ok := err.(azblob.StorageError) switch { case !ok: return gcerrors.Unknown case serr.ServiceCode() == azblob.ServiceCodeBlobNotFound || serr.Response().StatusCode == 404: // Check and fail both the SDK ServiceCode and the Http Response Code for NotFound return gcerrors.NotFound default: return gcerrors.Unknown } } // Attributes implements driver.Attributes. 
func (b *bucket) Attributes(ctx context.Context, key string) (*driver.Attributes, error) { key = escapeKey(key, false) blockBlobURL := b.containerURL.NewBlockBlobURL(key) blobPropertiesResponse, err := blockBlobURL.GetProperties(ctx, azblob.BlobAccessConditions{}) if err != nil { return nil, err } azureMD := blobPropertiesResponse.NewMetadata() md := make(map[string]string, len(azureMD)) for k, v := range azureMD { // See the package comments for more details on escaping of metadata // keys & values. md[escape.HexUnescape(k)] = escape.URLUnescape(v) } return &driver.Attributes{ CacheControl: blobPropertiesResponse.CacheControl(), ContentDisposition: blobPropertiesResponse.ContentDisposition(), ContentEncoding: blobPropertiesResponse.ContentEncoding(), ContentLanguage: blobPropertiesResponse.ContentLanguage(), ContentType: blobPropertiesResponse.ContentType(), Size: blobPropertiesResponse.ContentLength(), MD5: blobPropertiesResponse.ContentMD5(), ModTime: blobPropertiesResponse.LastModified(), Metadata: md, AsFunc: func(i interface{}) bool { p, ok := i.(*azblob.BlobGetPropertiesResponse) if !ok { return false } *p = *blobPropertiesResponse return true }, }, nil } // ListPaged implements driver.ListPaged. func (b *bucket) ListPaged(ctx context.Context, opts *driver.ListOptions) (*driver.ListPage, error) { pageSize := opts.PageSize if pageSize == 0 { pageSize = defaultPageSize } marker := azblob.Marker{} if len(opts.PageToken) > 0 { if m, ok := b.pageMarkers[string(opts.PageToken)]; ok { marker = m } } azOpts := azblob.ListBlobsSegmentOptions{ MaxResults: int32(pageSize), Prefix: escapeKey(opts.Prefix, true), } if opts.BeforeList != nil { asFunc := func(i interface{}) bool { p, ok := i.(**azblob.ListBlobsSegmentOptions) if !ok { return false } *p = &azOpts return true } if err := opts.BeforeList(asFunc); err != nil { return nil, err } } listBlob, err := b.containerURL.ListBlobsHierarchySegment(ctx, marker, escapeKey(opts.Delimiter, true), azOpts) if err != nil { return nil, err } page := &driver.ListPage{} page.Objects = []*driver.ListObject{} for _, blobPrefix := range listBlob.Segment.BlobPrefixes { page.Objects = append(page.Objects, &driver.ListObject{ Key: unescapeKey(blobPrefix.Name), Size: 0, IsDir: true, AsFunc: func(i interface{}) bool { p, ok := i.(*azblob.BlobPrefix) if !ok { return false } *p = blobPrefix return true }}) } for _, blobInfo := range listBlob.Segment.BlobItems { page.Objects = append(page.Objects, &driver.ListObject{ Key: unescapeKey(blobInfo.Name), ModTime: blobInfo.Properties.LastModified, Size: *blobInfo.Properties.ContentLength, MD5: blobInfo.Properties.ContentMD5, IsDir: false, AsFunc: func(i interface{}) bool { p, ok := i.(*azblob.BlobItem) if !ok { return false } *p = blobInfo return true }, }) } if listBlob.NextMarker.NotDone() { token := uuid.New().String() b.pageMarkers[token] = listBlob.NextMarker page.NextPageToken = []byte(token) } if len(listBlob.Segment.BlobPrefixes) > 0 && len(listBlob.Segment.BlobItems) > 0 { sort.Slice(page.Objects, func(i, j int) bool { return page.Objects[i].Key < page.Objects[j].Key }) } return page, nil } // SignedURL implements driver.SignedURL. 
func (b *bucket) SignedURL(ctx context.Context, key string, opts *driver.SignedURLOptions) (string, error) { if b.opts.Credential == nil { return "", errors.New("to use SignedURL, you must call OpenBucket with a non-nil Options.Credential") } key = escapeKey(key, false) blockBlobURL := b.containerURL.NewBlockBlobURL(key) srcBlobParts := azblob.NewBlobURLParts(blockBlobURL.URL()) var err error srcBlobParts.SAS, err = azblob.BlobSASSignatureValues{ Protocol: azblob.SASProtocolHTTPS, ExpiryTime: time.Now().UTC().Add(opts.Expiry), ContainerName: b.name, BlobName: srcBlobParts.BlobName, Permissions: azblob.BlobSASPermissions{Read: true}.String(), }.NewSASQueryParameters(b.opts.Credential) if err != nil { return "", err } srcBlobURLWithSAS := srcBlobParts.URL() return srcBlobURLWithSAS.String(), nil } type writer struct { ctx context.Context blockBlobURL *azblob.BlockBlobURL uploadOpts *azblob.UploadStreamToBlockBlobOptions w *io.PipeWriter donec chan struct{} err error } // escapeKey does all required escaping for UTF-8 strings to work with Azure. // isPrefix indicates whether the key is a full key, or a prefix/delimiter. func escapeKey(key string, isPrefix bool) string { return escape.HexEscape(key, func(r []rune, i int) bool { c := r[i] switch { // Azure does not work well with backslashes in blob names. case c == '\\': return true // Azure doesn't handle these characters (determined via experimentation). case c < 32 || c == 127: return true // Escape trailing "/" for full keys, otherwise Azure can't address them // consistently. case !isPrefix && i == len(key)-1 && c == '/': return true // For "../", escape the trailing slash. case i > 1 && r[i] == '/' && r[i-1] == '.' && r[i-2] == '.': return true } return false }) } // unescapeKey reverses escapeKey. func unescapeKey(key string) string { return escape.HexUnescape(key) } // NewTypedWriter implements driver.NewTypedWriter. func (b *bucket) NewTypedWriter(ctx context.Context, key string, contentType string, opts *driver.WriterOptions) (driver.Writer, error) { key = escapeKey(key, false) blockBlobURL := b.containerURL.NewBlockBlobURL(key) if opts.BufferSize == 0 { opts.BufferSize = defaultUploadBlockSize } md := make(map[string]string, len(opts.Metadata)) for k, v := range opts.Metadata { // See the package comments for more details on escaping of metadata // keys & values. e := escape.HexEscape(k, func(runes []rune, i int) bool { c := runes[i] switch { case i == 0 && c >= '0' && c <= '9': return true case escape.IsASCIIAlphanumeric(c): return false case c == '_': return false } return true }) if _, ok := md[e]; ok { return nil, fmt.Errorf("duplicate keys after escaping: %q => %q", k, e) } md[e] = escape.URLEscape(v) } uploadOpts := &azblob.UploadStreamToBlockBlobOptions{ BufferSize: opts.BufferSize, MaxBuffers: defaultUploadBuffers, Metadata: md, BlobHTTPHeaders: azblob.BlobHTTPHeaders{ CacheControl: opts.CacheControl, ContentDisposition: opts.ContentDisposition, ContentEncoding: opts.ContentEncoding, ContentLanguage: opts.ContentLanguage, ContentMD5: opts.ContentMD5, ContentType: contentType, }, } if opts.BeforeWrite != nil { asFunc := func(i interface{}) bool { p, ok := i.(**azblob.UploadStreamToBlockBlobOptions) if !ok { return false } *p = uploadOpts return true } if err := opts.BeforeWrite(asFunc); err != nil { return nil, err } } return &writer{ ctx: ctx, blockBlobURL: &blockBlobURL, uploadOpts: uploadOpts, donec: make(chan struct{}), }, nil } // Write appends p to w. User must call Close to close the w after done writing. 
func (w *writer) Write(p []byte) (int, error) { if len(p) == 0 { return 0, nil } if w.w == nil { pr, pw := io.Pipe() w.w = pw if err := w.open(pr); err != nil { return 0, err } } return w.w.Write(p) } func (w *writer) open(pr *io.PipeReader) error { go func() { defer close(w.donec) var body io.Reader if pr == nil { body = http.NoBody } else { body = pr } _, w.err = azblob.UploadStreamToBlockBlob(w.ctx, body, *w.blockBlobURL, *w.uploadOpts) if w.err != nil { if pr != nil { pr.CloseWithError(w.err) } return } }() return nil } // Close completes the writer and closes it. Any error occurring during write will // be returned. If a writer is closed before any Write is called, Close will // create an empty file at the given key. func (w *writer) Close() error { if w.w == nil { w.open(nil) } else if err := w.w.Close(); err != nil { return err } <-w.donec return w.err }
1
19,675
How does the user use this? By converting the type using the `As` function and adding the header? Maybe add an example of how (a sketch follows after this entry).
google-go-cloud
go
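The review comment above asks how a user would actually attach the "x-ms-blob-type" header when issuing a PUT against a signed URL. The following is a minimal client-side sketch, not code from this package: the account, container and SAS token in the URL are placeholders, and it assumes the signed URL was obtained separately (for instance from a SAS-generating call made with write permission, rather than the read-only SignedURL implementation shown above).

package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
)

// putToSignedURL uploads body to a pre-signed Azure Block Blob URL. Azure
// rejects the PUT unless the request carries "x-ms-blob-type: BlockBlob",
// because the service must be told which blob type to create; this package
// always works with block blobs.
func putToSignedURL(signedURL, body string) error {
	req, err := http.NewRequest(http.MethodPut, signedURL, strings.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("x-ms-blob-type", "BlockBlob")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	// A successful Put Blob operation returns 201 Created.
	if resp.StatusCode != http.StatusCreated {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return nil
}

func main() {
	// Placeholder URL: in practice this comes from a write-capable SAS,
	// not from the read-only signed URL generated by the code above.
	url := "https://myaccount.blob.core.windows.net/mycontainer/mykey?<sas-token>"
	if err := putToSignedURL(url, "hello world"); err != nil {
		log.Fatal(err)
	}
}

If the package later gains a way to set this header on the caller's behalf, pointing users at that would be a cleaner answer than a hand-rolled request like the one sketched here.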
@@ -38,11 +38,14 @@ import java.util.List; */ public class DynamicLangApiMethodTransformer { private final ApiMethodParamTransformer apiMethodParamTransformer; - private final InitCodeTransformer initCodeTransformer = new InitCodeTransformer(); + private final InitCodeTransformer initCodeTransformer; private final LongRunningTransformer lroTransformer = new LongRunningTransformer(); - public DynamicLangApiMethodTransformer(ApiMethodParamTransformer apiMethodParamTransformer) { + public DynamicLangApiMethodTransformer( + ApiMethodParamTransformer apiMethodParamTransformer, + InitCodeTransformer initCodeTransformer) { this.apiMethodParamTransformer = apiMethodParamTransformer; + this.initCodeTransformer = initCodeTransformer; } public OptionalArrayMethodView generateMethod(MethodTransformerContext context) {
1
/* Copyright 2017 Google Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.api.codegen.transformer; import com.google.api.codegen.ServiceMessages; import com.google.api.codegen.config.FieldConfig; import com.google.api.codegen.config.GrpcStreamingConfig.GrpcStreamingType; import com.google.api.codegen.config.MethodConfig; import com.google.api.codegen.metacode.InitCodeContext; import com.google.api.codegen.metacode.InitCodeContext.InitCodeOutputType; import com.google.api.codegen.util.Name; import com.google.api.codegen.viewmodel.ApiMethodDocView; import com.google.api.codegen.viewmodel.ClientMethodType; import com.google.api.codegen.viewmodel.InitCodeView; import com.google.api.codegen.viewmodel.OptionalArrayMethodView; import com.google.api.codegen.viewmodel.RequestObjectParamView; import com.google.api.tools.framework.model.Field; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import java.util.ArrayList; import java.util.List; /** * DynamicLangApiMethodTransformer generates view objects from method definitions for dynamic * languages. */ public class DynamicLangApiMethodTransformer { private final ApiMethodParamTransformer apiMethodParamTransformer; private final InitCodeTransformer initCodeTransformer = new InitCodeTransformer(); private final LongRunningTransformer lroTransformer = new LongRunningTransformer(); public DynamicLangApiMethodTransformer(ApiMethodParamTransformer apiMethodParamTransformer) { this.apiMethodParamTransformer = apiMethodParamTransformer; } public OptionalArrayMethodView generateMethod(MethodTransformerContext context) { SurfaceNamer namer = context.getNamer(); OptionalArrayMethodView.Builder apiMethod = OptionalArrayMethodView.newBuilder(); if (context.getMethodConfig().isPageStreaming()) { apiMethod.type(ClientMethodType.PagedOptionalArrayMethod); } else { apiMethod.type(ClientMethodType.OptionalArrayMethod); } apiMethod.apiClassName(namer.getApiWrapperClassName(context.getInterface())); apiMethod.apiVariableName(namer.getApiWrapperVariableName(context.getInterface())); apiMethod.apiModuleName(namer.getApiWrapperModuleName()); InitCodeOutputType initCodeOutputType = context.getMethod().getRequestStreaming() ? 
InitCodeOutputType.SingleObject : InitCodeOutputType.FieldList; InitCodeView initCode = initCodeTransformer.generateInitCode( context.cloneWithEmptyTypeTable(), createInitCodeContext( context, context.getMethodConfig().getRequiredFieldConfigs(), initCodeOutputType)); apiMethod.initCode(initCode); apiMethod.doc(generateMethodDoc(context)); apiMethod.name( namer.getApiMethodName(context.getMethod(), context.getMethodConfig().getVisibility())); apiMethod.requestTypeName( context.getTypeTable().getAndSaveNicknameFor(context.getMethod().getInputType())); apiMethod.hasRequestParameters(initCode.lines().size() > 0); apiMethod.hasReturnValue(!ServiceMessages.s_isEmptyType(context.getMethod().getOutputType())); apiMethod.key(namer.getMethodKey(context.getMethod())); apiMethod.grpcMethodName(namer.getGrpcMethodName(context.getMethod())); apiMethod.stubName(namer.getStubName(context.getTargetInterface())); apiMethod.methodParams(apiMethodParamTransformer.generateMethodParams(context)); apiMethod.requiredRequestObjectParams( generateRequestObjectParams(context, context.getMethodConfig().getRequiredFieldConfigs())); apiMethod.optionalRequestObjectParams( generateRequestObjectParams(context, context.getMethodConfig().getOptionalFieldConfigs())); Iterable<FieldConfig> filteredFieldConfigs = removePageTokenFieldConfig(context, context.getMethodConfig().getOptionalFieldConfigs()); apiMethod.optionalRequestObjectParamsNoPageToken( generateRequestObjectParams(context, filteredFieldConfigs)); GrpcStreamingType grpcStreamingType = context.getMethodConfig().getGrpcStreamingType(); apiMethod.grpcStreamingType(grpcStreamingType); apiMethod.isSingularRequestMethod( grpcStreamingType.equals(GrpcStreamingType.NonStreaming) || grpcStreamingType.equals(GrpcStreamingType.ServerStreaming)); apiMethod.longRunningView( context.getMethodConfig().isLongRunningOperation() ? 
lroTransformer.generateDetailView(context) : null); return apiMethod.build(); } private ApiMethodDocView generateMethodDoc(MethodTransformerContext context) { ApiMethodDocView.Builder docBuilder = ApiMethodDocView.newBuilder(); docBuilder.mainDocLines( context.getNamer().getDocLines(context.getMethod(), context.getMethodConfig())); docBuilder.paramDocs(apiMethodParamTransformer.generateParamDocs(context)); docBuilder.returnTypeName( context .getNamer() .getDynamicLangReturnTypeName(context.getMethod(), context.getMethodConfig())); docBuilder.throwsDocLines(new ArrayList<String>()); return docBuilder.build(); } private List<RequestObjectParamView> generateRequestObjectParams( MethodTransformerContext context, Iterable<FieldConfig> fieldConfigs) { List<RequestObjectParamView> params = new ArrayList<>(); for (FieldConfig fieldConfig : fieldConfigs) { params.add(generateRequestObjectParam(context, fieldConfig)); } return params; } private Iterable<FieldConfig> removePageTokenFieldConfig( MethodTransformerContext context, Iterable<FieldConfig> fieldConfigs) { MethodConfig methodConfig = context.getMethodConfig(); if (methodConfig == null || !methodConfig.isPageStreaming()) { return fieldConfigs; } final Field requestTokenField = methodConfig.getPageStreaming().getRequestTokenField(); return Iterables.filter( fieldConfigs, new Predicate<FieldConfig>() { @Override public boolean apply(FieldConfig fieldConfig) { return !fieldConfig.getField().equals(requestTokenField); } }); } private RequestObjectParamView generateRequestObjectParam( MethodTransformerContext context, FieldConfig fieldConfig) { SurfaceNamer namer = context.getNamer(); FeatureConfig featureConfig = context.getFeatureConfig(); ModelTypeTable typeTable = context.getTypeTable(); Field field = fieldConfig.getField(); RequestObjectParamView.Builder param = RequestObjectParamView.newBuilder(); param.name(namer.getVariableName(field)); param.nameAsMethodName(namer.getFieldGetFunctionName(featureConfig, fieldConfig)); param.typeName(typeTable.getAndSaveNicknameFor(field.getType())); param.elementTypeName(typeTable.getAndSaveNicknameForElementType(field.getType())); param.setCallName(namer.getFieldSetFunctionName(featureConfig, fieldConfig)); param.addCallName(namer.getFieldAddFunctionName(field)); param.isMap(field.getType().isMap()); param.isArray(!field.getType().isMap() && field.getType().isRepeated()); return param.build(); } private InitCodeContext createInitCodeContext( MethodTransformerContext context, Iterable<FieldConfig> fieldConfigs, InitCodeOutputType initCodeOutputType) { return InitCodeContext.newBuilder() .initObjectType(context.getMethod().getInputType()) .suggestedName(Name.from("request")) .initFieldConfigStrings(context.getMethodConfig().getSampleCodeInitFields()) .initValueConfigMap(InitCodeTransformer.createCollectionMap(context)) .initFields(FieldConfig.toFieldIterable(fieldConfigs)) .outputType(initCodeOutputType) .fieldConfigMap(FieldConfig.toFieldConfigMap(fieldConfigs)) .build(); } }
1
21,404
I'm trying to understand the effects of this change. Is setting this to something other than the old default (that is, `initCodeTransformer = new InitCodeTransformer()`) generally necessary, or are we doing this only to support Python?
googleapis-gapic-generator
java
@@ -151,14 +151,15 @@ // // Representing Keys // -// The key of a docstore document is some function of its contents, usually a field. +// The key of a docstore document is its unique identifier, usually a field. // Keys never appear alone in the docstore API, only as part of a document. For // instance, to retrieve a document by key, you pass the Collection.Get method // a document as a struct pointer or map with the key field populated, and docstore -// populates the rest of that argument with the stored contents. +// populates the rest of that argument with the stored contents. Docstore +// doesn't take zero-value key. // -// When you open a collection using the OpenCollection method of the service-specific driver or -// a URL, you specify how to extract the key from a document. +// When you open a collection using the constructor of the service-specific +// driver or a URL, you specify how to extract the key from a document. // Usually, you provide the name of the key field, as in the example below: // // coll, err := memdocstore.OpenCollection("SSN", nil)
1
// Copyright 2019 The Go Cloud Development Kit Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package docstore provides a portable way of interacting with a document store. // A document store is a service that stores data in semi-structured JSON-like // documents. Like other NoSQL databases, document stores are schemaless. // See https://en.wikipedia.org/wiki/Document-oriented_database for more information. // // Subpackages contain distinct implementations ("drivers") of docstore for public // cloud services like Google Cloud Firestore and Amazon DynamoDB as well as any MongoDB // or MongoDB-compatible service hosted by a cloud provider or on-premise. For example, you can // use Azure Cosmos DB and Amazon DocumentDB via the MongoDB driver. There is also // memdocstore, an in-process, in-memory implementation suitable for testing and // development. // // In docstore, documents are grouped into collections, and each document has a key // that is unique in its collection. You can add, retrieve, modify and delete // documents by key, and you can query a collection to retrieve documents that match // certain criteria. // // Your application should import one of the service-specific subpackages and use // its exported functions to create a *Collection; do not use the NewCollection // function in this package. For example: // // coll, err := memdocstore.OpenCollection("SSN", nil) // if err != nil { // return fmt.Errorf("opening collection: %v", err) // } // defer coll.Close() // // coll is a *docstore.Collection // // Then, write your application code using the *Collection type. You can easily // reconfigure your initialization code to choose a different driver. // You can develop your application locally using memdocstore, or deploy it to // multiple Cloud providers. You may find http://github.com/google/wire useful // for managing your initialization code. // // Alternatively, you can construct a *Collection via a URL and the // OpenCollection function. See https://gocloud.dev/concepts/urls for more // information. // // // Representing Documents // // A document is a set of named fields, each with a value. A field's value can be a scalar, // a list, or a nested document. // // Docstore allows you to represent documents as either map[string]interface{} or // struct pointers. When you represent a document as a map, the fields are map keys // and the values are map values. Lists are represented with slices. For example, // here is a document about a book described as a map: // // doc := map[string]interface{}{ // "Title": "The Master and Margarita", // "Author": map[string]interface{}{ // "First": "Mikhail", // "Last": "Bulgakov", // }, // "PublicationYears": []int{1967, 1973}, // } // // Note that the value of "PublicationYears" is a list, and the value of "Author" is // itself a document. 
// // Here is the same document represented with structs: // // type Book struct { // Title string // Author Name // PublicationYears []int // } // // type Name struct { // First, Last string // } // // doc := &Book{ // Title: "The Master and Margarita", // Author: Name{ // First: "Mikhail", // Last: "Bulgakov", // }, // PublicationYears: []int{1967, 1973}, // } // // You must use a pointer to a struct to represent a document, although structs // nested inside a document, like the Name struct above, need not be pointers. // // Maps are best for applications where you don't know the structure of the // documents. Using structs is preferred because it enforces some structure on your // data. // // By default, Docstore treats a struct's exported fields as the fields of the // document. You can alter this default mapping by using a struct tag beginning // with "docstore:". Docstore struct tags support renaming, omitting fields // unconditionally, or omitting them only when they are empty, exactly like // encoding/json. For example, this is the Book struct with different field // names: // // type Book struct { // Title string `docstore:"title"` // Author Name `docstore:"author"` // PublicationYears []int `docstore:"pub_years,omitempty"` // NumPublications int `docstore:"-"` // } // // This struct describes a document with field names "title", "author" and // "pub_years". The pub_years field is omitted from the stored document if it has // length zero. The NumPublications field is never stored because it can easily be // computed from the PublicationYears field. // // Given a document field "Foo" and a struct type document, Docstore's decoder // will look through the destination struct's field to find (in order of // preference): // - An exported field with a tag of "Foo"; // - An exported field named "Foo". // // Note that unlike encoding/json, Docstore does case-sensitive matching during // decoding to match the behavior of decoders in most docstore services. // // // Representing Data // // Values stored in document fields can be any of a wide range of types. All // primitive types except for complex numbers are supported, as well as slices and // maps (the map key type must be a string, an integer, or a type that implements // encoding.TextMarshaler). In addition, any type that implements // encoding.BinaryMarshaler or encoding.TextMarshaler is permitted. This set of types // closely matches the encoding/json package (see https://golang.org/pkg/encoding/json). // // Times deserve special mention. Docstore can store and retrieve values of type // time.Time, with two caveats. First, the timezone will not be preserved. Second, // Docstore guarantees only that time.Time values are represented to millisecond // precision. Many services will do better, but if you need to be sure that times // are stored with nanosecond precision, convert the time.Time to another type before // storing and re-create when you retrieve it. For instance, if you store Unix // time in nanoseconds using time's UnixNano method, you can get the original // time back (in the local timezone) with the time.Unix function. // // // Representing Keys // // The key of a docstore document is some function of its contents, usually a field. // Keys never appear alone in the docstore API, only as part of a document. 
For // instance, to retrieve a document by key, you pass the Collection.Get method // a document as a struct pointer or map with the key field populated, and docstore // populates the rest of that argument with the stored contents. // // When you open a collection using the OpenCollection method of the service-specific driver or // a URL, you specify how to extract the key from a document. // Usually, you provide the name of the key field, as in the example below: // // coll, err := memdocstore.OpenCollection("SSN", nil) // // Here, the "SSN" field of the document is used as the key. Some drivers let you // supply a function to extract the key from the document, which can be useful if the // key is composed of more than one field. // // // Actions // // Docstore supports six actions on documents as methods on the Collection type: // - Get retrieves a document. // - Create creates a new document. // - Replace replaces an existing document. // - Put puts a document into a collection, replacing it if it is already present. // - Update applies a set of modifications to a document. // - Delete deletes a document. // // Each action acts atomically on a single document. You can execute actions // individually or you can group them into an action list, like so: // // err := coll.Actions().Put(doc1).Replace(doc2).Get(doc3).Do(ctx) // // When you use an action list, docstore will try to optimize the execution of the // actions. For example, multiple Get actions may be combined into a single "batch // get" RPC. For the most part, actions in a list execute in an undefined order // (perhaps concurrently) and independently, but read and write operations on the same // document are executed in the user-specified order. See the documentation of // ActionList for details. // // // Revisions // // Docstore supports document revisions to distinguish different versions of a // document and enable optimistic locking. By default, Docstore stores the // revision in the field named "DocstoreRevision" (stored in the constant // DefaultRevisionField). Providers give you the option of changing that field // name. // // When you pass a document with a revision field to a write action, Docstore // will give it a revision at creation time or update the revision value when // modifying the document. If you don't want Docstore to handle any revision // logic, simply do not have the revision field in your document. // // When you pass a document with a non-nil revision to Put, Replace, Update or // Delete, Docstore will also compare the revision of the stored document to // that of the given document before making the change. It returns an error with // code FailedPrecondition on mismatch. (See https://gocloud.dev/gcerrors for // information about error codes.) If modification methods are called on a // document struct or map a nil revision field, then no revision checks are // performed, and changes are forced blindly, but a new revision will still be // given for the document. For example, if you call Get to retrieve a document // with a revision, then later perform a write action with that same document, // it will fail if the document was changed since the Get. // // Since different services use different types for revisions, revision fields // of unspecified type must be handled. When defining a document struct, // define the field to be of type interface{}. For example, // // type User { // Name string // DocstoreRevision interface{} // } // // // Queries // // Docstore supports querying within a collection. 
Call the Query method on // Collection to obtain a Query value, then build your query by calling Query methods // like Where, Limit and so on. Finally, call the Get method on the query to execute it. // The result is an iterator, whose use is described below. // // iter := coll.Query().Where("size", ">", 10).Limit(5).Get(ctx) // // The Where method defines a filter condition, much like a WHERE clause in SQL. // Conditions are of the form "field op value", where field is any document field // path (including dot-separated paths), op is one of "=", ">", "<", ">=" or "<=", // and value can be any value. // // iter := coll.Query().Where("Author.Last", "=", "Bulgakov").Limit(3).Get(ctx) // // You can make multiple Where calls. In some cases, parts of a Where clause may be // processed in the driver rather than natively by the backing service, which may have // performance implications for large result sets. See the driver package // documentation for details. // // Use the DocumentIterator returned from Query.Get by repeatedly calling its Next // method until it returns io.EOF. Always call Stop when you are finished with an // iterator. It is wise to use a defer statement for this. // // iter := coll.Query().Where("size", ">", 10).Limit(5).Get(ctx) // defer iter.Stop() // for { // m := map[string]interface{}{} // err := iter.Next(ctx, m) // if err == io.EOF { // break // } // if err != nil { // return err // } // fmt.Println(m) // } // // // Errors // // The errors returned from this package can be inspected in several ways: // // The Code function from https://gocloud.dev/gcerrors will return an error code, also // defined in that package, when invoked on an error. // // The Collection.ErrorAs method can retrieve the underlying driver error from // the returned error. See the specific driver's package doc for the supported // types. // // // OpenCensus Integration // // TODO(jba): implement and document // package docstore // import "gocloud.dev/docstore"
1
19,542
"Constructor" isn't a standard term in Go or this project, although we use it informally amongst ourselves. And I think it will confuse people coming from languages like Java. So can we leave this as it was?
google-go-cloud
go
@@ -114,7 +114,7 @@ class TestCtuFailure(unittest.TestCase): """ Test that Clang indeed logs the AST import events when using on-demand mode. """ - self.__set_up_test_dir('ctu_on_demand_failure') + self.__set_up_test_dir('ctu_failure') output = self.__do_ctu_all(on_demand=True, extra_args=["--verbose", "debug"])
1
# # ------------------------------------------------------------------------- # # Part of the CodeChecker project, under the Apache License v2.0 with # LLVM Exceptions. See LICENSE for license information. # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception # # ------------------------------------------------------------------------- """ CTU function test.""" import json import os import shutil import unittest import zipfile from codechecker_analyzer import host_check from libtest import env from libtest import project from libtest.codechecker import call_command from libtest.ctu_decorators import makeSkipUnlessCTUCapable, \ makeSkipUnlessCTUOnDemandCapable, makeSkipUnlessCTUDisplayCapable CTU_ATTR = 'ctu_capable' ON_DEMAND_ATTR = 'ctu_on_demand_capable' DISPLAY_PROGRESS_ATTR = 'ctu_display_progress_capable' skipUnlessCTUCapable = makeSkipUnlessCTUCapable(attribute=CTU_ATTR) skipUnlessCTUOnDemandCapable = \ makeSkipUnlessCTUOnDemandCapable(attribute=ON_DEMAND_ATTR) skipUnlessCTUDisplayCapable = \ makeSkipUnlessCTUDisplayCapable(attribute=DISPLAY_PROGRESS_ATTR) class TestCtuFailure(unittest.TestCase): """ Test CTU functionality. """ def setUp(self): """ Set up workspace.""" # TEST_WORKSPACE is automatically set by test package __init__.py . self.test_workspace = os.environ['TEST_WORKSPACE'] test_class = self.__class__.__name__ print('Running ' + test_class + ' tests in ' + self.test_workspace) # Get the CodeChecker cmd if needed for the tests. self._codechecker_cmd = env.codechecker_cmd() self.env = env.codechecker_env() self.report_dir = os.path.join(self.test_workspace, 'reports') os.makedirs(self.report_dir) # Get if clang is CTU-capable or not. cmd = [self._codechecker_cmd, 'analyze', '-h'] output, _ = call_command(cmd, cwd=self.test_workspace, env=self.env) setattr(self, CTU_ATTR, '--ctu-' in output) print("'analyze' reported CTU compatibility? " + str(getattr(self, CTU_ATTR))) setattr(self, ON_DEMAND_ATTR, '--ctu-ast-mode' in output) print("'analyze' reported CTU-on-demand-compatibility? " + str(getattr(self, ON_DEMAND_ATTR))) setattr(self, DISPLAY_PROGRESS_ATTR, host_check.has_analyzer_config_option( self.__getClangSaPath(), 'display-ctu-progress', self.env)) print("Has display-ctu-progress=true? " + str(getattr(self, DISPLAY_PROGRESS_ATTR))) self.__old_pwd = os.getcwd() def __set_up_test_dir(self, project_path): self.test_dir = project.path(project_path) # Fix the "template" build JSONs to contain a proper directory # so the tests work. raw_buildlog = os.path.join(self.test_dir, 'buildlog.json') with open(raw_buildlog, encoding="utf-8", errors="ignore") as log_file: build_json = json.load(log_file) for command in build_json: command['directory'] = self.test_dir os.chdir(self.test_workspace) self.buildlog = os.path.join(self.test_workspace, 'buildlog.json') with open(self.buildlog, 'w', encoding="utf-8", errors="ignore") as log_file: json.dump(build_json, log_file) def tearDown(self): """ Tear down workspace.""" shutil.rmtree(self.report_dir, ignore_errors=True) os.chdir(self.__old_pwd) @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable def test_ctu_logs_ast_import(self): """ Test that Clang indeed logs the AST import events. 
""" self.__set_up_test_dir('ctu_failure') output = self.__do_ctu_all(on_demand=False, extra_args=["--verbose", "debug"]) self.assertIn("CTU loaded AST file", output) @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable @skipUnlessCTUOnDemandCapable def test_ctu_on_demand_logs_ast_import(self): """ Test that Clang indeed logs the AST import events when using on-demand mode. """ self.__set_up_test_dir('ctu_on_demand_failure') output = self.__do_ctu_all(on_demand=True, extra_args=["--verbose", "debug"]) self.assertIn("CTU loaded AST file", output) @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable def test_ctu_failure_zip(self): """ Test the failure zip contains the source of imported TU """ self.__set_up_test_dir('ctu_failure') # The below special checker `ExprInspection` crashes when a function # with a specified name is analyzed. output = self.__do_ctu_all(on_demand=False, extra_args=[ "--verbose", "debug", "-e", "debug.ExprInspection" ]) # lib.c should be logged as its AST is loaded by Clang self.assertRegex(output, r"CTU loaded AST file: .*lib\.c.ast") # We expect a failure archive to be in the failed directory. failed_dir = os.path.join(self.report_dir, "failed") failed_files = os.listdir(failed_dir) self.assertEqual(len(failed_files), 1) # Ctu should fail during analysis of main.c self.assertIn("main.c", failed_files[0]) fail_zip = os.path.join(failed_dir, failed_files[0]) with zipfile.ZipFile(fail_zip, 'r') as archive: files = archive.namelist() self.assertIn("build-action", files) self.assertIn("analyzer-command", files) def check_source_in_archive(source_in_archive): source_file = os.path.join(self.test_dir, source_in_archive) source_in_archive = os.path.join("sources-root", source_file.lstrip('/')) self.assertIn(source_in_archive, files) # Check file content. with archive.open(source_in_archive, 'r') as archived_code: with open(source_file, 'r', encoding="utf-8", errors="ignore") as source_code: self.assertEqual(archived_code.read().decode("utf-8"), source_code.read()) check_source_in_archive("main.c") check_source_in_archive("lib.c") @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable @skipUnlessCTUOnDemandCapable def test_ctu_on_demand_failure_zip(self): """ Test the failure zip contains the source of imported TU when using on-demand mode. """ self.__set_up_test_dir('ctu_on_demand_failure') # The below special checker `ExprInspection` crashes when a function # with a specified name is analyzed. output = self.__do_ctu_all(on_demand=True, extra_args=[ "--verbose", "debug", "-e", "debug.ExprInspection" ]) # lib.c should be logged as its AST is loaded by Clang self.assertRegex(output, r"CTU loaded AST file: .*lib\.c") # We expect a failure archive to be in the failed directory. failed_dir = os.path.join(self.report_dir, "failed") failed_files = os.listdir(failed_dir) self.assertEqual(len(failed_files), 1) # Ctu should fail during analysis of main.c self.assertIn("main.c", failed_files[0]) fail_zip = os.path.join(failed_dir, failed_files[0]) with zipfile.ZipFile(fail_zip, 'r') as archive: files = archive.namelist() self.assertIn("build-action", files) self.assertIn("analyzer-command", files) def check_source_in_archive(source_in_archive): source_file = os.path.join(self.test_dir, source_in_archive) source_in_archive = os.path.join("sources-root", source_file.lstrip('/')) self.assertIn(source_in_archive, files) # Check file content. 
with archive.open(source_in_archive, 'r') as archived_code: with open(source_file, 'r', encoding="utf-8", errors="ignore") as source_code: self.assertEqual(archived_code.read().decode("utf-8"), source_code.read()) check_source_in_archive("main.c") check_source_in_archive("lib.c") @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable def test_ctu_failure_zip_with_headers(self): """ Test the failure zip contains the source of imported TU and all the headers on which the TU depends. """ self.__set_up_test_dir('ctu_failure_with_headers') # The below special checker `ExprInspection` crashes when a function # with a specified name is analyzed. output = self.__do_ctu_all(on_demand=False, extra_args=[ "--verbose", "debug", "-e", "debug.ExprInspection" ]) # lib.c should be logged as its AST is loaded by Clang self.assertRegex(output, r"CTU loaded AST file: .*lib\.c.ast") # We expect a failure archive to be in the failed directory. failed_dir = os.path.join(self.report_dir, "failed") failed_files = os.listdir(failed_dir) self.assertEqual(len(failed_files), 1) # Ctu should fail during analysis of main.c self.assertIn("main.c", failed_files[0]) fail_zip = os.path.join(failed_dir, failed_files[0]) with zipfile.ZipFile(fail_zip, 'r') as archive: files = archive.namelist() self.assertIn("build-action", files) self.assertIn("analyzer-command", files) def check_source_in_archive(source_in_archive): source_file = os.path.join(self.test_dir, source_in_archive) source_in_archive = os.path.join("sources-root", source_file.lstrip('/')) self.assertIn(source_in_archive, files) # Check file content. with archive.open(source_in_archive, 'r') as archived_code: with open(source_file, 'r', encoding="utf-8", errors="ignore") as source_code: self.assertEqual(archived_code.read().decode("utf-8"), source_code.read()) check_source_in_archive("main.c") check_source_in_archive("lib.c") check_source_in_archive("lib.h") @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable @skipUnlessCTUOnDemandCapable def test_ctu_on_demand_failure_zip_with_headers(self): """ Test the failure zip contains the source of imported TU and all the headers on which the TU depends when using on-demand mode. """ self.__set_up_test_dir('ctu_on_demand_failure_with_headers') # The below special checker `ExprInspection` crashes when a function # with a specified name is analyzed. output = self.__do_ctu_all(on_demand=True, extra_args=[ "--verbose", "debug", "-e", "debug.ExprInspection" ]) # lib.c should be logged as its AST is loaded by Clang self.assertRegex(output, r"CTU loaded AST file: .*lib\.c") # We expect a failure archive to be in the failed directory. failed_dir = os.path.join(self.report_dir, "failed") failed_files = os.listdir(failed_dir) self.assertEqual(len(failed_files), 1) # Ctu should fail during analysis of main.c self.assertIn("main.c", failed_files[0]) fail_zip = os.path.join(failed_dir, failed_files[0]) with zipfile.ZipFile(fail_zip, 'r') as archive: files = archive.namelist() self.assertIn("build-action", files) self.assertIn("analyzer-command", files) def check_source_in_archive(source_in_archive): source_file = os.path.join(self.test_dir, source_in_archive) source_in_archive = os.path.join("sources-root", source_file.lstrip('/')) self.assertIn(source_in_archive, files) # Check file content. 
with archive.open(source_in_archive, 'r') as archived_code: with open(source_file, 'r', encoding="utf-8", errors="ignore") as source_code: self.assertEqual(archived_code.read().decode("utf-8"), source_code.read()) check_source_in_archive("main.c") check_source_in_archive("lib.c") check_source_in_archive("lib.h") @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable def test_ctu_fallback(self): """ In case of ctu failure the non ctu analysis will be triggered. """ self.__set_up_test_dir('ctu_failure') output = self.__do_ctu_all(on_demand=False, extra_args=[ "--verbose", "debug", "-e", "debug.ExprInspection", "--ctu-reanalyze-on-failure" ]) # lib.c should be logged as its AST is loaded by Clang self.assertRegex(output, r"CTU loaded AST file: .*lib\.c.ast") # We expect two failure archives to be in the failed directory. # One failure archive is produced by the CTU analysis and the # other archive is produced by the non CTU analysis. failed_dir = os.path.join(self.report_dir, "failed") failed_files = os.listdir(failed_dir) print(failed_files) self.assertEqual(len(failed_files), 2) # Ctu should fail during analysis of main.c self.assertIn("main.c", failed_files[0]) @skipUnlessCTUCapable @skipUnlessCTUDisplayCapable @skipUnlessCTUOnDemandCapable def test_ctu_on_demand_fallback(self): """ In case of ctu failure the non ctu analysis will be triggered when using on-demand-mode. """ self.__set_up_test_dir('ctu_on_demand_failure') output = self.__do_ctu_all(on_demand=True, extra_args=[ "--verbose", "debug", "-e", "debug.ExprInspection", "--ctu-reanalyze-on-failure" ]) # lib.c should be logged as its AST is loaded by Clang self.assertRegex(output, r"CTU loaded AST file: .*lib\.c") # We expect two failure archives to be in the failed directory. # One failure archive is produced by the CTU analysis and the # other archive is produced by the non CTU analysis. failed_dir = os.path.join(self.report_dir, "failed") failed_files = os.listdir(failed_dir) print(failed_files) self.assertEqual(len(failed_files), 2) # Ctu should fail during analysis of main.c self.assertIn("main.c", failed_files[0]) def __do_ctu_all(self, on_demand, extra_args=None): """ Execute a full CTU run. @param extra_args: list of additional arguments """ cmd = [self._codechecker_cmd, 'analyze', '-o', self.report_dir, '--analyzers', 'clangsa', '--ctu-all'] if getattr(self, ON_DEMAND_ATTR): cmd.extend(['--ctu-ast-mode', 'parse-on-demand' if on_demand else 'load-from-pch']) if extra_args is not None: cmd.extend(extra_args) cmd.append(self.buildlog) out, _ = call_command(cmd, cwd=self.test_dir, env=self.env) return out def __getClangSaPath(self): cmd = [self._codechecker_cmd, 'analyzers', '--details', '-o', 'json'] output, _ = call_command(cmd, cwd=self.test_workspace, env=self.env) json_data = json.loads(output) if json_data[0]["name"] == "clangsa": return json_data[0]["path"] if json_data[1]["name"] == "clangsa": return json_data[1]["path"]
1
12,436
Why do we have to change the test dir?
Ericsson-codechecker
c
@@ -65,9 +65,11 @@ dom.isNativelyFocusable = function(el) { return el.type !== 'hidden'; case 'TEXTAREA': case 'SELECT': - case 'DETAILS': + case 'SUMMARY': case 'BUTTON': return true; + case 'DETAILS': + return !el.querySelector('summary'); } return false; };
1
/* global dom */ /** * Determines if focusing has been disabled on an element. * @param {HTMLElement} el The HTMLElement * @return {Boolean} Whether focusing has been disabled on an element. */ function focusDisabled(el) { return ( el.disabled || (el.nodeName.toUpperCase() !== 'AREA' && dom.isHiddenWithCSS(el)) ); } /** * Determines if an element is focusable * @method isFocusable * @memberof axe.commons.dom * @instance * @param {HTMLElement} el The HTMLElement * @return {Boolean} The element's focusability status */ dom.isFocusable = function(el) { 'use strict'; if (focusDisabled(el)) { return false; } else if (dom.isNativelyFocusable(el)) { return true; } // check if the tabindex is specified and a parseable number var tabindex = el.getAttribute('tabindex'); if (tabindex && !isNaN(parseInt(tabindex, 10))) { return true; } return false; }; /** * Determines if an element is focusable without considering its tabindex * @method isNativelyFocusable * @memberof axe.commons.dom * @instance * @param {HTMLElement} el The HTMLElement * @return {Boolean} True if the element is in the focus order but wouldn't be * if its tabindex were removed. Else, false. */ dom.isNativelyFocusable = function(el) { /* eslint indent: 0*/ 'use strict'; if (!el || focusDisabled(el)) { return false; } switch (el.nodeName.toUpperCase()) { case 'A': case 'AREA': if (el.href) { return true; } break; case 'INPUT': return el.type !== 'hidden'; case 'TEXTAREA': case 'SELECT': case 'DETAILS': case 'BUTTON': return true; } return false; }; /** * Determines if an element is in the focus order, but would not be if its * tabindex were unspecified. * @method insertedIntoFocusOrder * @memberof axe.commons.dom * @instance * @param {HTMLElement} el The HTMLElement * @return {Boolean} True if the element is in the focus order but wouldn't be * if its tabindex were removed. Else, false. */ dom.insertedIntoFocusOrder = function(el) { let tabIndex = parseInt(el.getAttribute('tabindex'), 10); // an element that has an invalid tabindex will return 0 or -1 based on // if it is natively focusable or not, which will always be false for this // check as NaN is not > 1 // @see https://www.w3.org/TR/html51/editing.html#the-tabindex-attribute return tabIndex > -1 && dom.isFocusable(el) && !dom.isNativelyFocusable(el); };
1
15,224
This should test the flattened tree instead. details > summary works across shadow tree boundaries.
dequelabs-axe-core
js
@@ -0,0 +1,17 @@ +// MvxBindingLog.cs + +// MvvmCross is licensed using Microsoft Public License (Ms-PL) +// Contributions and inspirations noted in readme.md and license.txt +// +// Project Lead - Stuart Lodge, @slodge, [email protected] + +using MvvmCross.Platform; +using MvvmCross.Platform.Logging; + +namespace MvvmCross +{ + internal static class MvxAndroidLog + { + internal static IMvxLog Instance { get; } = Mvx.Resolve<IMvxLogProvider>().GetLogFor("MvxAndroid"); + } +}
1
1
13,676
Not sure I am a big fan of these duplicated Log classes.
MvvmCross-MvvmCross
.cs
@@ -39,6 +39,10 @@ import ( const ( reasonDomainVerified = "DomainVerified" + CleanUpError = "CleanUpError" + PresentError = "PresentError" + Presented = "Presented" + Failed = "Failed" ) // solver solves ACME challenges by presenting the given token and key in an
1
/* Copyright 2020 The cert-manager Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package acmechallenges import ( "context" "fmt" acmeapi "golang.org/x/crypto/acme" corev1 "k8s.io/api/core/v1" apiequality "k8s.io/apimachinery/pkg/api/equality" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" utilerrors "k8s.io/apimachinery/pkg/util/errors" "github.com/jetstack/cert-manager/pkg/acme" acmecl "github.com/jetstack/cert-manager/pkg/acme/client" cmacme "github.com/jetstack/cert-manager/pkg/apis/acme/v1" cmapi "github.com/jetstack/cert-manager/pkg/apis/certmanager/v1" controllerpkg "github.com/jetstack/cert-manager/pkg/controller" "github.com/jetstack/cert-manager/pkg/feature" dnsutil "github.com/jetstack/cert-manager/pkg/issuer/acme/dns/util" logf "github.com/jetstack/cert-manager/pkg/logs" utilfeature "github.com/jetstack/cert-manager/pkg/util/feature" ) const ( reasonDomainVerified = "DomainVerified" ) // solver solves ACME challenges by presenting the given token and key in an // appropriate way given the config in the Issuer and Certificate. type solver interface { // Present the challenge value with the given solver. Present(ctx context.Context, issuer cmapi.GenericIssuer, ch *cmacme.Challenge) error // Check returns an Error if the propagation check didn't succeed. Check(ctx context.Context, issuer cmapi.GenericIssuer, ch *cmacme.Challenge) error // CleanUp will remove challenge records for a given solver. // This may involve deleting resources in the Kubernetes API Server, or // communicating with other external components (e.g. DNS providers). CleanUp(ctx context.Context, issuer cmapi.GenericIssuer, ch *cmacme.Challenge) error } // Sync will process this ACME Challenge. // It is the core control function for ACME challenges. func (c *controller) Sync(ctx context.Context, ch *cmacme.Challenge) (err error) { log := logf.FromContext(ctx).WithValues("dnsName", ch.Spec.DNSName, "type", ch.Spec.Type) ctx = logf.NewContext(ctx, log) oldChal := ch ch = ch.DeepCopy() if ch.DeletionTimestamp != nil { return c.handleFinalizer(ctx, ch) } defer func() { if apiequality.Semantic.DeepEqual(oldChal.Status, ch.Status) && len(oldChal.Finalizers) == len(ch.Finalizers) { return } _, updateErr := c.cmClient.AcmeV1().Challenges(ch.Namespace).UpdateStatus(context.TODO(), ch, metav1.UpdateOptions{}) if updateErr != nil { err = utilerrors.NewAggregate([]error{err, updateErr}) } }() // bail out early on if processing=false, as this challenge has not been // scheduled yet. if ch.Status.Processing == false { return nil } genericIssuer, err := c.helper.GetGenericIssuer(ch.Spec.IssuerRef, ch.Namespace) if err != nil { return fmt.Errorf("error reading (cluster)issuer %q: %v", ch.Spec.IssuerRef.Name, err) } // if a challenge is in a final state, we bail out early as there is nothing // left for us to do here. 
if acme.IsFinalState(ch.Status.State) { if ch.Status.Presented { solver, err := c.solverFor(ch.Spec.Type) if err != nil { log.Error(err, "error getting solver for challenge") return err } err = solver.CleanUp(ctx, genericIssuer, ch) if err != nil { c.recorder.Eventf(ch, corev1.EventTypeWarning, "CleanUpError", "Error cleaning up challenge: %v", err) ch.Status.Reason = err.Error() log.Error(err, "error cleaning up challenge") return err } ch.Status.Presented = false } ch.Status.Processing = false return nil } cl, err := c.accountRegistry.GetClient(string(genericIssuer.GetUID())) if err != nil { return err } if ch.Status.State == "" { err := c.syncChallengeStatus(ctx, cl, ch) if err != nil { return handleError(ch, err) } // if the state has not changed, return an error if ch.Status.State == "" { return fmt.Errorf("could not determine acme challenge status. retrying after applying back-off") } // the change in the challenges status will trigger a resync. // this ensures our cache is consistent so we don't call Present twice // due to the http01 solver creating resources that this controller // watches/syncs on return nil } if utilfeature.DefaultFeatureGate.Enabled(feature.ValidateCAA) { // check for CAA records. // CAA records are static, so we don't have to present anything // before we check for them. // Find out which identity the ACME server says it will use. dir, err := cl.Discover(ctx) if err != nil { return handleError(ch, err) } // TODO(dmo): figure out if missing CAA identity in directory // means no CAA check is performed by ACME server or if any valid // CAA would stop issuance (strongly suspect the former) if len(dir.CAA) != 0 { err := dnsutil.ValidateCAA(ch.Spec.DNSName, dir.CAA, ch.Spec.Wildcard, c.dns01Nameservers) if err != nil { ch.Status.Reason = fmt.Sprintf("CAA self-check failed: %s", err) return err } } } solver, err := c.solverFor(ch.Spec.Type) if err != nil { return err } if !ch.Status.Presented { err := solver.Present(ctx, genericIssuer, ch) if err != nil { c.recorder.Eventf(ch, corev1.EventTypeWarning, "PresentError", "Error presenting challenge: %v", err) ch.Status.Reason = err.Error() return err } ch.Status.Presented = true c.recorder.Eventf(ch, corev1.EventTypeNormal, "Presented", "Presented challenge using %s challenge mechanism", ch.Spec.Type) } err = solver.Check(ctx, genericIssuer, ch) if err != nil { log.Error(err, "propagation check failed") ch.Status.Reason = fmt.Sprintf("Waiting for %s challenge propagation: %s", ch.Spec.Type, err) key, err := controllerpkg.KeyFunc(ch) // This is an unexpected edge case and should never occur if err != nil { return err } c.queue.AddAfter(key, c.DNS01CheckRetryPeriod) return nil } err = c.acceptChallenge(ctx, cl, ch) if err != nil { return err } return nil } // handleError will handle ACME error types, updating the challenge resource // with any new information found whilst inspecting the error response. // This may include marking the challenge as expired. func handleError(ch *cmacme.Challenge, err error) error { if err == nil { return nil } var acmeErr *acmeapi.Error var ok bool if acmeErr, ok = err.(*acmeapi.Error); !ok { return err } switch acmeErr.ProblemType { // This response type is returned when an authorization has expired or the // request is in some way malformed. // In this case, we should mark the challenge as expired so that the order // can be retried. // TODO: don't mark *all* malformed errors as expired, we may be able to be // more informative to the user by further inspecting the Error response. 
case "urn:ietf:params:acme:error:malformed": ch.Status.State = cmacme.Expired // absorb the error as updating the challenge's status will trigger a sync return nil } if acmeErr.StatusCode >= 400 && acmeErr.StatusCode < 500 { ch.Status.State = cmacme.Errored ch.Status.Reason = fmt.Sprintf("Failed to retrieve Order resource: %v", err) return nil } return err } // handleFinalizer will attempt to 'finalize' the Challenge resource by calling // CleanUp if the resource is in a 'processing' state. func (c *controller) handleFinalizer(ctx context.Context, ch *cmacme.Challenge) (err error) { log := logf.FromContext(ctx, "finalizer") if len(ch.Finalizers) == 0 { return nil } if ch.Finalizers[0] != cmacme.ACMEFinalizer { log.V(logf.DebugLevel).Info("waiting to run challenge finalization...") return nil } defer func() { // call UpdateStatus first as we may have updated the challenge.status.reason field ch, updateErr := c.cmClient.AcmeV1().Challenges(ch.Namespace).UpdateStatus(context.TODO(), ch, metav1.UpdateOptions{}) if updateErr != nil { err = utilerrors.NewAggregate([]error{err, updateErr}) return } // call Update to remove the metadata.finalizers entry ch.Finalizers = ch.Finalizers[1:] _, updateErr = c.cmClient.AcmeV1().Challenges(ch.Namespace).Update(context.TODO(), ch, metav1.UpdateOptions{}) if updateErr != nil { err = utilerrors.NewAggregate([]error{err, updateErr}) return } }() if !ch.Status.Processing { return nil } genericIssuer, err := c.helper.GetGenericIssuer(ch.Spec.IssuerRef, ch.Namespace) if err != nil { return fmt.Errorf("error reading (cluster)issuer %q: %v", ch.Spec.IssuerRef.Name, err) } solver, err := c.solverFor(ch.Spec.Type) if err != nil { log.Error(err, "error getting solver for challenge") return nil } err = solver.CleanUp(ctx, genericIssuer, ch) if err != nil { c.recorder.Eventf(ch, corev1.EventTypeWarning, "CleanUpError", "Error cleaning up challenge: %v", err) ch.Status.Reason = err.Error() log.Error(err, "error cleaning up challenge") return nil } return nil } // syncChallengeStatus will communicate with the ACME server to retrieve the current // state of the Challenge. It will then update the Challenge's status block with the new // state of the Challenge. func (c *controller) syncChallengeStatus(ctx context.Context, cl acmecl.Interface, ch *cmacme.Challenge) error { if ch.Spec.URL == "" { return fmt.Errorf("challenge URL is blank - challenge has not been created yet") } acmeChallenge, err := cl.GetChallenge(ctx, ch.Spec.URL) if err != nil { return err } // TODO: should we validate the State returned by the ACME server here? cmState := cmacme.State(acmeChallenge.Status) // be nice to our users and check if there is an error that we // can tell them about in the reason field // TODO(dmo): problems may be compound and they may be tagged with // a type field that suggests changes we should make (like provisioning // an account). We might be able to handle errors more gracefully using // this info ch.Status.Reason = "" if acmeChallenge.Error != nil { if acmeErr, ok := acmeChallenge.Error.(*acmeapi.Error); ok { ch.Status.Reason = acmeErr.Detail } else { ch.Status.Reason = acmeChallenge.Error.Error() } } ch.Status.State = cmState return nil } // acceptChallenge will accept the challenge with the acme server and then wait // for the authorization to reach a 'final' state. // It will update the challenge's status to reflect the final state of the // challenge if it failed, or the final state of the challenge's authorization // if accepting the challenge succeeds. 
func (c *controller) acceptChallenge(ctx context.Context, cl acmecl.Interface, ch *cmacme.Challenge) error { log := logf.FromContext(ctx, "acceptChallenge") log.V(logf.DebugLevel).Info("accepting challenge with ACME server") // We manually construct an ACME challenge here from our own internal type // to save additional round trips to the ACME server. acmeChal := &acmeapi.Challenge{ URI: ch.Spec.URL, Token: ch.Spec.Token, } acmeChal, err := cl.Accept(ctx, acmeChal) if acmeChal != nil { ch.Status.State = cmacme.State(acmeChal.Status) } if err != nil { log.Error(err, "error accepting challenge") ch.Status.Reason = fmt.Sprintf("Error accepting challenge: %v", err) return handleError(ch, err) } log.V(logf.DebugLevel).Info("waiting for authorization for domain") authorization, err := cl.WaitAuthorization(ctx, ch.Spec.AuthorizationURL) if err != nil { log.Error(err, "error waiting for authorization") return c.handleAuthorizationError(ch, err) } ch.Status.State = cmacme.State(authorization.Status) ch.Status.Reason = "Successfully authorized domain" c.recorder.Eventf(ch, corev1.EventTypeNormal, reasonDomainVerified, "Domain %q verified with %q validation", ch.Spec.DNSName, ch.Spec.Type) return nil } func (c *controller) handleAuthorizationError(ch *cmacme.Challenge, err error) error { authErr, ok := err.(*acmeapi.AuthorizationError) if !ok { return handleError(ch, err) } // TODO: the AuthorizationError above could technically contain the final // state of the authorization in its raw JSON form. This isn't currently // exposed by the ACME client implementation, so for now we fix this to // 'invalid' if the returned type here is an AuthorizationError, which // should be safe as the client library only returns an AuthorizationError // if the returned state is 'invalid' ch.Status.State = cmacme.Invalid ch.Status.Reason = fmt.Sprintf("Error accepting authorization: %v", authErr) c.recorder.Eventf(ch, corev1.EventTypeWarning, "Failed", "Accepting challenge authorization failed: %v", authErr) // return nil here, as accepting the challenge did not error, the challenge // simply failed return nil } func (c *controller) solverFor(challengeType cmacme.ACMEChallengeType) (solver, error) { switch challengeType { case cmacme.ACMEChallengeTypeHTTP01: return c.httpSolver, nil case cmacme.ACMEChallengeTypeDNS01: return c.dnsSolver, nil } return nil, fmt.Errorf("no solver for %q implemented", challengeType) }
1
26,038
It's long-winded, but the convention is to give constants a common prefix; see `reasonDomainVerified`. (A sketch of what that naming would look like follows after this row.)
jetstack-cert-manager
go
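To make that suggestion concrete, here is a minimal sketch, assuming the reviewer simply wants the constants introduced by the patch above renamed with the same `reason` prefix already used by `reasonDomainVerified`. The string values are copied from the patch; the prefixed identifiers other than `reasonDomainVerified` are hypothetical and only illustrate the convention.

```go
package acmechallenges

// A minimal sketch of the constant block using a common "reason" prefix,
// as suggested in the review comment. Values match the patch above; the
// prefixed names (other than reasonDomainVerified) are hypothetical.
const (
	reasonDomainVerified = "DomainVerified"

	reasonCleanUpError = "CleanUpError"
	reasonPresentError = "PresentError"
	reasonPresented    = "Presented"
	reasonFailed       = "Failed"
)
```

With this naming, call sites that currently pass string literals (for example the `c.recorder.Eventf(ch, corev1.EventTypeWarning, "CleanUpError", ...)` calls in the controller above) would presumably reference `reasonCleanUpError` and the other prefixed constants instead.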
@@ -32,7 +32,7 @@ const constexpr double LOOKAHEAD_DISTANCE_WITHOUT_LANES = 10.0; // smaller widths, ranging from 2.5 to 3.25 meters. As a compromise, we use // the 3.25 here for our angle calculations const constexpr double ASSUMED_LANE_WIDTH = 3.25; -const constexpr double FAR_LOOKAHEAD_DISTANCE = 30.0; +const constexpr double FAR_LOOKAHEAD_DISTANCE = 20.0; // The count of lanes assumed when no lanes are present. Since most roads will have lanes for both // directions or a lane count specified, we use 2. Overestimating only makes our calculations safer,
1
#include "extractor/guidance/coordinate_extractor.hpp" #include "extractor/guidance/constants.hpp" #include "extractor/guidance/toolkit.hpp" #include <algorithm> #include <cstddef> #include <cstdint> #include <iomanip> #include <limits> #include <numeric> #include <tuple> #include <utility> #include <boost/range/algorithm/transform.hpp> namespace osrm { namespace extractor { namespace guidance { namespace { // to use the corrected coordinate, we require it to be at least a bit further down the // road than the offset coordinate. We postulate a minimum Distance of 2 Meters const constexpr double DESIRED_COORDINATE_DIFFERENCE = 2.0; // the default distance we lookahead on a road. This distance prevents small mapping // errors to impact the turn angles. const constexpr double LOOKAHEAD_DISTANCE_WITHOUT_LANES = 10.0; // The standard with of a interstate highway is 3.7 meters. Local roads have // smaller widths, ranging from 2.5 to 3.25 meters. As a compromise, we use // the 3.25 here for our angle calculations const constexpr double ASSUMED_LANE_WIDTH = 3.25; const constexpr double FAR_LOOKAHEAD_DISTANCE = 30.0; // The count of lanes assumed when no lanes are present. Since most roads will have lanes for both // directions or a lane count specified, we use 2. Overestimating only makes our calculations safer, // so we are fine for 1-lane ways. larger than 2 lanes should usually be specified in the data. const constexpr std::uint16_t ASSUMED_LANE_COUNT = 2; } CoordinateExtractor::CoordinateExtractor( const util::NodeBasedDynamicGraph &node_based_graph, const extractor::CompressedEdgeContainer &compressed_geometries, const std::vector<extractor::QueryNode> &node_coordinates) : node_based_graph(node_based_graph), compressed_geometries(compressed_geometries), node_coordinates(node_coordinates) { } util::Coordinate CoordinateExtractor::GetCoordinateAlongRoad(const NodeID intersection_node, const EdgeID turn_edge, const bool traversed_in_reverse, const NodeID to_node, const std::uint8_t intersection_lanes) const { const auto considered_lanes = (intersection_lanes == 0) ? ASSUMED_LANE_COUNT : intersection_lanes; // we first extract all coordinates from the road auto coordinates = GetCoordinatesAlongRoad(intersection_node, turn_edge, traversed_in_reverse, to_node); /* if we are looking at a straight line, we don't care where exactly the coordinate * is. Simply return the final coordinate. Turn angles/turn vectors are the same no matter which * coordinate we look at. */ if (coordinates.size() <= 2) return coordinates.back(); // fallback, mostly necessary for dead ends if (intersection_node == to_node) return TrimCoordinatesToLength(coordinates, 5).back(); const auto lookahead_distance = FAR_LOOKAHEAD_DISTANCE + considered_lanes * ASSUMED_LANE_WIDTH * 0.5; // reduce coordinates to the ones we care about coordinates = TrimCoordinatesToLength(std::move(coordinates), lookahead_distance); // If this reduction leaves us with only two coordinates, the turns/angles are represented in a // valid way. Only curved roads and other difficult scenarios will require multiple coordinates. if (coordinates.size() == 2) return coordinates.back(); const auto &turn_edge_data = node_based_graph.GetEdgeData(turn_edge); const util::Coordinate turn_coordinate = node_coordinates[traversed_in_reverse ? to_node : intersection_node]; // Low priority roads are usually modelled very strangely. The roads are so small, though, that // our basic heuristic looking at the road should be fine. 
if (turn_edge_data.road_classification.IsLowPriorityRoadClass()) { // Look ahead a tiny bit. Low priority road classes can be modelled fairly distinct in the // very first part of the road coordinates = TrimCoordinatesToLength(std::move(coordinates), 10); if (coordinates.size() > 2 && util::coordinate_calculation::haversineDistance(turn_coordinate, coordinates[1]) < ASSUMED_LANE_WIDTH) return GetCorrectedCoordinate(turn_coordinate, coordinates[1], coordinates.back()); else return coordinates.back(); } /* * The coordinates along the road are in different distances from the source. If only very few * coordinates are close to the intersection, It might just be we simply looked to far down the * road. We can decide to weight coordinates differently based on their distance from the * intersection. * In addition, changes very close to an intersection indicate graphical representation of the * intersection over perceived turn angles. * * a - * \ * -------------------- b * * Here the initial angle close to a might simply be due to OSM-Ways being located in the middle * of the actual roads. If a road splits in two, the ways for the separate direction can be * modeled very far apart with a steep angle at the split, even though the roads actually don't * take a turn. The distance between the coordinates can be an indicator for these small changes */ const auto segment_distances = [&coordinates]() { std::vector<double> segment_distances; segment_distances.reserve(coordinates.size()); // sentinel auto last_coordinate = coordinates.front(); boost::range::transform(coordinates, std::back_inserter(segment_distances), [&last_coordinate](const util::Coordinate current_coordinate) { const auto distance = util::coordinate_calculation::haversineDistance( last_coordinate, current_coordinate); last_coordinate = current_coordinate; return distance; }); return segment_distances; }(); /* if the very first coordinate along the road is reasonably far away from the road, we assume * the coordinate to correctly represent the turn. This could probably be improved using * information on the very first turn angle (requires knowledge about previous road) and the * respective lane widths. */ const bool first_coordinate_is_far_away = [&segment_distances, considered_lanes]() { const auto required_distance = considered_lanes * 0.5 * ASSUMED_LANE_WIDTH + LOOKAHEAD_DISTANCE_WITHOUT_LANES; return segment_distances[1] > required_distance; }(); if (first_coordinate_is_far_away) { return coordinates[1]; } const double max_deviation_from_straight = GetMaxDeviation( coordinates.begin(), coordinates.end(), coordinates.front(), coordinates.back()); // if the deviation from a straight line is small, we can savely use the coordinate. We use half // a lane as heuristic to determine if the road is straight enough. if (max_deviation_from_straight < 0.5 * ASSUMED_LANE_WIDTH) { return coordinates.back(); } /* * if a road turns barely in the beginning, it is similar to the first coordinate being * sufficiently far ahead. 
* possible negative: * http://www.openstreetmap.org/search?query=52.514503%2013.32252#map=19/52.51450/13.32252 */ const auto straight_distance_and_index = [&]() { auto straight_distance = segment_distances[1]; std::size_t index; for (index = 2; index < coordinates.size(); ++index) { // check the deviation from a straight line if (GetMaxDeviation(coordinates.begin(), coordinates.begin() + index, coordinates.front(), *(coordinates.begin() + index)) < 0.25 * ASSUMED_LANE_WIDTH) straight_distance += segment_distances[index]; else break; } return std::make_pair(index - 1, straight_distance); }(); const auto straight_distance = straight_distance_and_index.second; const auto straight_index = straight_distance_and_index.first; const bool starts_of_without_turn = [&]() { return straight_distance >= considered_lanes * 0.5 * ASSUMED_LANE_WIDTH + LOOKAHEAD_DISTANCE_WITHOUT_LANES; }(); if (starts_of_without_turn) { // skip over repeated coordinates return TrimCoordinatesToLength(std::move(coordinates), 5).back(); } // compute the regression vector based on the sum of least squares const auto regression_line = RegressionLine(coordinates); /* * If we can find a line that represents the full set of coordinates within a certain range in * relation to ASSUMED_LANE_WIDTH, we use the regression line to express the turn angle. * This yields a transformation similar to: * * c d d * b -> c * b * a a */ const double max_deviation_from_regression = GetMaxDeviation( coordinates.begin(), coordinates.end(), regression_line.first, regression_line.second); if (max_deviation_from_regression < 0.35 * ASSUMED_LANE_WIDTH) { // We use the locations on the regression line to offset the regression line onto the // intersection. const auto coord_between_front = util::coordinate_calculation::projectPointOnSegment( regression_line.first, regression_line.second, coordinates.front()) .second; const auto coord_between_back = util::coordinate_calculation::projectPointOnSegment( regression_line.first, regression_line.second, coordinates.back()) .second; return GetCorrectedCoordinate(turn_coordinate, coord_between_front, coord_between_back); } const auto total_distance = std::accumulate(segment_distances.begin(), segment_distances.end(), 0.); if (IsDirectOffset(coordinates, straight_index, straight_distance, total_distance, segment_distances, considered_lanes)) { // could be too agressive? Depend on lanes to check how far we want to go out? // compare // http://www.openstreetmap.org/search?query=52.411243%2013.363575#map=19/52.41124/13.36357 const auto offset_index = std::max<decltype(straight_index)>(1, straight_index); return GetCorrectedCoordinate( turn_coordinate, coordinates[offset_index], coordinates[offset_index + 1]); } if (IsCurve(coordinates, segment_distances, total_distance, considered_lanes * 0.5 * ASSUMED_LANE_WIDTH, turn_edge_data)) { /* * In curves we now have to distinguish between larger curves and tiny curves modelling the * actual turn in the beginnig. * * We distinguish between turns that simply model the initial way of getting onto the * destination lanes and the ones that performa a larger turn. 
*/ const double offset = 0.5 * considered_lanes * ASSUMED_LANE_WIDTH; coordinates = TrimCoordinatesToLength(std::move(coordinates), offset); const auto vector_head = coordinates.back(); coordinates = TrimCoordinatesToLength(std::move(coordinates), offset); BOOST_ASSERT(coordinates.size() >= 2); return GetCorrectedCoordinate(turn_coordinate, coordinates.back(), vector_head); } { // skip over the first coordinates, in specific the assumed lane count. We add a small // safety factor, to not overshoot on the regression const auto trimming_length = 0.8 * (considered_lanes * ASSUMED_LANE_WIDTH); const auto trimmed_coordinates = TrimCoordinatesByLengthFront(coordinates, 0.8 * trimming_length); if (trimmed_coordinates.size() >= 2 && (total_distance >= trimming_length + 2)) { // get the regression line const auto regression_line_trimmed = RegressionLine(trimmed_coordinates); // and compute the maximum deviation from it const auto max_deviation_from_trimmed_regression = GetMaxDeviation(trimmed_coordinates.begin(), trimmed_coordinates.end(), regression_line_trimmed.first, regression_line_trimmed.second); if (max_deviation_from_trimmed_regression < 0.5 * ASSUMED_LANE_WIDTH) return GetCorrectedCoordinate( turn_coordinate, regression_line_trimmed.first, regression_line_trimmed.second); } } // We use the locations on the regression line to offset the regression line onto the // intersection. return TrimCoordinatesToLength(coordinates, LOOKAHEAD_DISTANCE_WITHOUT_LANES).back(); } std::vector<util::Coordinate> CoordinateExtractor::GetForwardCoordinatesAlongRoad(const NodeID from, const EdgeID turn_edge) const { return GetCoordinatesAlongRoad(from, turn_edge, false, node_based_graph.GetTarget(turn_edge)); } std::vector<util::Coordinate> CoordinateExtractor::GetCoordinatesAlongRoad(const NodeID intersection_node, const EdgeID turn_edge, const bool traversed_in_reverse, const NodeID to_node) const { if (!compressed_geometries.HasEntryForID(turn_edge)) { if (traversed_in_reverse) return {{node_coordinates[to_node]}, {node_coordinates[intersection_node]}}; else return {{node_coordinates[intersection_node]}, {node_coordinates[to_node]}}; } else { // extracts the geometry in coordinates from the compressed edge container std::vector<util::Coordinate> result; const auto &geometry = compressed_geometries.GetBucketReference(turn_edge); result.reserve(geometry.size() + 2); // the compressed edges contain node ids, we transfer them to coordinates accessing the // node_coordinates array const auto compressedGeometryToCoordinate = [this](const CompressedEdgeContainer::OnewayCompressedEdge &compressed_edge) -> util::Coordinate { return node_coordinates[compressed_edge.node_id]; }; // add the coordinates to the result in either normal or reversed order, based on // traversed_in_reverse if (traversed_in_reverse) { std::transform(geometry.rbegin(), geometry.rend(), std::back_inserter(result), compressedGeometryToCoordinate); result.push_back(node_coordinates[intersection_node]); } else { result.push_back(node_coordinates[intersection_node]); std::transform(geometry.begin(), geometry.end(), std::back_inserter(result), compressedGeometryToCoordinate); } return result; } } double CoordinateExtractor::GetMaxDeviation(std::vector<util::Coordinate>::const_iterator range_begin, const std::vector<util::Coordinate>::const_iterator &range_end, const util::Coordinate straight_begin, const util::Coordinate straight_end) const { // compute the deviation of a single coordinate from a straight line auto get_single_deviation = [&](const 
util::Coordinate coordinate) { // find the projected coordinate auto coord_between = util::coordinate_calculation::projectPointOnSegment( straight_begin, straight_end, coordinate) .second; // and calculate the distance between the intermediate coordinate and the coordinate // on the osrm-way return util::coordinate_calculation::haversineDistance(coord_between, coordinate); }; // note: we don't accumulate here but rather compute the maximum. The functor passed here is not // summing up anything. return std::accumulate( range_begin, range_end, 0.0, [&](const double current, const util::Coordinate coordinate) { return std::max(current, get_single_deviation(coordinate)); }); } bool CoordinateExtractor::IsCurve(const std::vector<util::Coordinate> &coordinates, const std::vector<double> &segment_distances, const double segment_length, const double considered_lane_width, const util::NodeBasedEdgeData &edge_data) const { BOOST_ASSERT(coordinates.size() > 2); // by default, we treat roundabout as curves if (edge_data.roundabout) return true; // TODO we might have to fix this to better compensate for errors due to repeated coordinates const bool takes_an_actual_turn = [&coordinates]() { const auto begin_bearing = util::coordinate_calculation::bearing(coordinates[0], coordinates[1]); const auto end_bearing = util::coordinate_calculation::bearing( coordinates[coordinates.size() - 2], coordinates[coordinates.size() - 1]); const auto total_angle = angularDeviation(begin_bearing, end_bearing); return total_angle > 0.5 * NARROW_TURN_ANGLE; }(); if (!takes_an_actual_turn) return false; const auto get_deviation = [](const util::Coordinate line_start, const util::Coordinate line_end, const util::Coordinate point) { // find the projected coordinate auto coord_between = util::coordinate_calculation::projectPointOnSegment(line_start, line_end, point).second; // and calculate the distance between the intermediate coordinate and the coordinate return util::coordinate_calculation::haversineDistance(coord_between, point); }; // a curve needs to be on one side of the coordinate array const bool all_same_side = [&]() { if (coordinates.size() <= 3) return true; const bool ccw = util::coordinate_calculation::isCCW( coordinates.front(), coordinates.back(), coordinates[1]); return std::all_of( coordinates.begin() + 2, coordinates.end() - 1, [&](const util::Coordinate coordinate) { const bool compare_ccw = util::coordinate_calculation::isCCW( coordinates.front(), coordinates.back(), coordinate); return ccw == compare_ccw; }); }(); if (!all_same_side) return false; // check if the deviation is a sequence that increases up to a maximum deviation and decreses // after, following what we would expect from a modelled curve bool has_up_down_deviation = false; std::size_t maximum_deviation_index = 0; double maximum_deviation = 0; std::tie(has_up_down_deviation, maximum_deviation_index, maximum_deviation) = [&coordinates, get_deviation]() -> std::tuple<bool, std::size_t, double> { const auto increasing = [&](const util::Coordinate lhs, const util::Coordinate rhs) { return get_deviation(coordinates.front(), coordinates.back(), lhs) <= get_deviation(coordinates.front(), coordinates.back(), rhs); }; const auto decreasing = [&](const util::Coordinate lhs, const util::Coordinate rhs) { return get_deviation(coordinates.front(), coordinates.back(), lhs) >= get_deviation(coordinates.front(), coordinates.back(), rhs); }; if (coordinates.size() < 3) return std::make_tuple(true, 0, 0.); if (coordinates.size() == 3) return std::make_tuple( 
true, 1, get_deviation(coordinates.front(), coordinates.back(), coordinates[1])); const auto maximum_itr = std::is_sorted_until(coordinates.begin() + 1, coordinates.end(), increasing); if (maximum_itr == coordinates.end()) return std::make_tuple(true, coordinates.size() - 1, 0.); else if (std::is_sorted(maximum_itr, coordinates.end(), decreasing)) return std::make_tuple( true, std::distance(coordinates.begin(), maximum_itr), get_deviation(coordinates.front(), coordinates.back(), *maximum_itr)); else return std::make_tuple(false, 0, 0.); }(); // a curve has increasing deviation from its front/back vertices to a certain point and after it // only decreases if (!has_up_down_deviation) return false; // if the maximum deviation is at a quarter of the total curve, we are probably looking at a // normal turn const auto distance_to_max_deviation = std::accumulate( segment_distances.begin(), segment_distances.begin() + maximum_deviation_index, 0.); if ((distance_to_max_deviation <= 0.35 * segment_length || maximum_deviation < std::max(0.3 * considered_lane_width, 0.5 * ASSUMED_LANE_WIDTH)) && segment_length > 10) return false; BOOST_ASSERT(coordinates.size() >= 3); // Compute all turn angles along the road const auto turn_angles = [coordinates]() { std::vector<double> turn_angles; turn_angles.reserve(coordinates.size() - 2); for (std::size_t index = 0; index + 2 < coordinates.size(); ++index) { turn_angles.push_back(util::coordinate_calculation::computeAngle( coordinates[index], coordinates[index + 1], coordinates[index + 2])); } return turn_angles; }(); const bool curve_is_valid = [&turn_angles, &segment_distances, &segment_length, &considered_lane_width]() { // internal state for our lamdae bool last_was_straight = false; // a turn angle represents two segments between three coordinates. We initialize the // distance with the very first segment length (in-segment) of the first turn-angle double straight_distance = std::max(0., segment_distances[1] - considered_lane_width); auto distance_itr = segment_distances.begin() + 1; // every call to the lamda requires a call to the distances. 
They need to be aligned BOOST_ASSERT(segment_distances.size() == turn_angles.size() + 2); const auto detect_invalid_curve = [&](const double previous_angle, const double current_angle) { const auto both_actually_turn = (angularDeviation(previous_angle, STRAIGHT_ANGLE) > FUZZY_ANGLE_DIFFERENCE) && (angularDeviation(current_angle, STRAIGHT_ANGLE) > FUZZY_ANGLE_DIFFERENCE); // they cannot be straight, since they differ at least by FUZZY_ANGLE_DIFFERENCE const auto turn_direction_switches = (previous_angle > STRAIGHT_ANGLE) == (current_angle < STRAIGHT_ANGLE); // a turn that switches direction mid-curve is not a valid curve if (both_actually_turn && turn_direction_switches) return true; const bool is_straight = angularDeviation(current_angle, STRAIGHT_ANGLE) < 5; ++distance_itr; if (is_straight) { // since the angle is straight, we augment it by the second part of the segment straight_distance += *distance_itr; if (last_was_straight && straight_distance > 0.3 * segment_length) { return true; } } // if a segment on its own is long enough, thats fair game as well else if (straight_distance > 0.3 * segment_length) return true; else { // we reset the last distance, starting with the next in-segment again straight_distance = *distance_itr; } last_was_straight = is_straight; return false; }; const auto end_of_straight_segment = std::adjacent_find(turn_angles.begin(), turn_angles.end(), detect_invalid_curve); // No curve should have a very long straight segment return end_of_straight_segment == turn_angles.end(); }(); return (segment_length > 2 * considered_lane_width && curve_is_valid); } bool CoordinateExtractor::IsDirectOffset(const std::vector<util::Coordinate> &coordinates, const std::size_t straight_index, const double straight_distance, const double segment_length, const std::vector<double> &segment_distances, const std::uint8_t considered_lanes) const { // check if a given length is with half a lane of the assumed lane offset const auto IsCloseToLaneDistance = [considered_lanes](const double width) { // a road usually is connected to the middle of the lanes. So the lane-offset has to // consider half to road const auto lane_offset = 0.5 * considered_lanes * ASSUMED_LANE_WIDTH; return std::abs(width - lane_offset) < 0.5 * ASSUMED_LANE_WIDTH; }; // Check whether the very first coordinate is simply an offset. This is the case if the initial // vertex is close to the turn and the remaining coordinates are nearly straight. 
const auto offset_index = std::max<decltype(straight_index)>(1, straight_index); // we need at least a single coordinate if (offset_index + 1 >= coordinates.size()) return false; // the straight part has to be around the lane distance if (!IsCloseToLaneDistance(segment_distances[offset_index])) return false; // the segment itself cannot be short if (segment_length < 0.8 * FAR_LOOKAHEAD_DISTANCE) return false; // if the remaining segment is short, we don't consider it an offset if ((segment_length - std::max(straight_distance, segment_distances[1])) > 0.1 * segment_length) return false; // finally, we cannot be far off from a straight line for the remaining coordinates return 0.5 * ASSUMED_LANE_WIDTH > GetMaxDeviation(coordinates.begin() + offset_index, coordinates.end(), coordinates[offset_index], coordinates.back()); } std::vector<util::Coordinate> CoordinateExtractor::TrimCoordinatesToLength(std::vector<util::Coordinate> coordinates, const double desired_length) const { BOOST_ASSERT(coordinates.size() >= 2); double distance_to_current_coordinate = 0; for (std::size_t coordinate_index = 1; coordinate_index < coordinates.size(); ++coordinate_index) { const auto distance_to_next_coordinate = distance_to_current_coordinate + util::coordinate_calculation::haversineDistance(coordinates[coordinate_index - 1], coordinates[coordinate_index]); // if we reached the number of coordinates, we can stop here if (distance_to_next_coordinate >= desired_length) { coordinates.resize(coordinate_index + 1); coordinates.back() = util::coordinate_calculation::interpolateLinear( ComputeInterpolationFactor( desired_length, distance_to_current_coordinate, distance_to_next_coordinate), coordinates[coordinate_index - 1], coordinates[coordinate_index]); break; } // remember the accumulated distance distance_to_current_coordinate = distance_to_next_coordinate; } if (coordinates.size() > 2 && util::coordinate_calculation::haversineDistance(coordinates[0], coordinates[1]) <= 1) coordinates.erase(coordinates.begin() + 1); BOOST_ASSERT(coordinates.size()); return coordinates; } util::Coordinate CoordinateExtractor::GetCorrectedCoordinate(const util::Coordinate fixpoint, const util::Coordinate vector_base, const util::Coordinate vector_head) const { // if the coordinates are close together, we were not able to look far ahead, so // we can use the end-coordinate if (util::coordinate_calculation::haversineDistance(vector_base, vector_head) < DESIRED_COORDINATE_DIFFERENCE) return vector_head; else { /* to correct for the initial offset, we move the lookahead coordinate close * to the original road. 
We do so by subtracting the difference between the * turn coordinate and the offset coordinate from the lookahead coordinge: * * a ------ b ------ c * | * d * \ * \ * e * * is converted to: * * a ------ b ------ c * \ * \ * e * * for turn node `b`, vector_base `d` and vector_head `e` */ const auto offset_percentage = 90; const auto corrected_lon = vector_head.lon - util::FixedLongitude{offset_percentage * static_cast<int>(vector_base.lon - fixpoint.lon) / 100}; const auto corrected_lat = vector_head.lat - util::FixedLatitude{offset_percentage * static_cast<int>(vector_base.lat - fixpoint.lat) / 100}; return util::Coordinate(corrected_lon, corrected_lat); } } std::vector<util::Coordinate> CoordinateExtractor::SampleCoordinates(const std::vector<util::Coordinate> &coordinates, const double max_sample_length, const double rate) const { BOOST_ASSERT(rate > 0 && coordinates.size() >= 2); // the return value std::vector<util::Coordinate> sampled_coordinates; sampled_coordinates.reserve(ceil(max_sample_length / rate) + 2); // the very first coordinate is always part of the sample sampled_coordinates.push_back(coordinates.front()); double carry_length = 0., total_length = 0.; // interpolate coordinates as long as we are not past the desired length const auto add_samples_until_length_limit = [&](const util::Coordinate previous_coordinate, const util::Coordinate current_coordinate) { // pretend to have found an element and stop the sampling if (total_length > max_sample_length) return true; const auto distance_between = util::coordinate_calculation::haversineDistance( previous_coordinate, current_coordinate); if (carry_length + distance_between >= rate) { // within the current segment, there is at least a single coordinate that we want to // sample. We extract all coordinates that are on our sampling intervals and update our // local sampling item to reflect the travelled distance const auto base_sampling = rate - carry_length; // the number of samples in the interval is equal to the length of the interval (+ the // already traversed part from the previous segment) divided by the sampling rate BOOST_ASSERT(max_sample_length > total_length); const std::size_t num_samples = std::floor( (std::min(max_sample_length - total_length, distance_between) + carry_length) / rate); for (std::size_t sample_value = 0; sample_value < num_samples; ++sample_value) { const auto interpolation_factor = ComputeInterpolationFactor( base_sampling + sample_value * rate, 0, distance_between); auto sampled_coordinate = util::coordinate_calculation::interpolateLinear( interpolation_factor, previous_coordinate, current_coordinate); sampled_coordinates.emplace_back(sampled_coordinate); } // current length needs to reflect how much is missing to the next sample. Here we can // ignore max sample range, because if we reached it, the loop is done anyhow carry_length = (distance_between + carry_length) - (num_samples * rate); } else { // do the necessary bookkeeping and continue carry_length += distance_between; } // the total length travelled is always updated by the full distance total_length += distance_between; return false; }; // misuse of adjacent_find. 
Loop over coordinates, until a total sample length is reached std::adjacent_find(coordinates.begin(), coordinates.end(), add_samples_until_length_limit); return sampled_coordinates; } double CoordinateExtractor::ComputeInterpolationFactor(const double desired_distance, const double distance_to_first, const double distance_to_second) const { BOOST_ASSERT(distance_to_first < desired_distance); double segment_length = distance_to_second - distance_to_first; BOOST_ASSERT(segment_length > 0); BOOST_ASSERT(distance_to_second >= desired_distance); double missing_distance = desired_distance - distance_to_first; return std::max(0., std::min(missing_distance / segment_length, 1.0)); } std::vector<util::Coordinate> CoordinateExtractor::TrimCoordinatesByLengthFront(std::vector<util::Coordinate> coordinates, const double desired_length) const { double distance_to_index = 0; std::size_t index = 0; for (std::size_t next_index = 1; next_index < coordinates.size(); ++next_index) { const double next_distance = distance_to_index + util::coordinate_calculation::haversineDistance( coordinates[index], coordinates[next_index]); if (next_distance >= desired_length) { const auto factor = ComputeInterpolationFactor(desired_length, distance_to_index, next_distance); auto interpolated_coordinate = util::coordinate_calculation::interpolateLinear( factor, coordinates[index], coordinates[next_index]); if (index > 0) coordinates.erase(coordinates.begin(), coordinates.begin() + index); coordinates.front() = interpolated_coordinate; return coordinates; } distance_to_index = next_distance; index = next_index; } // the coordinates in total are too short in length for the desired length // this part is only reached when we don't return from within the above loop coordinates.clear(); return coordinates; } std::pair<util::Coordinate, util::Coordinate> CoordinateExtractor::RegressionLine(const std::vector<util::Coordinate> &coordinates) const { // create a sample of all coordinates to improve the quality of our regression vector // (less dependent on modelling of the data in OSM) const auto sampled_coordinates = SampleCoordinates(coordinates, FAR_LOOKAHEAD_DISTANCE, 1); BOOST_ASSERT(!coordinates.empty()); if (sampled_coordinates.size() < 2) // less than 1 meter in length return {coordinates.front(), coordinates.back()}; // compute the regression vector based on the sum of least squares const auto regression_line = leastSquareRegression(sampled_coordinates); const auto coord_between_front = util::coordinate_calculation::projectPointOnSegment( regression_line.first, regression_line.second, coordinates.front()) .second; const auto coord_between_back = util::coordinate_calculation::projectPointOnSegment( regression_line.first, regression_line.second, coordinates.back()) .second; return {coord_between_front, coord_between_back}; } } // namespace guidance } // namespace extractor } // namespace osrm
1
18773
Using lanes further down, this threshold could be reduced for a similar effect. Otherwise we look a bit too far.
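The note above refers to the straight-start check in the file listed before it, whose threshold is considered_lanes * 0.5 * ASSUMED_LANE_WIDTH + LOOKAHEAD_DISTANCE_WITHOUT_LANES. The standalone C++ sketch below is only an illustration of the reviewer's suggestion, not code from the OSRM sources: when lane information is known, the lookahead portion of the threshold could be capped relative to the road width instead of always using the full default. The constant values and the helper name lookaheadThreshold are assumptions made for this example.

#include <algorithm>
#include <cstdint>
#include <iostream>

namespace
{
// Values assumed for illustration; the real constants live in the OSRM guidance headers.
const double ASSUMED_LANE_WIDTH = 3.25;               // metres
const double LOOKAHEAD_DISTANCE_WITHOUT_LANES = 30.0; // metres

// Threshold for the "starts off without a turn" check: half the road width plus a lookahead.
// When the lane count is known, the lookahead is capped relative to the road width, so the
// check does not look further ahead than the turn geometry requires.
double lookaheadThreshold(const std::uint8_t considered_lanes, const bool lanes_known)
{
    const double half_road_width = considered_lanes * 0.5 * ASSUMED_LANE_WIDTH;
    const double lookahead =
        lanes_known ? std::min(LOOKAHEAD_DISTANCE_WITHOUT_LANES, 4.0 * half_road_width)
                    : LOOKAHEAD_DISTANCE_WITHOUT_LANES;
    return half_road_width + lookahead;
}
} // namespace

int main()
{
    std::cout << "two lanes, lane info missing:   " << lookaheadThreshold(2, false) << " m\n";
    std::cout << "two lanes, lane info available: " << lookaheadThreshold(2, true) << " m\n";
    return 0;
}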
Project-OSRM-osrm-backend
cpp
@@ -78,6 +78,12 @@ typedef enum {
 	CONTAINER_MODE_UMBRELLA
 } container_mode_t;
 
+struct environment {
+	char *tarball; /* Conda environment as produced by conda-pack. */
+	char *expansion; /* Directory in cache/. Only set when environment has been expanded. */
+	int error; /* Whether the expansion had an error. 1 for yes, 0 for no. */
+};
+
 #define DOCKER_WORK_DIR "/home/worker"
 
 // In single shot mode, immediately quit when disconnected.
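The struct added by the patch above only declares bookkeeping fields; the patch itself does not show how an environment would be expanded. The standalone C sketch below is a hypothetical illustration of that flow as described by the field comments: unpack the conda-pack tarball into a directory under cache/ and record either the expansion path or an error. The names expand_environment and CACHE_DIR, and the use of tar through system(), are assumptions made for this example and are not part of the work_queue_worker sources.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>

#define CACHE_DIR "cache" /* assumed to mirror the "cache/%s" convention used by the worker */

struct environment {
	char *tarball;   /* Conda environment as produced by conda-pack. */
	char *expansion; /* Directory in cache/. Only set when environment has been expanded. */
	int error;       /* Whether the expansion had an error. 1 for yes, 0 for no. */
};

/* Hypothetical helper: expand the tarball into cache/<name> and record the outcome. */
static void expand_environment(struct environment *env, const char *name)
{
	char dir[4096];
	char cmd[8192];

	snprintf(dir, sizeof(dir), "%s/%s", CACHE_DIR, name);
	mkdir(CACHE_DIR, 0777); /* ignore failures from an already existing directory */
	mkdir(dir, 0777);

	snprintf(cmd, sizeof(cmd), "tar -xf \"%s\" -C \"%s\"", env->tarball, dir);
	if(system(cmd) == 0) {
		env->expansion = strdup(dir);
		env->error = 0;
	} else {
		env->expansion = NULL;
		env->error = 1;
	}
}

int main(void)
{
	struct environment env;

	env.tarball = "myenv.tar.gz";
	env.expansion = NULL;
	env.error = 0;

	expand_environment(&env, "myenv");
	printf("expansion=%s error=%d\n", env.expansion ? env.expansion : "(none)", env.error);
	free(env.expansion);
	return 0;
}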
1
/* Copyright (C) 2008- The University of Notre Dame This software is distributed under the GNU General Public License. See the file COPYING for details. */ #include "work_queue.h" #include "work_queue_protocol.h" #include "work_queue_internal.h" #include "work_queue_resources.h" #include "work_queue_process.h" #include "work_queue_catalog.h" #include "work_queue_watcher.h" #include "cctools.h" #include "macros.h" #include "catalog_query.h" #include "domain_name_cache.h" #include "jx.h" #include "copy_stream.h" #include "host_memory_info.h" #include "host_disk_info.h" #include "path_disk_size_info.h" #include "hash_cache.h" #include "link.h" #include "link_auth.h" #include "list.h" #include "xxmalloc.h" #include "debug.h" #include "stringtools.h" #include "path.h" #include "load_average.h" #include "getopt.h" #include "getopt_aux.h" #include "create_dir.h" #include "delete_dir.h" #include "itable.h" #include "random.h" #include "url_encode.h" #include "md5.h" #include "disk_alloc.h" #include "hash_table.h" #include "pattern.h" #include "gpu_info.h" #include <unistd.h> #include <dirent.h> #include <fcntl.h> #include <assert.h> #include <errno.h> #include <limits.h> #include <math.h> #include <signal.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <time.h> #include <poll.h> #include <signal.h> #include <sys/mman.h> #include <sys/resource.h> #include <sys/stat.h> #include <sys/types.h> #include <sys/utsname.h> #include <sys/wait.h> typedef enum { WORKER_MODE_WORKER, WORKER_MODE_FOREMAN } worker_mode_t; typedef enum { CONTAINER_MODE_NONE, CONTAINER_MODE_DOCKER, CONTAINER_MODE_DOCKER_PRESERVE, CONTAINER_MODE_UMBRELLA } container_mode_t; #define DOCKER_WORK_DIR "/home/worker" // In single shot mode, immediately quit when disconnected. // Useful for accelerating the test suite. static int single_shot_mode = 0; // Maximum time to stay connected to a single master without any work. static int idle_timeout = 900; // Current time at which we will give up if no work is received. static time_t idle_stoptime = 0; // Current time at which we will give up if no master is found. static time_t connect_stoptime = 0; // Maximum time to attempt connecting to all available masters before giving up. static int connect_timeout = 900; // Maximum time to attempt sending/receiving any given file or message. static const int active_timeout = 3600; // Maximum time for the foreman to spend waiting in its internal loop static const int foreman_internal_timeout = 5; // Initial value for backoff interval (in seconds) when worker fails to connect to a master. static int init_backoff_interval = 1; // Maximum value for backoff interval (in seconds) when worker fails to connect to a master. static int max_backoff_interval = 60; // Chance that a worker will decide to shut down each minute without warning, to simulate failure. static double worker_volatility = 0.0; // If flag is set, then the worker proceeds to immediately cleanup and shut down. // This can be set by Ctrl-C or by any condition that prevents further progress. static int abort_flag = 0; // Record the signal received, to inform the master if appropiate. static int abort_signal_received = 0; // Flag used to indicate a child must be waited for. static int sigchld_received_flag = 0; // Threshold for available memory, and disk space (MB) beyond which clean up and quit. static int64_t disk_avail_threshold = 100; static int64_t memory_avail_threshold = 100; // Password shared between master and worker. 
char *password = 0; // Allow worker to use symlinks when link() fails. Enabled by default. static int symlinks_enabled = 1; // Worker id. A unique id for this worker instance. static char *worker_id; static worker_mode_t worker_mode = WORKER_MODE_WORKER; static container_mode_t container_mode = CONTAINER_MODE_NONE; static int load_from_tar = 0; struct master_address { char host[DOMAIN_NAME_MAX]; int port; char addr[DOMAIN_NAME_MAX]; }; struct list *master_addresses; struct master_address *current_master_address; static char *workspace; static char *os_name = NULL; static char *arch_name = NULL; static char *user_specified_workdir = NULL; static time_t worker_start_time = 0; static struct work_queue_watcher * watcher = 0; static struct work_queue_resources * local_resources = 0; static struct work_queue_resources * total_resources = 0; static struct work_queue_resources * total_resources_last = 0; static int64_t last_task_received = 0; static int64_t manual_cores_option = 0; static int64_t manual_disk_option = 0; static int64_t manual_memory_option = 0; static int64_t manual_gpus_option = 0; static time_t manual_wall_time_option = 0; static int64_t cores_allocated = 0; static int64_t memory_allocated = 0; static int64_t disk_allocated = 0; static int64_t gpus_allocated = 0; // Allow worker to use disk_alloc loop devices for task sandbox. Disabled by default. static int disk_allocation = 0; static int64_t files_counted = 0; static int check_resources_interval = 5; static int max_time_on_measurement = 3; static struct work_queue *foreman_q = NULL; // docker image name static char *img_name = NULL; static char *container_name = NULL; static char *tar_fn = NULL; // Table of all processes in any state, indexed by taskid. // Processes should be created/deleted when added/removed from this table. static struct itable *procs_table = NULL; // Table of all processes currently running, indexed by pid. // These are additional pointers into procs_table. static struct itable *procs_running = NULL; // List of all procs that are waiting to be run. // These are additional pointers into procs_table. static struct list *procs_waiting = NULL; // Table of all processes with results to be sent back, indexed by taskid. // These are additional pointers into procs_table. static struct itable *procs_complete = NULL; //User specified features this worker provides. static struct hash_table *features = NULL; static int results_to_be_sent_msg = 0; static timestamp_t total_task_execution_time = 0; static int total_tasks_executed = 0; static const char *project_regex = 0; static int released_by_master = 0; __attribute__ (( format(printf,2,3) )) static void send_master_message( struct link *master, const char *fmt, ... ) { char debug_msg[2*WORK_QUEUE_LINE_MAX]; va_list va; va_list debug_va; va_start(va,fmt); string_nformat(debug_msg, sizeof(debug_msg), "tx to master: %s", fmt); va_copy(debug_va, va); vdebug(D_WQ, debug_msg, debug_va); link_putvfstring(master, fmt, time(0)+active_timeout, va); va_end(va); } static int recv_master_message( struct link *master, char *line, int length, time_t stoptime ) { int result = link_readline(master,line,length,stoptime); if(result) debug(D_WQ,"rx from master: %s",line); return result; } /* We track how much time has elapsed since the master assigned a task. If time(0) > idle_stoptime, then the worker will disconnect. */ void reset_idle_timer() { idle_stoptime = time(0) + idle_timeout; } /* Measure the disk used by the worker. 
We only manually measure the cache directory, as processes measure themselves. */ int64_t measure_worker_disk() { static struct path_disk_size_info *state = NULL; path_disk_size_info_get_r("./cache", max_time_on_measurement, &state); int64_t disk_measured = 0; if(state->last_byte_size_complete >= 0) { disk_measured = (int64_t) ceil(state->last_byte_size_complete/(1.0*MEGA)); } files_counted = state->last_file_count_complete; if(state->complete_measurement) { /* if a complete measurement has been done, then update * for the found value, and add the known values of the processes. */ struct work_queue_process *p; uint64_t taskid; itable_firstkey(procs_table); while(itable_nextkey(procs_table,&taskid,(void**)&p)) { if(p->sandbox_size > 0) { disk_measured += p->sandbox_size; files_counted += p->sandbox_file_count; } } } return disk_measured; } /* Measure only the resources associated with this particular node and apply any operations that override. */ void measure_worker_resources() { static time_t last_resources_measurement = 0; if(time(0) < last_resources_measurement + check_resources_interval) { return; } struct work_queue_resources *r = local_resources; work_queue_resources_measure_locally(r,workspace); if(worker_mode == WORKER_MODE_FOREMAN) { aggregate_workers_resources(foreman_q, total_resources, features); } else { if(manual_cores_option > 0) r->cores.total = manual_cores_option; if(manual_memory_option) r->memory.total = manual_memory_option; if(manual_gpus_option) r->gpus.total = manual_gpus_option; } if(manual_disk_option) r->disk.total = MIN(r->disk.total, manual_disk_option); r->cores.smallest = r->cores.largest = r->cores.total; r->memory.smallest = r->memory.largest = r->memory.total; r->disk.smallest = r->disk.largest = r->disk.total; r->gpus.smallest = r->gpus.largest = r->gpus.total; r->disk.inuse = measure_worker_disk(); r->tag = last_task_received; if(worker_mode == WORKER_MODE_FOREMAN) { total_resources->disk.total = r->disk.total; total_resources->disk.inuse = r->disk.inuse; total_resources->tag = last_task_received; } else { /* in a regular worker, total and local resources are the same. */ memcpy(total_resources, r, sizeof(struct work_queue_resources)); } last_resources_measurement = time(0); } /* Send a message to the master with user defined features. */ static void send_features(struct link *master) { char *f; void *dummy; hash_table_firstkey(features); char fenc[WORK_QUEUE_LINE_MAX]; while(hash_table_nextkey(features, &f, &dummy)) { url_encode(f, fenc, WORK_QUEUE_LINE_MAX); send_master_message(master, "feature %s\n", fenc); } } /* Send a message to the master with my current resources. 
*/ static void send_resource_update(struct link *master) { time_t stoptime = time(0) + active_timeout; if(worker_mode == WORKER_MODE_FOREMAN) { total_resources->disk.total = local_resources->disk.total - disk_avail_threshold; total_resources->disk.inuse = local_resources->disk.inuse; } else { total_resources->memory.total = MAX(0, local_resources->memory.total - memory_avail_threshold); total_resources->memory.largest = MAX(0, local_resources->memory.largest - memory_avail_threshold); total_resources->memory.smallest = MAX(0, local_resources->memory.smallest - memory_avail_threshold); total_resources->disk.total = MAX(0, local_resources->disk.total - disk_avail_threshold); total_resources->disk.largest = MAX(0, local_resources->disk.largest - disk_avail_threshold); total_resources->disk.smallest = MAX(0, local_resources->disk.smallest - disk_avail_threshold); } work_queue_resources_send(master,total_resources,stoptime); send_master_message(master, "info end_of_resource_update %d\n", 0); } /* Send a message to the master with my current statistics information. */ static void send_stats_update(struct link *master) { if(worker_mode == WORKER_MODE_FOREMAN) { struct work_queue_stats s; work_queue_get_stats_hierarchy(foreman_q, &s); send_master_message(master, "info workers_joined %lld\n", (long long) s.workers_joined); send_master_message(master, "info workers_removed %lld\n", (long long) s.workers_removed); send_master_message(master, "info workers_released %lld\n", (long long) s.workers_released); send_master_message(master, "info workers_idled_out %lld\n", (long long) s.workers_idled_out); send_master_message(master, "info workers_fast_aborted %lld\n", (long long) s.workers_fast_aborted); send_master_message(master, "info workers_blacklisted %lld\n", (long long) s.workers_blacklisted); send_master_message(master, "info workers_lost %lld\n", (long long) s.workers_lost); send_master_message(master, "info tasks_waiting %lld\n", (long long) s.tasks_waiting); send_master_message(master, "info tasks_on_workers %lld\n", (long long) s.tasks_on_workers); send_master_message(master, "info tasks_running %lld\n", (long long) s.tasks_running); send_master_message(master, "info tasks_waiting %lld\n", (long long) list_size(procs_waiting)); send_master_message(master, "info tasks_with_results %lld\n", (long long) s.tasks_with_results); send_master_message(master, "info time_send %lld\n", (long long) s.time_send); send_master_message(master, "info time_receive %lld\n", (long long) s.time_receive); send_master_message(master, "info time_send_good %lld\n", (long long) s.time_send_good); send_master_message(master, "info time_receive_good %lld\n", (long long) s.time_receive_good); send_master_message(master, "info time_workers_execute %lld\n", (long long) s.time_workers_execute); send_master_message(master, "info time_workers_execute_good %lld\n", (long long) s.time_workers_execute_good); send_master_message(master, "info time_workers_execute_exhaustion %lld\n", (long long) s.time_workers_execute_exhaustion); send_master_message(master, "info bytes_sent %lld\n", (long long) s.bytes_sent); send_master_message(master, "info bytes_received %lld\n", (long long) s.bytes_received); } else { send_master_message(master, "info tasks_running %lld\n", (long long) itable_size(procs_running)); } } static int send_keepalive(struct link *master, int force_resources){ send_master_message(master, "alive\n"); /* for regular workers we only send resources on special ocassions, thus * the force_resources. 
*/ if(force_resources || worker_mode == WORKER_MODE_FOREMAN) { send_resource_update(master); } send_stats_update(master); return 1; } /* Send the initial "ready" message to the master with the version and so forth. The master will not start sending tasks until this message is recevied. */ static void report_worker_ready( struct link *master ) { char hostname[DOMAIN_NAME_MAX]; domain_name_cache_guess(hostname); send_master_message(master,"workqueue %d %s %s %s %d.%d.%d\n",WORK_QUEUE_PROTOCOL_VERSION,hostname,os_name,arch_name,CCTOOLS_VERSION_MAJOR,CCTOOLS_VERSION_MINOR,CCTOOLS_VERSION_MICRO); send_master_message(master, "info worker-id %s\n", worker_id); send_features(master); send_keepalive(master, 1); } const char *skip_dotslash( const char *s ) { while(!strncmp(s,"./",2)) s+=2; return s; } /* Link a file from one place to another. If a hard link doesn't work, use a symbolic link. If it is a directory, do it recursively. */ int link_recursive( const char *source, const char *target ) { struct stat info; if(lstat(source,&info)<0) return 0; if(S_ISDIR(info.st_mode)) { DIR *dir = opendir(source); if(!dir) return 0; mkdir(target, 0777); struct dirent *d; int result = 1; while((d = readdir(dir))) { if(!strcmp(d->d_name,".")) continue; if(!strcmp(d->d_name,"..")) continue; char *subsource = string_format("%s/%s",source,d->d_name); char *subtarget = string_format("%s/%s",target,d->d_name); result = link_recursive(subsource,subtarget); free(subsource); free(subtarget); if(!result) break; } closedir(dir); return result; } else { if(link(source, target)==0) return 1; /* If the hard link failed, perhaps because the source was a directory, or if hard links are not supported in that file system, fall back to a symlink. */ if(symlinks_enabled) { /* Use an absolute path when symlinking, otherwise the link will be accidentally relative to the current directory. */ char *cwd = path_getcwd(); char *absolute_source = string_format("%s/%s", cwd, source); int result = symlink(absolute_source, target); free(absolute_source); free(cwd); if(result==0) return 1; } return 0; } } /* Start executing the given process on the local host, accounting for the resources as necessary. */ static int start_process( struct work_queue_process *p ) { pid_t pid; if (container_mode == CONTAINER_MODE_DOCKER) pid = work_queue_process_execute(p, container_mode, img_name); else if (container_mode == CONTAINER_MODE_DOCKER_PRESERVE) pid = work_queue_process_execute(p, container_mode, container_name); else pid = work_queue_process_execute(p, container_mode); if(pid<0) fatal("unable to fork process for taskid %d!",p->task->taskid); itable_insert(procs_running,pid,p); struct work_queue_task *t = p->task; cores_allocated += t->resources_requested->cores; memory_allocated += t->resources_requested->memory; disk_allocated += t->resources_requested->disk; gpus_allocated += t->resources_requested->gpus; return 1; } /* Transmit the results of the given process to the master. If a local worker, stream the output from disk. If a foreman, send the outputs contained in the task structure. 
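In both cases the header line has the form "result <task status> <exit status> <output length> <execution time> <taskid>", and the raw output bytes follow immediately after it.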
*/ static void report_task_complete( struct link *master, struct work_queue_process *p ) { int64_t output_length; struct stat st; if(worker_mode==WORKER_MODE_WORKER) { fstat(p->output_fd, &st); output_length = st.st_size; lseek(p->output_fd, 0, SEEK_SET); send_master_message(master, "result %d %d %lld %llu %d\n", p->task_status, p->exit_status, (long long) output_length, (unsigned long long) p->execution_end-p->execution_start, p->task->taskid); link_stream_from_fd(master, p->output_fd, output_length, time(0)+active_timeout); total_task_execution_time += (p->execution_end - p->execution_start); total_tasks_executed++; } else { struct work_queue_task *t = p->task; if(t->output) { output_length = strlen(t->output); } else { output_length = 0; } send_master_message(master, "result %d %d %lld %llu %d\n", t->result, t->return_status, (long long) output_length, (unsigned long long) t->time_workers_execute_last, t->taskid); if(output_length) { link_putlstring(master, t->output, output_length, time(0)+active_timeout); } total_task_execution_time += t->time_workers_execute_last; total_tasks_executed++; } send_stats_update(master); } /* Remove one item from an itable, ignoring the key */ static void * itable_pop(struct itable *t ) { uint64_t key; void *value; itable_firstkey(t); if(itable_nextkey(t, &key, (void*)&value)) { return itable_remove(t,key); } else { return 0; } } /* For every unreported complete task and watched file, send the results to the master. */ static void report_tasks_complete( struct link *master ) { struct work_queue_process *p; while((p=itable_pop(procs_complete))) { report_task_complete(master,p); } work_queue_watcher_send_changes(watcher,master,time(0)+active_timeout); send_master_message(master, "end\n"); results_to_be_sent_msg = 0; } static void expire_procs_running() { struct work_queue_process *p; uint64_t pid; timestamp_t current_time = timestamp_get(); itable_firstkey(procs_running); while(itable_nextkey(procs_running, (uint64_t*)&pid, (void**)&p)) { if(p->task->resources_requested->end > 0 && current_time > (uint64_t) p->task->resources_requested->end) { p->task_status = WORK_QUEUE_RESULT_TASK_TIMEOUT; kill(pid, SIGKILL); } } } /* Scan over all of the processes known by the worker, and if they have exited, move them into the procs_complete table for later processing. 
*/ static int handle_tasks(struct link *master) { struct work_queue_process *p; pid_t pid; int status; itable_firstkey(procs_running); while(itable_nextkey(procs_running, (uint64_t*)&pid, (void**)&p)) { int result = wait4(pid, &status, WNOHANG, &p->rusage); if(result==0) { // pid is still going } else if(result<0) { debug(D_WQ, "wait4 on pid %d returned an error: %s",pid,strerror(errno)); } else if(result>0) { if (!WIFEXITED(status)){ p->exit_status = WTERMSIG(status); debug(D_WQ, "task %d (pid %d) exited abnormally with signal %d",p->task->taskid,p->pid,p->exit_status); } else { p->exit_status = WEXITSTATUS(status); FILE *loop_full_check; char *buf = malloc(PATH_MAX); char *pwd = getcwd(buf, PATH_MAX); char *disk_alloc_filename = work_queue_generate_disk_alloc_full_filename(pwd, p->task->taskid); if(p->loop_mount == 1 && (loop_full_check = fopen(disk_alloc_filename, "r"))) { p->task_status = WORK_QUEUE_RESULT_DISK_ALLOC_FULL; p->task->disk_allocation_exhausted = 1; fclose(loop_full_check); unlink(disk_alloc_filename); } free(buf); free(disk_alloc_filename); debug(D_WQ, "task %d (pid %d) exited normally with exit code %d",p->task->taskid,p->pid,p->exit_status); } p->execution_end = timestamp_get(); cores_allocated -= p->task->resources_requested->cores; memory_allocated -= p->task->resources_requested->memory; disk_allocated -= p->task->resources_requested->disk; gpus_allocated -= p->task->resources_requested->gpus; itable_remove(procs_running, p->pid); itable_firstkey(procs_running); // Output files must be moved back into the cache directory. struct work_queue_file *f; list_first_item(p->task->output_files); while((f = list_next_item(p->task->output_files))) { char *sandbox_name = string_format("%s/%s",p->sandbox,f->remote_name); debug(D_WQ,"moving output file from %s to %s",sandbox_name,f->payload); /* First we try a cheap rename. It that does not work, we try to copy the file. */ if(rename(sandbox_name,f->payload) == -1) { debug(D_WQ, "could not rename output file %s to %s: %s",sandbox_name,f->payload,strerror(errno)); if(copy_file_to_file(sandbox_name, f->payload) == -1) { debug(D_WQ, "could not copy output file %s to %s: %s",sandbox_name,f->payload,strerror(errno)); } } free(sandbox_name); } itable_insert(procs_complete, p->task->taskid, p); } } return 1; } /** * Stream file/directory contents for the recursive get/put protocol. * Format: * for a directory: a new line in the format of "dir $DIR_NAME 0" * for a file: a new line in the format of "file $FILE_NAME $FILE_LENGTH" * then file contents. * string "end" at the end of the stream (on a new line). 
* * Example: * Assume we have the following directory structure: * mydir * -- 1.txt * -- 2.txt * -- mysubdir * -- a.txt * -- b.txt * -- z.jpg * * The stream contents would be: * * dir mydir 0 * file 1.txt $file_len * $$ FILE 1.txt's CONTENTS $$ * file 2.txt $file_len * $$ FILE 2.txt's CONTENTS $$ * dir mysubdir 0 * file mysubdir/a.txt $file_len * $$ FILE mysubdir/a.txt's CONTENTS $$ * file mysubdir/b.txt $file_len * $$ FILE mysubdir/b.txt's CONTENTS $$ * file z.jpg $file_len * $$ FILE z.jpg's CONTENTS $$ * end * */ static int stream_output_item(struct link *master, const char *filename, int recursive) { DIR *dir; struct dirent *dent; char dentline[WORK_QUEUE_LINE_MAX]; char cached_filename[WORK_QUEUE_LINE_MAX]; struct stat info; int64_t actual, length; int fd; string_nformat(cached_filename, sizeof(cached_filename), "cache/%s", filename); if(stat(cached_filename, &info) != 0) { goto failure; } if(S_ISDIR(info.st_mode)) { // stream a directory dir = opendir(cached_filename); if(!dir) { goto failure; } send_master_message(master, "dir %s 0\n", filename); while(recursive && (dent = readdir(dir))) { if(!strcmp(dent->d_name, ".") || !strcmp(dent->d_name, "..")) continue; string_nformat(dentline, sizeof(dentline), "%s/%s", filename, dent->d_name); stream_output_item(master, dentline, recursive); } closedir(dir); } else { // stream a file fd = open(cached_filename, O_RDONLY, 0); if(fd >= 0) { length = info.st_size; send_master_message(master, "file %s %"PRId64"\n", filename, length ); actual = link_stream_from_fd(master, fd, length, time(0) + active_timeout); close(fd); if(actual != length) { debug(D_WQ, "Sending back output file - %s failed: bytes to send = %"PRId64" and bytes actually sent = %"PRId64".", filename, length, actual); return 0; } } else { goto failure; } } return 1; failure: send_master_message(master, "missing %s %d\n", filename, errno); return 0; } /* For each of the files and directories needed by a task, link them into the sandbox. Return true if successful. */ int setup_sandbox( struct work_queue_process *p ) { struct work_queue_file *f; list_first_item(p->task->input_files); while((f = list_next_item(p->task->input_files))) { char *sandbox_name = string_format("%s/%s",skip_dotslash(p->sandbox),f->remote_name); int result = 0; // remote name may contain relative path components, so create them in advance create_dir_parents(sandbox_name,0777); if(f->type == WORK_QUEUE_DIRECTORY) { debug(D_WQ,"creating directory %s",sandbox_name); result = create_dir(sandbox_name, 0700); if(!result) debug(D_WQ,"couldn't create directory %s: %s", sandbox_name, strerror(errno)); } else { debug(D_WQ,"linking %s to %s",f->payload,sandbox_name); result = link_recursive(skip_dotslash(f->payload),skip_dotslash(sandbox_name)); if(!result) { if(errno==EEXIST) { // XXX silently ignore the case where the target file exists. // This happens when masters apps map the same input file twice, or to the same name. // Would be better to reject this at the master instead. result = 1; } else { debug(D_WQ,"couldn't link %s into sandbox as %s: %s",f->payload,sandbox_name,strerror(errno)); } } } free(sandbox_name); if(!result) return 0; } return 1; } /* For a task run locally, if the resources are all set to -1, then assume that the task occupies all worker resources. Otherwise, just make sure all values are non-zero. 
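(More precisely, negative values are clamped to zero: a task that requests, for example, only cores ends up with memory, disk, and gpus of zero rather than inheriting the whole worker.)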
*/ static void normalize_resources( struct work_queue_process *p ) { struct work_queue_task *t = p->task; if(t->resources_requested->cores < 0 && t->resources_requested->memory < 0 && t->resources_requested->disk < 0 && t->resources_requested->gpus < 0) { t->resources_requested->cores = local_resources->cores.total; t->resources_requested->memory = local_resources->memory.total; t->resources_requested->disk = local_resources->disk.total; t->resources_requested->gpus = local_resources->gpus.total; } else { t->resources_requested->cores = MAX(t->resources_requested->cores, 0); t->resources_requested->memory = MAX(t->resources_requested->memory, 0); t->resources_requested->disk = MAX(t->resources_requested->disk, 0); t->resources_requested->gpus = MAX(t->resources_requested->gpus, 0); } } /* Handle an incoming task message from the master. Generate a work_queue_process wrapped around a work_queue_task, and deposit it into the waiting list or the foreman_q as appropriate. */ static int do_task( struct link *master, int taskid, time_t stoptime ) { char line[WORK_QUEUE_LINE_MAX]; char filename[WORK_QUEUE_LINE_MAX]; char localname[WORK_QUEUE_LINE_MAX]; char taskname[WORK_QUEUE_LINE_MAX]; char taskname_encoded[WORK_QUEUE_LINE_MAX]; char category[WORK_QUEUE_LINE_MAX]; int flags, length; int64_t n; int disk_alloc = disk_allocation; timestamp_t nt; struct work_queue_task *task = work_queue_task_create(0); task->taskid = taskid; while(recv_master_message(master,line,sizeof(line),stoptime)) { if(!strcmp(line,"end")) { break; } else if(sscanf(line, "category %s",category)) { work_queue_task_specify_category(task, category); } else if(sscanf(line,"cmd %d",&length)==1) { char *cmd = malloc(length+1); link_read(master,cmd,length,stoptime); cmd[length] = 0; work_queue_task_specify_command(task,cmd); debug(D_WQ,"rx from master: %s",cmd); free(cmd); } else if(sscanf(line,"infile %s %s %d", filename, taskname_encoded, &flags)) { string_nformat(localname, sizeof(localname), "cache/%s", filename); url_decode(taskname_encoded, taskname, WORK_QUEUE_LINE_MAX); work_queue_task_specify_file(task, localname, taskname, WORK_QUEUE_INPUT, flags); } else if(sscanf(line,"outfile %s %s %d", filename, taskname_encoded, &flags)) { string_nformat(localname, sizeof(localname), "cache/%s", filename); url_decode(taskname_encoded, taskname, WORK_QUEUE_LINE_MAX); work_queue_task_specify_file(task, localname, taskname, WORK_QUEUE_OUTPUT, flags); } else if(sscanf(line, "dir %s", filename)) { work_queue_task_specify_directory(task, filename, filename, WORK_QUEUE_INPUT, 0700, 0); } else if(sscanf(line,"cores %" PRId64,&n)) { work_queue_task_specify_cores(task, n); } else if(sscanf(line,"memory %" PRId64,&n)) { work_queue_task_specify_memory(task, n); } else if(sscanf(line,"disk %" PRId64,&n)) { work_queue_task_specify_disk(task, n); } else if(sscanf(line,"gpus %" PRId64,&n)) { work_queue_task_specify_gpus(task, n); } else if(sscanf(line,"wall_time %" PRIu64,&nt)) { work_queue_task_specify_running_time(task, nt); } else if(sscanf(line,"end_time %" PRIu64,&nt)) { work_queue_task_specify_end_time(task, nt); } else if(sscanf(line,"env %d",&length)==1) { char *env = malloc(length+2); /* +2 for \n and \0 */ link_read(master, env, length+1, stoptime); env[length] = 0; /* replace \n with \0 */ char *value = strchr(env,'='); if(value) { *value = 0; value++; work_queue_task_specify_environment_variable(task,env,value); } free(env); } else { debug(D_WQ|D_NOTICE,"invalid command from master: %s",line); return 0; } } last_task_received = 
task->taskid; struct work_queue_process *p = work_queue_process_create(task, disk_alloc); if(!p) { return 0; } // Every received task goes into procs_table. itable_insert(procs_table,taskid,p); if(worker_mode==WORKER_MODE_FOREMAN) { work_queue_submit_internal(foreman_q,task); } else { // XXX sandbox setup should be done in task execution, // so that it can be returned cleanly as a failure to execute. if(!setup_sandbox(p)) { itable_remove(procs_table,taskid); work_queue_process_delete(p); return 0; } normalize_resources(p); list_push_tail(procs_waiting,p); } work_queue_watcher_add_process(watcher,p); return 1; } /* Return false if name is invalid as a simple filename. For example, if it contains a slash, which would escape the current working directory. */ int is_valid_filename( const char *name ) { if(strchr(name,'/')) return 0; return 1; } /* Handle an incoming symbolic link inside the rput protocol. The filename of the symlink was already given in the message, and the target of the symlink is given as the "body" which must be read off of the wire. The symlink target does not need to be url_decoded because it is sent in the body. */ static int do_put_symlink_internal( struct link *master, char *filename, int length ) { char *target = malloc(length); int actual = link_read(master,target,length,time(0)+active_timeout); if(actual!=length) { free(target); return 0; } int result = symlink(target,filename); if(result<0) { debug(D_WQ,"could not create symlink %s: %s",filename,strerror(errno)); free(target); return 0; } free(target); return 1; } /* Handle an incoming file inside the rput protocol. Notice that we trust the caller to have created the necessary parent directories and checked the name for validity. */ static int do_put_file_internal( struct link *master, char *filename, int64_t length, int mode ) { if(!check_disk_space_for_filesize(".", length, disk_avail_threshold)) { debug(D_WQ, "Could not put file %s, not enough disk space (%"PRId64" bytes needed)\n", filename, length); return 0; } /* Ensure that worker can access the file! */ mode = mode | 0600; int fd = open(filename, O_WRONLY | O_CREAT | O_TRUNC, mode); if(fd<0) { debug(D_WQ, "Could not open %s for writing. (%s)\n", filename, strerror(errno)); return 0; } int64_t actual = link_stream_to_fd(master, fd, length, time(0) + active_timeout); close(fd); if(actual!=length) { debug(D_WQ, "Failed to put file - %s (%s)\n", filename, strerror(errno)); return 0; } return 1; } /* Handle an incoming directory inside the recursive dir protocol. Notice that we have already checked the dirname for validity, and now we process "put" and "dir" commands within the list until "end" is reached. Note that "put" is used instead of "file" for historical reasons, to support recursive reuse of existing code. 
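For illustration (not an exact transcript), a directory containing one file and one empty subdirectory arrives as a sequence along these lines, with the raw file bytes following each "put" line:

dir mydir
put 1.txt 120 0644
(120 bytes of file data)
dir mysubdir
end
end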
*/ static int do_put_dir_internal( struct link *master, char *dirname ) { char line[WORK_QUEUE_LINE_MAX]; char name_encoded[WORK_QUEUE_LINE_MAX]; char name[WORK_QUEUE_LINE_MAX]; int64_t size; int mode; int result = mkdir(dirname,0777); if(result<0) { debug(D_WQ,"unable to create %s: %s",dirname,strerror(errno)); return 0; } while(1) { if(!recv_master_message(master,line,sizeof(line),time(0)+active_timeout)) return 0; int r = 0; if(sscanf(line,"put %s %" SCNd64 " %o",name_encoded,&size,&mode)==3) { url_decode(name_encoded,name,sizeof(name)); if(!is_valid_filename(name)) return 0; char *subname = string_format("%s/%s",dirname,name); r = do_put_file_internal(master,subname,size,mode); free(subname); } else if(sscanf(line,"symlink %s %" SCNd64,name_encoded,&size)==2) { url_decode(name_encoded,name,sizeof(name)); if(!is_valid_filename(name)) return 0; char *subname = string_format("%s/%s",dirname,name); r = do_put_symlink_internal(master,subname,size); free(subname); } else if(sscanf(line,"dir %s",name_encoded)==1) { url_decode(name_encoded,name,sizeof(name)); if(!is_valid_filename(name)) return 0; char *subname = string_format("%s/%s",dirname,name); r = do_put_dir_internal(master,subname); free(subname); } else if(!strcmp(line,"end")) { break; } if(!r) return 0; } return 1; } static int do_put_dir( struct link *master, char *dirname ) { if(!is_valid_filename(dirname)) return 0; char * cachename = string_format("cache/%s",dirname); int result = do_put_dir_internal(master,cachename); free(cachename); return result; } /* This is the old method for sending a single file. It works, but it has the deficiency that the master expects the worker to create all parent directories for the file, which is horrifically expensive when sending a large directory tree. The direction put protocol (above) is preferred instead. */ static int do_put_single_file( struct link *master, char *filename, int64_t length, int mode ) { if(!path_within_dir(filename, workspace)) { debug(D_WQ, "Path - %s is not within workspace %s.", filename, workspace); return 0; } char * cached_filename = string_format("cache/%s",filename); if(strchr(filename,'/')) { char dirname[WORK_QUEUE_LINE_MAX]; path_dirname(filename,dirname); if(!create_dir(dirname,0777)) { debug(D_WQ, "could not create directory %s: %s",dirname,strerror(errno)); free(cached_filename); return 0; } } int result = do_put_file_internal(master,cached_filename,length,mode); free(cached_filename); return result; } static int file_from_url(const char *url, const char *filename) { debug(D_WQ, "Retrieving %s from (%s)\n", filename, url); char command[WORK_QUEUE_LINE_MAX]; string_nformat(command, sizeof(command), "curl -f -o \"%s\" \"%s\"", filename, url); if (system(command) == 0) { debug(D_WQ, "Success, file retrieved from %s\n", url); } else { debug(D_WQ, "Failed to retrieve file from %s\n", url); return 0; } return 1; } static int do_url(struct link* master, const char *filename, int length, int mode) { char url[WORK_QUEUE_LINE_MAX]; link_read(master, url, length, time(0) + active_timeout); char cache_name[WORK_QUEUE_LINE_MAX]; string_nformat(cache_name, sizeof(cache_name), "cache/%s", filename); return file_from_url(url, cache_name); } static int do_unlink(const char *path) { char cached_path[WORK_QUEUE_LINE_MAX]; string_nformat(cached_path, sizeof(cached_path), "cache/%s", path); if(!path_within_dir(cached_path, workspace)) { debug(D_WQ, "%s is not within workspace %s",cached_path,workspace); return 0; } //Use delete_dir() since it calls unlink() if path is a file. 
if(delete_dir(cached_path) != 0) { struct stat buf; if(stat(cached_path, &buf) != 0) { if(errno == ENOENT) { // If the path does not exist, return success return 1; } } // Failed to do unlink return 0; } return 1; } static int do_get(struct link *master, const char *filename, int recursive) { stream_output_item(master, filename, recursive); send_master_message(master, "end\n"); return 1; } static int do_thirdget(int mode, char *filename, const char *path) { char cmd[WORK_QUEUE_LINE_MAX]; char cached_filename[WORK_QUEUE_LINE_MAX]; char *cur_pos; if(mode != WORK_QUEUE_FS_CMD) { struct stat info; if(stat(path, &info) != 0) { debug(D_WQ, "Path %s not accessible. (%s)\n", path, strerror(errno)); return 0; } if(!strcmp(filename, path)) { debug(D_WQ, "thirdget aborted: filename (%s) and path (%s) are the same\n", filename, path); return 1; } } cur_pos = filename; while(!strncmp(cur_pos, "./", 2)) { cur_pos += 2; } string_nformat(cached_filename, sizeof(cached_filename), "cache/%s", cur_pos); cur_pos = strrchr(cached_filename, '/'); if(cur_pos) { *cur_pos = '\0'; if(!create_dir(cached_filename, mode | 0700)) { debug(D_WQ, "Could not create directory - %s (%s)\n", cached_filename, strerror(errno)); return 0; } *cur_pos = '/'; } switch (mode) { case WORK_QUEUE_FS_SYMLINK: if(symlink(path, cached_filename) != 0) { debug(D_WQ, "Could not thirdget %s, symlink (%s) failed. (%s)\n", filename, path, strerror(errno)); return 0; } /* falls through */ case WORK_QUEUE_FS_PATH: string_nformat(cmd, sizeof(cmd), "/bin/cp %s %s", path, cached_filename); if(system(cmd) != 0) { debug(D_WQ, "Could not thirdget %s, copy (%s) failed. (%s)\n", filename, path, strerror(errno)); return 0; } break; case WORK_QUEUE_FS_CMD: string_nformat(cmd, sizeof(cmd), "%s > %s", path, cached_filename); if(system(cmd) != 0) { debug(D_WQ, "Could not thirdget %s, command (%s) failed. (%s)\n", filename, cmd, strerror(errno)); return 0; } break; } return 1; } static int do_thirdput(struct link *master, int mode, char *filename, const char *path) { struct stat info; char cmd[WORK_QUEUE_LINE_MAX]; char cached_filename[WORK_QUEUE_LINE_MAX]; char *cur_pos; int result = 1; cur_pos = filename; while(!strncmp(cur_pos, "./", 2)) { cur_pos += 2; } string_nformat(cached_filename, sizeof(cached_filename), "cache/%s", cur_pos); if(stat(cached_filename, &info) != 0) { debug(D_WQ, "File %s not accessible. (%s)\n", cached_filename, strerror(errno)); result = 0; } switch (mode) { case WORK_QUEUE_FS_SYMLINK: case WORK_QUEUE_FS_PATH: if(!strcmp(filename, path)) { debug(D_WQ, "thirdput aborted: filename (%s) and path (%s) are the same\n", filename, path); result = 1; } cur_pos = strrchr(path, '/'); if(cur_pos) { *cur_pos = '\0'; if(!create_dir(path, mode | 0700)) { debug(D_WQ, "Could not create directory - %s (%s)\n", path, strerror(errno)); result = 0; *cur_pos = '/'; break; } *cur_pos = '/'; } string_nformat(cmd, sizeof(cmd), "/bin/cp -r %s %s", cached_filename, path); if(system(cmd) != 0) { debug(D_WQ, "Could not thirdput %s, copy (%s) failed. (%s)\n", cached_filename, path, strerror(errno)); result = 0; } break; case WORK_QUEUE_FS_CMD: string_nformat(cmd, sizeof(cmd), "%s < %s", path, cached_filename); if(system(cmd) != 0) { debug(D_WQ, "Could not thirdput %s, command (%s) failed. (%s)\n", filename, cmd, strerror(errno)); result = 0; } break; } send_master_message(master, "thirdput-complete %d\n", result); return result; } /* do_kill removes a process currently known by the worker. 
Note that a kill message from the master is used for every case where a task is to be removed, whether it is waiting, running, of finished. Regardless of the state, we kill the process and remove all of the associated files and other state. */ static int do_kill(int taskid) { struct work_queue_process *p; p = itable_remove(procs_table, taskid); if(!p) { debug(D_WQ,"master requested kill of task %d which does not exist!",taskid); return 1; } if(worker_mode == WORKER_MODE_FOREMAN) { work_queue_cancel_by_taskid(foreman_q, taskid); } else { if(itable_remove(procs_running, p->pid)) { work_queue_process_kill(p); cores_allocated -= p->task->resources_requested->cores; memory_allocated -= p->task->resources_requested->memory; disk_allocated -= p->task->resources_requested->disk; gpus_allocated -= p->task->resources_requested->gpus; } } itable_remove(procs_complete, p->task->taskid); list_remove(procs_waiting,p); work_queue_watcher_remove_process(watcher,p); work_queue_process_delete(p); return 1; } /* Kill off all known tasks by iterating over the complete procs_table and calling do_kill. This should result in all empty procs_* structures and zero resources allocated. If this failed to bring the system back to a fresh state, then we need to abort to clean things up. */ static void kill_all_tasks() { struct work_queue_process *p; uint64_t taskid; itable_firstkey(procs_table); while(itable_nextkey(procs_table,&taskid,(void**)&p)) { do_kill(taskid); } assert(itable_size(procs_table)==0); assert(itable_size(procs_running)==0); assert(itable_size(procs_complete)==0); assert(list_size(procs_waiting)==0); assert(cores_allocated==0); assert(memory_allocated==0); assert(disk_allocated==0); assert(gpus_allocated==0); debug(D_WQ,"all data structures are clean"); } /* Remove a file, even when mark as cached. Foreman broadcast this message to * foremen down its hierarchy. It is invalid for a worker to receice this message. */ static int do_invalidate_file(const char *filename) { if(worker_mode == WORKER_MODE_FOREMAN) { work_queue_invalidate_cached_file_internal(foreman_q, filename); return 1; } return -1; } static void finish_running_task(struct work_queue_process *p, work_queue_result_t result) { p->task_status |= result; kill(p->pid, SIGKILL); } static void finish_running_tasks(work_queue_result_t result) { struct work_queue_process *p; pid_t pid; itable_firstkey(procs_running); while(itable_nextkey(procs_running, (uint64_t*) &pid, (void**)&p)) { finish_running_task(p, result); } } static int enforce_process_limits(struct work_queue_process *p) { /* If the task did not specify disk usage, return right away. */ if(p->disk < 1) return 1; work_queue_process_measure_disk(p, max_time_on_measurement); if(p->sandbox_size > p->task->resources_requested->disk) { debug(D_WQ,"Task %d went over its disk size limit: %" PRId64 " MB > %" PRIu64 " MB\n", p->task->taskid, p->sandbox_size, p->task->resources_requested->disk); return 0; } return 1; } static int enforce_processes_limits() { static time_t last_check_time = 0; struct work_queue_process *p; pid_t pid; int ok = 1; /* Do not check too often, as it is expensive (particularly disk) */ if((time(0) - last_check_time) < check_resources_interval ) return 1; itable_firstkey(procs_table); while(itable_nextkey(procs_table,(uint64_t*)&pid,(void**)&p)) { if(!enforce_process_limits(p)) { finish_running_task(p, WORK_QUEUE_RESULT_RESOURCE_EXHAUSTION); /* we delete the sandbox, to free the exhausted resource. 
If a loop device is used, use remove loop device*/ if(p->loop_mount == 1) { disk_alloc_delete(p->sandbox); } else { delete_dir(p->sandbox); } ok = 0; } } last_check_time = time(0); return ok; } /* We check maximum_running_time by itself (not in enforce_processes_limits), * as other running tasks should not be affected by a task timeout. */ static void enforce_processes_max_running_time() { struct work_queue_process *p; pid_t pid; timestamp_t now = timestamp_get(); itable_firstkey(procs_running); while(itable_nextkey(procs_running, (uint64_t*) &pid, (void**) &p)) { /* If the task did not specify wall_time, return right away. */ if(p->task->resources_requested->wall_time < 1) continue; if(now < p->execution_start + p->task->resources_requested->wall_time) { debug(D_WQ,"Task %d went over its running time limit: %" PRId64 " us > %" PRIu64 " us\n", p->task->taskid, now - p->execution_start, p->task->resources_requested->wall_time); p->task_status = WORK_QUEUE_RESULT_TASK_MAX_RUN_TIME; kill(pid, SIGKILL); } } return; } static int do_release() { debug(D_WQ, "released by master %s:%d.\n", current_master_address->addr, current_master_address->port); released_by_master = 1; return 0; } static void disconnect_master(struct link *master) { debug(D_WQ, "disconnecting from master %s:%d", current_master_address->addr, current_master_address->port); link_close(master); debug(D_WQ, "killing all outstanding tasks"); kill_all_tasks(); //KNOWN HACK: We remove all workers on a master disconnection to avoid //returning old tasks to a new master. if(foreman_q) { debug(D_WQ, "Disconnecting all workers...\n"); release_all_workers(foreman_q); if(project_regex) { update_catalog(foreman_q, master, 1); } } if(released_by_master) { released_by_master = 0; } else if(abort_flag) { // Bail out quickly } else { sleep(5); } } static int handle_master(struct link *master) { char line[WORK_QUEUE_LINE_MAX]; char filename_encoded[WORK_QUEUE_LINE_MAX]; char filename[WORK_QUEUE_LINE_MAX]; char path[WORK_QUEUE_LINE_MAX]; int64_t length; int64_t taskid = 0; int mode, r, n; if(recv_master_message(master, line, sizeof(line), idle_stoptime )) { if(sscanf(line,"task %" SCNd64, &taskid)==1) { r = do_task(master, taskid,time(0)+active_timeout); } else if(sscanf(line,"put %s %"SCNd64" %o",filename_encoded,&length,&mode)==3) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_put_single_file(master, filename, length, mode); reset_idle_timer(); } else if(sscanf(line, "dir %s", filename_encoded)==1) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_put_dir(master,filename); reset_idle_timer(); } else if(sscanf(line, "url %s %" SCNd64 " %o", filename, &length, &mode) == 3) { r = do_url(master, filename, length, mode); reset_idle_timer(); } else if(sscanf(line, "unlink %s", filename_encoded) == 1) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_unlink(filename); } else if(sscanf(line, "get %s %d", filename_encoded, &mode) == 2) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_get(master, filename, mode); } else if(sscanf(line, "thirdget %o %s %[^\n]", &mode, filename_encoded, path) == 3) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_thirdget(mode, filename, path); } else if(sscanf(line, "thirdput %o %s %[^\n]", &mode, filename_encoded, path) == 3) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_thirdput(master, mode, filename, path); reset_idle_timer(); } else if(sscanf(line, "kill %" SCNd64, &taskid) == 1) { if(taskid >= 0) { r = do_kill(taskid); 
} else { kill_all_tasks(); r = 1; } } else if(sscanf(line, "invalidate-file %s", filename_encoded) == 1) { url_decode(filename_encoded,filename,sizeof(filename)); r = do_invalidate_file(filename); } else if(!strncmp(line, "release", 8)) { r = do_release(); } else if(!strncmp(line, "exit", 5)) { work_queue_broadcast_message(foreman_q, "exit\n"); abort_flag = 1; r = 1; } else if(!strncmp(line, "check", 6)) { r = send_keepalive(master, 0); } else if(!strncmp(line, "auth", 4)) { fprintf(stderr,"work_queue_worker: this master requires a password. (use the -P option)\n"); r = 0; } else if(sscanf(line, "send_results %d", &n) == 1) { report_tasks_complete(master); r = 1; } else { debug(D_WQ, "Unrecognized master message: %s.\n", line); r = 0; } } else { debug(D_WQ, "Failed to read from master.\n"); r = 0; } return r; } /* Return true if this task can run with the resources currently available. */ static int task_resources_fit_now(struct work_queue_task *t) { return (cores_allocated + t->resources_requested->cores <= local_resources->cores.total) && (memory_allocated + t->resources_requested->memory <= local_resources->memory.total) && (disk_allocated + t->resources_requested->disk <= local_resources->disk.total) && (gpus_allocated + t->resources_requested->gpus <= local_resources->gpus.total); } /* Return true if this task can eventually run with the resources available. For example, this is needed for when the worker is launched without the --memory option, and the free available memory of the system is consumed by some other process. */ static int task_resources_fit_eventually(struct work_queue_task *t) { struct work_queue_resources *r; if(worker_mode == WORKER_MODE_FOREMAN) { r = total_resources; } else { r = local_resources; } return (t->resources_requested->cores <= r->cores.largest) && (t->resources_requested->memory <= r->memory.largest) && (t->resources_requested->disk <= r->disk.largest) && (t->resources_requested->gpus <= r->gpus.largest); } void forsake_waiting_process(struct link *master, struct work_queue_process *p) { /* the task cannot run in this worker */ p->task_status = WORK_QUEUE_RESULT_FORSAKEN; itable_insert(procs_complete, p->task->taskid, p); debug(D_WQ, "Waiting task %d has been forsaken.", p->task->taskid); /* we also send updated resources to the master. */ send_keepalive(master, 1); } /* If 0, the worker is using more resources than promised. 1 if resource usage holds that promise. 
*/ static int enforce_worker_limits(struct link *master) { if( manual_wall_time_option > 0 && (time(0) - worker_start_time) > manual_wall_time_option) { fprintf(stderr,"work_queue_worker: reached the wall time limit %"PRIu64" s\n", (uint64_t)manual_wall_time_option); if(master) { send_master_message(master, "info wall_time_exhausted %"PRIu64"\n", (uint64_t)manual_wall_time_option); } return 0; } if( manual_disk_option > 0 && local_resources->disk.inuse > (manual_disk_option - disk_avail_threshold/2) ) { fprintf(stderr,"work_queue_worker: %s used more than declared disk space (--disk - --disk-threshold < disk used) %"PRIu64" - %"PRIu64 " < %"PRIu64" MB\n", workspace, manual_disk_option, disk_avail_threshold, local_resources->disk.inuse); if(master) { send_master_message(master, "info disk_exhausted %lld\n", (long long) local_resources->disk.inuse); } return 0; } if( manual_memory_option > 0 && local_resources->memory.inuse > (manual_memory_option - memory_avail_threshold/2) ) { fprintf(stderr,"work_queue_worker: used more than declared memory (--memory - --memory-threshold < memory used) %"PRIu64" - %"PRIu64 " < %"PRIu64" MB\n", manual_memory_option, memory_avail_threshold, local_resources->memory.inuse); if(master) { send_master_message(master, "info memory_exhausted %lld\n", (long long) local_resources->memory.inuse); } return 0; } return 1; } /* If 0, the worker has less resources than promised. 1 otherwise. */ static int enforce_worker_promises(struct link *master) { if( manual_disk_option > 0 && local_resources->disk.total < manual_disk_option) { fprintf(stderr,"work_queue_worker: has less than the promised disk space (--disk > disk total) %"PRIu64" < %"PRIu64" MB\n", manual_disk_option, local_resources->disk.total); if(master) { send_master_message(master, "info disk_error %lld\n", (long long) local_resources->disk.total); } return 0; } return 1; } static void work_for_master(struct link *master) { sigset_t mask; debug(D_WQ, "working for master at %s:%d.\n", current_master_address->addr, current_master_address->port); sigemptyset(&mask); sigaddset(&mask, SIGCHLD); sigaddset(&mask, SIGTERM); sigaddset(&mask, SIGQUIT); sigaddset(&mask, SIGINT); sigaddset(&mask, SIGUSR1); sigaddset(&mask, SIGUSR2); reset_idle_timer(); time_t volatile_stoptime = time(0) + 60; // Start serving masters while(!abort_flag) { if(time(0) > idle_stoptime) { debug(D_NOTICE, "disconnecting from %s:%d because I did not receive any task in %d seconds (--idle-timeout).\n", current_master_address->addr,current_master_address->port,idle_timeout); send_master_message(master, "info idle-disconnecting %lld\n", (long long) idle_timeout); break; } if(worker_volatility && time(0) > volatile_stoptime) { if( (double)rand()/(double)RAND_MAX < worker_volatility) { debug(D_NOTICE, "work_queue_worker: disconnect from master due to volatility check.\n"); break; } else { volatile_stoptime = time(0) + 60; } } /* link_usleep will cause the worker to sleep for a time until interrupted by a SIGCHILD signal. However, the signal could have been delivered while we were outside of the wait function, setting sigchld_received_flag. In that case, do not block but proceed with the There is a still a (very small) race condition in that the signal could be received between the check and link_usleep, hence a maximum wait time of five seconds is enforced. 
*/ int wait_msec = 5000; if(sigchld_received_flag) { wait_msec = 0; sigchld_received_flag = 0; } int master_activity = link_usleep_mask(master, wait_msec*1000, &mask, 1, 0); if(master_activity < 0) break; int ok = 1; if(master_activity) { ok &= handle_master(master); } expire_procs_running(); ok &= handle_tasks(master); measure_worker_resources(); if(!enforce_worker_promises(master)) { abort_flag = 1; break; } enforce_processes_max_running_time(); /* end a running processes if goes above its declared limits. * Mark offending process as RESOURCE_EXHASTION. */ enforce_processes_limits(); /* end running processes if worker resources are exhasusted, and marked * them as FORSAKEN, so they can be resubmitted somewhere else. */ if(!enforce_worker_limits(master)) { finish_running_tasks(WORK_QUEUE_RESULT_FORSAKEN); // finish all tasks, disconnect from master, but don't kill the worker (no abort_flag = 1) break; } int task_event = 0; if(ok) { struct work_queue_process *p; int visited; int waiting = list_size(procs_waiting); for(visited = 0; visited < waiting; visited++) { p = list_pop_head(procs_waiting); if(!p) { break; } else if(task_resources_fit_now(p->task)) { start_process(p); task_event++; } else if(task_resources_fit_eventually(p->task)) { list_push_tail(procs_waiting, p); } else { forsake_waiting_process(master, p); task_event++; } } } if(task_event > 0) { send_stats_update(master); } if(ok && !results_to_be_sent_msg) { if(work_queue_watcher_check(watcher) || itable_size(procs_complete) > 0) { send_master_message(master, "available_results\n"); results_to_be_sent_msg = 1; } } if(!ok) { break; } //Reset idle_stoptime if something interesting is happening at this worker. if(list_size(procs_waiting) > 0 || itable_size(procs_table) > 0 || itable_size(procs_complete) > 0) { reset_idle_timer(); } } } static void foreman_for_master(struct link *master) { int master_active = 0; if(!master) { return; } debug(D_WQ, "working for master at %s:%d as foreman.\n", current_master_address->addr, current_master_address->port); reset_idle_timer(); int prev_num_workers = 0; while(!abort_flag) { int result = 1; struct work_queue_task *task = NULL; if(time(0) > idle_stoptime && work_queue_empty(foreman_q)) { debug(D_NOTICE, "giving up because did not receive any task in %d seconds.\n", idle_timeout); send_master_message(master, "info idle-disconnecting %lld\n", (long long) idle_timeout); break; } measure_worker_resources(); /* if the number of workers changed by more than %10, send an status update */ int curr_num_workers = total_resources->workers.total; if(10*abs(curr_num_workers - prev_num_workers) > prev_num_workers) { send_keepalive(master, 0); } prev_num_workers = curr_num_workers; task = work_queue_wait_internal(foreman_q, foreman_internal_timeout, master, &master_active); if(task) { struct work_queue_process *p; p = itable_lookup(procs_table,task->taskid); if(!p) fatal("no entry in procs table for taskid %d",task->taskid); itable_insert(procs_complete, task->taskid, p); result = 1; } if(!results_to_be_sent_msg && itable_size(procs_complete) > 0) { send_master_message(master, "available_results\n"); results_to_be_sent_msg = 1; } if(master_active) { result &= handle_master(master); reset_idle_timer(); } if(!result) break; } } /* workspace_create is done once when the worker starts. 
*/ static int workspace_create() { char absolute[WORK_QUEUE_LINE_MAX]; // Setup working space(dir) const char *workdir; const char *workdir_tmp; if (user_specified_workdir) { workdir = user_specified_workdir; } else if((workdir_tmp = getenv("_CONDOR_SCRATCH_DIR")) && access(workdir_tmp, R_OK|W_OK|X_OK) == 0) { workdir = workdir_tmp; } else if((workdir_tmp = getenv("TMPDIR")) && access(workdir_tmp, R_OK|W_OK|X_OK) == 0) { workdir = workdir_tmp; } else if((workdir_tmp = getenv("TEMP")) && access(workdir_tmp, R_OK|W_OK|X_OK) == 0) { workdir = workdir_tmp; } else if((workdir_tmp = getenv("TMP")) && access(workdir_tmp, R_OK|W_OK|X_OK) == 0) { workdir = workdir_tmp; } else { workdir = "/tmp"; } if(!workspace) { workspace = string_format("%s/worker-%d-%d", workdir, (int) getuid(), (int) getpid()); } printf( "work_queue_worker: creating workspace %s\n", workspace); if(!create_dir(workspace,0777)) { return 0; } path_absolute(workspace, absolute, 1); free(workspace); workspace = xxstrdup(absolute); return 1; } /* Create a test script and try to execute. With this we check the scratch directory allows file execution. */ static int workspace_check() { int error = 0; /* set 1 on error */ char *fname = string_format("%s/test.sh", workspace); FILE *file = fopen(fname, "w"); if(!file) { warn(D_NOTICE, "Could not write to %s", workspace); error = 1; } else { fprintf(file, "#!/bin/sh\nexit 0\n"); fclose(file); chmod(fname, 0755); int exit_status = system(fname); if(WIFEXITED(exit_status) && WEXITSTATUS(exit_status) == 126) { /* Note that we do not set status=1 on 126, as the executables may live ouside workspace. */ warn(D_NOTICE, "Could not execute a test script in the workspace directory '%s'.", workspace); warn(D_NOTICE, "Is the filesystem mounted as 'noexec'?\n"); warn(D_NOTICE, "Unless the task command is an absolute path, the task will fail with exit status 126.\n"); } else if(!WIFEXITED(exit_status) || WEXITSTATUS(exit_status) != 0) { error = 1; } } unlink(fname); free(fname); if(error) { warn(D_NOTICE, "The workspace %s could not be used.\n", workspace); warn(D_NOTICE, "Use the --workdir command line switch to change where the workspace is created.\n"); } return !error; } /* workspace_prepare is called every time we connect to a new master, */ static int workspace_prepare() { debug(D_WQ,"preparing workspace %s",workspace); char *cachedir = string_format("%s/cache",workspace); int result = create_dir(cachedir,0777); free(cachedir); char *tmp_name = string_format("%s/cache/tmp", workspace); result |= create_dir(tmp_name,0777); setenv("WORKER_TMPDIR", tmp_name, 1); free(tmp_name); return result; } /* workspace_cleanup is called every time we disconnect from a master, to remove any state left over from a previous run. */ static void workspace_cleanup() { debug(D_WQ,"cleaning workspace %s",workspace); delete_dir_contents(workspace); } /* workspace_delete is called when the worker is about to exit, so that all files are removed. XXX the cleanup of internal data structures doesn't quite belong here. 
*/ static void workspace_delete() { if(user_specified_workdir) free(user_specified_workdir); if(os_name) free(os_name); if(arch_name) free(arch_name); if(foreman_q) work_queue_delete(foreman_q); if(procs_running) itable_delete(procs_running); if(procs_table) itable_delete(procs_table); if(procs_complete) itable_delete(procs_complete); if(procs_waiting) list_delete(procs_waiting); if(watcher) work_queue_watcher_delete(watcher); printf( "work_queue_worker: deleting workspace %s\n", workspace); delete_dir(workspace); free(workspace); } static int serve_master_by_hostport( const char *host, int port, const char *verify_project ) { if(!domain_name_cache_lookup(host,current_master_address->addr)) { fprintf(stderr,"couldn't resolve hostname %s",host); return 0; } /* For the preliminary steps of password and project verification, we use the idle timeout, because we have not yet been assigned any work and should leave if the master is not responsive. It is tempting to use a short timeout here, but DON'T. The name and password messages are ayncronous; if the master is busy handling other workers, a short window is not enough for a response to come back. */ reset_idle_timer(); struct link *master = link_connect(current_master_address->addr,port,idle_stoptime); if(!master) { fprintf(stderr,"couldn't connect to %s:%d: %s\n",current_master_address->addr,port,strerror(errno)); return 0; } link_tune(master,LINK_TUNE_INTERACTIVE); char local_addr[LINK_ADDRESS_MAX]; int local_port; link_address_local(master, local_addr, &local_port); printf("connected to master %s:%d via local address %s:%d\n", host, port, local_addr, local_port); debug(D_WQ, "connected to master %s:%d via local address %s:%d", host, port, local_addr, local_port); if(password) { debug(D_WQ,"authenticating to master"); if(!link_auth_password(master,password,idle_stoptime)) { fprintf(stderr,"work_queue_worker: wrong password for master %s:%d\n",host,port); link_close(master); return 0; } } if(verify_project) { char line[WORK_QUEUE_LINE_MAX]; debug(D_WQ, "verifying master's project name"); send_master_message(master, "name\n"); if(!recv_master_message(master,line,sizeof(line),idle_stoptime)) { debug(D_WQ,"no response from master while verifying name"); link_close(master); return 0; } if(strcmp(line,verify_project)) { fprintf(stderr, "work_queue_worker: master has project %s instead of %s\n", line, verify_project); link_close(master); return 0; } } workspace_prepare(); measure_worker_resources(); report_worker_ready(master); if(worker_mode == WORKER_MODE_FOREMAN) { foreman_for_master(master); } else { work_for_master(master); } if(abort_signal_received) { send_master_message(master, "info vacating %d\n", abort_signal_received); } last_task_received = 0; results_to_be_sent_msg = 0; workspace_cleanup(); disconnect_master(master); printf("disconnected from master %s:%d\n", host, port ); return 1; } int serve_master_by_hostport_list(struct list *master_addresses) { int result = 0; /* keep trying masters in the list, until all master addresses * are tried, or a succesful connection was done */ list_first_item(master_addresses); while((current_master_address = list_next_item(master_addresses))) { result = serve_master_by_hostport(current_master_address->host,current_master_address->port,0); if(result) { break; } } return result; } static struct list *interfaces_to_list(const char *addr, int port, struct jx *ifas) { struct list *l = list_create(); struct jx *ifa; int found_canonical = 0; for (void *i = NULL; (ifa = jx_iterate_array(ifas, &i));) { 
const char *ifa_addr = jx_lookup_string(ifa, "host"); if(ifa_addr && strcmp(addr, ifa_addr) == 0) { found_canonical = 1; } struct master_address *m = calloc(1, sizeof(*m)); strncpy(m->host, ifa_addr, LINK_ADDRESS_MAX); m->port = port; list_push_tail(l, m); } if(ifas && !found_canonical) { warn(D_NOTICE, "Did not find the master address '%s' in the list of interfaces.", addr); } if(!found_canonical) { /* We get here if no interfaces were defined, or if addr was not found in the interfaces. */ struct master_address *m = calloc(1, sizeof(*m)); strncpy(m->host, addr, LINK_ADDRESS_MAX); m->port = port; list_push_tail(l, m); } return l; } static int serve_master_by_name( const char *catalog_hosts, const char *project_regex ) { struct list *masters_list = work_queue_catalog_query_cached(catalog_hosts,-1,project_regex); debug(D_WQ,"project name %s matches %d masters",project_regex,list_size(masters_list)); if(list_size(masters_list)==0) return 0; // shuffle the list by r items to distribute the load across masters int r = rand() % list_size(masters_list); int i; for(i=0;i<r;i++) { list_push_tail(masters_list,list_pop_head(masters_list)); } static struct master_address *last_addr = NULL; while(1) { struct jx *jx = list_peek_head(masters_list); const char *project = jx_lookup_string(jx,"project"); const char *name = jx_lookup_string(jx,"name"); const char *addr = jx_lookup_string(jx,"address"); const char *pref = jx_lookup_string(jx,"master_preferred_connection"); struct jx *ifas = jx_lookup(jx,"network_interfaces"); int port = jx_lookup_integer(jx,"port"); if(last_addr) { if(time(0) > idle_stoptime && strcmp(addr, last_addr->host) == 0 && port == last_addr->port) { if(list_size(masters_list) < 2) { free(last_addr); last_addr = NULL; /* convert idle_stoptime into connect_stoptime (e.g., time already served). 
*/ connect_stoptime = idle_stoptime; debug(D_WQ,"Previous idle disconnection from only master available project=%s name=%s addr=%s port=%d",project,name,addr,port); return 0; } else { list_push_tail(masters_list,list_pop_head(masters_list)); continue; } } } int result; if(pref && strcmp(pref, "by_hostname") == 0) { debug(D_WQ,"selected master with project=%s name=%s addr=%s port=%d",project,name,addr,port); result = serve_master_by_hostport(name,port,project); } else { master_addresses = interfaces_to_list(addr, port, ifas); result = serve_master_by_hostport_list(master_addresses); struct master_address *m; while((m = list_pop_head(master_addresses))) { free(m); } list_delete(master_addresses); master_addresses = NULL; } if(result) { free(last_addr); last_addr = calloc(1,sizeof(*last_addr)); strncpy(last_addr->host, addr, DOMAIN_NAME_MAX); last_addr->port = port; } return result; } } void set_worker_id() { srand(time(NULL)); char *salt_and_pepper = string_format("%d%d%d", getpid(), getppid(), rand()); unsigned char digest[MD5_DIGEST_LENGTH]; md5_buffer(salt_and_pepper, strlen(salt_and_pepper), digest); worker_id = string_format("worker-%s", md5_string(digest)); free(salt_and_pepper); } static void handle_abort(int sig) { abort_flag = 1; abort_signal_received = sig; } static void handle_sigchld(int sig) { sigchld_received_flag = 1; } static void read_resources_env_var(const char *name, int64_t *manual_option) { char *value; value = getenv(name); if(value) { *manual_option = atoi(value); /* unset variable so that children task cannot read the global value */ unsetenv(name); } } static void read_resources_env_vars() { read_resources_env_var("CORES", &manual_cores_option); read_resources_env_var("MEMORY", &manual_memory_option); read_resources_env_var("DISK", &manual_disk_option); read_resources_env_var("GPUS", &manual_gpus_option); } struct list *parse_master_addresses(const char *specs, int default_port) { struct list *masters = list_create(); char *masters_args = xxstrdup(specs); char *next_master = strtok(masters_args, ";"); while(next_master) { int port = default_port; char *port_str = strchr(next_master, ':'); if(port_str) { char *no_ipv4 = strchr(port_str+1, ':'); /* if another ':', then this is not ipv4. */ if(!no_ipv4) { *port_str = '\0'; port = atoi(port_str+1); } } if(port < 1) { fatal("Invalid port for master '%s'", next_master); } struct master_address *m = calloc(1, sizeof(*m)); strncpy(m->host, next_master, LINK_ADDRESS_MAX); m->port = port; if(port_str) { *port_str = ':'; } list_push_tail(masters, m); next_master = strtok(NULL, ";"); } free(masters_args); return(masters); } static void show_help(const char *cmd) { printf( "Use: %s [options] <masterhost> <port> \n" "or\n %s [options] \"masterhost:port[;masterhost:port;masterhost:port;...]\"\n" "or\n %s [options] -M projectname\n", cmd, cmd, cmd); printf( "where options are:\n"); printf( " %-30s Show version string\n", "-v,--version"); printf( " %-30s Show this help screen\n", "-h,--help"); printf( " %-30s Name of master (project) to contact. May be a regular expression.\n", "-N,-M,--master-name=<name>"); printf( " %-30s Catalog server to query for masters. (default: %s:%d) \n", "-C,--catalog=<host:port>",CATALOG_HOST,CATALOG_PORT); printf( " %-30s Enable debugging for this subsystem.\n", "-d,--debug=<subsystem>"); printf( " %-30s Send debugging to this file. 
(can also be :stderr, :stdout, :syslog, or :journal)\n", "-o,--debug-file=<file>"); printf( " %-30s Set the maximum size of the debug log (default 10M, 0 disables).\n", "--debug-rotate-max=<bytes>"); printf( " %-30s Set worker to run as a foreman.\n", "--foreman"); printf( " %-30s Run as a foreman, and advertise to the catalog server with <name>.\n", "-f,--foreman-name=<name>"); printf( " %-30s\n", "--foreman-port=<port>[:<highport>]"); printf( " %-30s Set the port for the foreman to listen on. If <highport> is specified\n", ""); printf( " %-30s the port is chosen from the range port:highport. Implies --foreman.\n", ""); printf( " %-30s Select port to listen to at random and write to this file. Implies --foreman.\n", "-Z,--foreman-port-file=<file>"); printf( " %-30s Set the fast abort multiplier for foreman (default=disabled).\n", "-F,--fast-abort=<mult>"); printf( " %-30s Send statistics about foreman to this file.\n", "--specify-log=<logfile>"); printf( " %-30s Password file for authenticating to the master.\n", "-P,--password=<pwfile>"); printf( " %-30s Set both --idle-timeout and --connect-timeout.\n", "-t,--timeout=<time>"); printf( " %-30s Disconnect after this time if master sends no work. (default=%ds)\n", " --idle-timeout=<time>", idle_timeout); printf( " %-30s Abort after this time if no masters are available. (default=%ds)\n", " --connect-timeout=<time>", idle_timeout); printf( " %-30s Set TCP window size.\n", "-w,--tcp-window-size=<size>"); printf( " %-30s Set initial value for backoff interval when worker fails to connect\n", "-i,--min-backoff=<time>"); printf( " %-30s to a master. (default=%ds)\n", "", init_backoff_interval); printf( " %-30s Set maximum value for backoff interval when worker fails to connect\n", "-b,--max-backoff=<time>"); printf( " %-30s to a master. (default=%ds)\n", "", max_backoff_interval); printf( " %-30s Minimum free disk space in MB. When free disk space is less than this value, the\n", "-z,--disk-threshold=<size>"); printf( " %-30s worker will clean up and try to reconnect. (default=%" PRIu64 "MB)\n", "", disk_avail_threshold); printf( " %-30s Set available memory size threshold (in MB). When exceeded worker will\n", "--memory-threshold=<size>"); printf( " %-30s clean up and reconnect. (default=%" PRIu64 "MB)\n", "", memory_avail_threshold); printf( " %-30s Set architecture string for the worker to report to master instead\n", "-A,--arch=<arch>"); printf( " %-30s of the value in uname (%s).\n", "", arch_name); printf( " %-30s Set operating system string for the worker to report to master instead\n", "-O,--os=<os>"); printf( " %-30s of the value in uname (%s).\n", "", os_name); printf( " %-30s Set the location for creating the working directory of the worker.\n", "-s,--workdir=<path>"); printf( " %-30s Set the maximum bandwidth the foreman will consume in bytes per second. Example: 100M for 100MBps. (default=unlimited)\n", "--bandwidth=<Bps>"); printf( " %-30s Set the number of cores reported by this worker. Set to 0 to have the\n", "--cores=<n>"); printf( " %-30s worker automatically measure. (default=%"PRId64")\n", "", manual_cores_option); printf( " %-30s Set the number of GPUs reported by this worker. 
(default=0)\n", "--gpus=<n>"); printf( " %-30s Manually set the amount of memory (in MB) reported by this worker.\n", "--memory=<mb> "); printf( " %-30s Manually set the amount of disk (in MB) reported by this worker.\n", "--disk=<mb>"); printf( " %-30s Use loop devices for task sandboxes (default=disabled, requires root access).\n", "--disk-allocation"); printf( " %-30s Specifies a user-defined feature the worker provides. May be specified several times.\n", "--feature"); printf( " %-30s Set the maximum number of seconds the worker may be active. (in s).\n", "--wall-time=<s>"); printf( " %-30s Forbid the use of symlinks for cache management.\n", "--disable-symlinks"); printf(" %-30s Single-shot mode -- quit immediately after disconnection.\n", "--single-shot"); printf(" %-30s docker mode -- run each task with a container based on this docker image.\n", "--docker=<image>"); printf(" %-30s docker-preserve mode -- tasks execute by a worker share a container based on this docker image.\n", "--docker-preserve=<image>"); printf(" %-30s docker-tar mode -- build docker image from tarball, this mode must be used with --docker or --docker-preserve.\n", "--docker-tar=<tarball>"); printf( " %-30s Set the percent chance per minute that the worker will shut down (simulates worker failures, for testing only).\n", "--volatility=<chance>"); } enum {LONG_OPT_DEBUG_FILESIZE = 256, LONG_OPT_VOLATILITY, LONG_OPT_BANDWIDTH, LONG_OPT_DEBUG_RELEASE, LONG_OPT_SPECIFY_LOG, LONG_OPT_CORES, LONG_OPT_MEMORY, LONG_OPT_DISK, LONG_OPT_GPUS, LONG_OPT_FOREMAN, LONG_OPT_FOREMAN_PORT, LONG_OPT_DISABLE_SYMLINKS, LONG_OPT_IDLE_TIMEOUT, LONG_OPT_CONNECT_TIMEOUT, LONG_OPT_RUN_DOCKER, LONG_OPT_RUN_DOCKER_PRESERVE, LONG_OPT_BUILD_FROM_TAR, LONG_OPT_SINGLE_SHOT, LONG_OPT_WALL_TIME, LONG_OPT_DISK_ALLOCATION, LONG_OPT_MEMORY_THRESHOLD, LONG_OPT_FEATURE}; static const struct option long_options[] = { {"advertise", no_argument, 0, 'a'}, {"catalog", required_argument, 0, 'C'}, {"debug", required_argument, 0, 'd'}, {"debug-file", required_argument, 0, 'o'}, {"debug-rotate-max", required_argument, 0, LONG_OPT_DEBUG_FILESIZE}, {"disk-allocation", no_argument, 0, LONG_OPT_DISK_ALLOCATION}, {"foreman", no_argument, 0, LONG_OPT_FOREMAN}, {"foreman-port", required_argument, 0, LONG_OPT_FOREMAN_PORT}, {"foreman-port-file", required_argument, 0, 'Z'}, {"foreman-name", required_argument, 0, 'f'}, {"measure-capacity", no_argument, 0, 'c'}, {"fast-abort", required_argument, 0, 'F'}, {"specify-log", required_argument, 0, LONG_OPT_SPECIFY_LOG}, {"master-name", required_argument, 0, 'M'}, {"password", required_argument, 0, 'P'}, {"timeout", required_argument, 0, 't'}, {"idle-timeout", required_argument, 0, LONG_OPT_IDLE_TIMEOUT}, {"connect-timeout", required_argument, 0, LONG_OPT_CONNECT_TIMEOUT}, {"tcp-window-size", required_argument, 0, 'w'}, {"min-backoff", required_argument, 0, 'i'}, {"max-backoff", required_argument, 0, 'b'}, {"single-shot", no_argument, 0, LONG_OPT_SINGLE_SHOT }, {"disable-symlinks", no_argument, 0, LONG_OPT_DISABLE_SYMLINKS}, {"disk-threshold", required_argument, 0, 'z'}, {"memory-threshold", required_argument, 0, LONG_OPT_MEMORY_THRESHOLD}, {"arch", required_argument, 0, 'A'}, {"os", required_argument, 0, 'O'}, {"workdir", required_argument, 0, 's'}, {"volatility", required_argument, 0, LONG_OPT_VOLATILITY}, {"bandwidth", required_argument, 0, LONG_OPT_BANDWIDTH}, {"cores", required_argument, 0, LONG_OPT_CORES}, {"memory", required_argument, 0, LONG_OPT_MEMORY}, {"disk", required_argument, 0, LONG_OPT_DISK}, {"gpus", 
required_argument, 0, LONG_OPT_GPUS}, {"wall-time", required_argument, 0, LONG_OPT_WALL_TIME}, {"help", no_argument, 0, 'h'}, {"version", no_argument, 0, 'v'}, {"disable-symlinks", no_argument, 0, LONG_OPT_DISABLE_SYMLINKS}, {"docker", required_argument, 0, LONG_OPT_RUN_DOCKER}, {"docker-preserve", required_argument, 0, LONG_OPT_RUN_DOCKER_PRESERVE}, {"docker-tar", required_argument, 0, LONG_OPT_BUILD_FROM_TAR}, {"feature", required_argument, 0, LONG_OPT_FEATURE}, {0,0,0,0} }; int main(int argc, char *argv[]) { int c; int w; int foreman_port = -1; char * foreman_name = NULL; char * port_file = NULL; struct utsname uname_data; int enable_capacity = 1; // enabled by default double fast_abort_multiplier = 0; char *foreman_stats_filename = NULL; char * catalog_hosts = CATALOG_HOST; features = hash_table_create(4, 0); worker_start_time = time(0); set_worker_id(); //obtain the architecture and os on which worker is running. uname(&uname_data); os_name = xxstrdup(uname_data.sysname); arch_name = xxstrdup(uname_data.machine); worker_mode = WORKER_MODE_WORKER; debug_config(argv[0]); read_resources_env_vars(); while((c = getopt_long(argc, argv, "acC:d:f:F:t:o:p:M:N:P:w:i:b:z:A:O:s:vZ:h", long_options, 0)) != -1) { switch (c) { case 'a': //Left here for backwards compatibility break; case 'C': catalog_hosts = xxstrdup(optarg); break; case 'd': debug_flags_set(optarg); break; case LONG_OPT_DEBUG_FILESIZE: debug_config_file_size(MAX(0, string_metric_parse(optarg))); break; case 'f': worker_mode = WORKER_MODE_FOREMAN; foreman_name = xxstrdup(optarg); break; case LONG_OPT_FOREMAN_PORT: { char *low_port = optarg; char *high_port= strchr(optarg, ':'); worker_mode = WORKER_MODE_FOREMAN; if(high_port) { *high_port = '\0'; high_port++; } else { foreman_port = atoi(low_port); break; } setenv("WORK_QUEUE_LOW_PORT", low_port, 0); setenv("WORK_QUEUE_HIGH_PORT", high_port, 0); foreman_port = -1; break; } case 'c': // This option is deprecated. Capacity estimation is now on by default for the foreman. 
enable_capacity = 1; break; case 'F': fast_abort_multiplier = atof(optarg); break; case LONG_OPT_SPECIFY_LOG: foreman_stats_filename = xxstrdup(optarg); break; case 't': connect_timeout = idle_timeout = string_time_parse(optarg); break; case LONG_OPT_IDLE_TIMEOUT: idle_timeout = string_time_parse(optarg); break; case LONG_OPT_CONNECT_TIMEOUT: connect_timeout = string_time_parse(optarg); break; case 'o': debug_config_file(optarg); break; case LONG_OPT_FOREMAN: worker_mode = WORKER_MODE_FOREMAN; break; case 'M': case 'N': project_regex = optarg; break; case 'p': // ignore for backwards compatibility break; case 'w': w = string_metric_parse(optarg); link_window_set(w, w); break; case 'i': init_backoff_interval = string_metric_parse(optarg); break; case 'b': max_backoff_interval = string_metric_parse(optarg); if (max_backoff_interval < init_backoff_interval) { fprintf(stderr, "Maximum backoff interval provided must be greater than the initial backoff interval of %ds.\n", init_backoff_interval); exit(1); } break; case 'z': disk_avail_threshold = atoll(optarg) * MEGA; break; case LONG_OPT_MEMORY_THRESHOLD: memory_avail_threshold = atoll(optarg); break; case 'A': free(arch_name); //free the arch string obtained from uname arch_name = xxstrdup(optarg); break; case 'O': free(os_name); //free the os string obtained from uname os_name = xxstrdup(optarg); break; case 's': { char temp_abs_path[PATH_MAX]; path_absolute(optarg, temp_abs_path, 1); user_specified_workdir = xxstrdup(temp_abs_path); break; } case 'v': cctools_version_print(stdout, argv[0]); exit(EXIT_SUCCESS); break; case 'P': if(copy_file_to_buffer(optarg, &password, NULL) < 0) { fprintf(stderr,"work_queue_worker: couldn't load password from %s: %s\n",optarg,strerror(errno)); exit(EXIT_FAILURE); } break; case 'Z': port_file = xxstrdup(optarg); worker_mode = WORKER_MODE_FOREMAN; break; case LONG_OPT_VOLATILITY: worker_volatility = atof(optarg); break; case LONG_OPT_BANDWIDTH: setenv("WORK_QUEUE_BANDWIDTH", optarg, 1); break; case LONG_OPT_DEBUG_RELEASE: setenv("WORK_QUEUE_RESET_DEBUG_FILE", "yes", 1); break; case LONG_OPT_CORES: if(!strncmp(optarg, "all", 3)) { manual_cores_option = 0; } else { manual_cores_option = atoi(optarg); } break; case LONG_OPT_MEMORY: if(!strncmp(optarg, "all", 3)) { manual_memory_option = 0; } else { manual_memory_option = atoll(optarg); } break; case LONG_OPT_DISK: if(!strncmp(optarg, "all", 3)) { manual_disk_option = 0; } else { manual_disk_option = atoll(optarg); } break; case LONG_OPT_GPUS: if(!strncmp(optarg, "all", 3)) { manual_gpus_option = 0; } else { manual_gpus_option = atoi(optarg); } break; case LONG_OPT_WALL_TIME: manual_wall_time_option = atoi(optarg); break; case LONG_OPT_DISABLE_SYMLINKS: symlinks_enabled = 0; break; case LONG_OPT_SINGLE_SHOT: single_shot_mode = 1; break; case 'h': show_help(argv[0]); return 0; case LONG_OPT_RUN_DOCKER: container_mode = CONTAINER_MODE_DOCKER; img_name = xxstrdup(optarg); break; case LONG_OPT_RUN_DOCKER_PRESERVE: container_mode = CONTAINER_MODE_DOCKER_PRESERVE; img_name = xxstrdup(optarg); break; case LONG_OPT_BUILD_FROM_TAR: load_from_tar = 1; tar_fn = xxstrdup(optarg); break; case LONG_OPT_DISK_ALLOCATION: { char *abs_path_preloader = string_format("%s/lib/libforce_halt_enospc.so", INSTALL_PATH); int preload_result; char *curr_ld_preload = getenv("LD_PRELOAD"); if(curr_ld_preload && abs_path_preloader) { char *new_ld_preload = string_format("%s:%s", curr_ld_preload, abs_path_preloader); preload_result = setenv("LD_PRELOAD", new_ld_preload, 1); free(new_ld_preload); 
} else if(abs_path_preloader) { preload_result = setenv("LD_PRELOAD", abs_path_preloader, 1); } else { preload_result = 1; } free(abs_path_preloader); if(preload_result) { timestamp_t preload_fail_time = timestamp_get(); debug(D_WQ|D_NOTICE, "i/o dynamic library linking via LD_PRELOAD for loop device failed at: %"PRId64"", preload_fail_time); } disk_allocation = 1; break; } case LONG_OPT_FEATURE: hash_table_insert(features, optarg, (void **) 1); break; default: show_help(argv[0]); return 1; } } cctools_version_debug(D_DEBUG, argv[0]); // for backwards compatibility with the old syntax for specifying a worker's project name if(worker_mode != WORKER_MODE_FOREMAN && foreman_name) { if(foreman_name) { project_regex = foreman_name; } } //checks that the foreman has a unique name from the master if(worker_mode == WORKER_MODE_FOREMAN && foreman_name){ if(project_regex && strcmp(foreman_name,project_regex) == 0) { fatal("Foreman (%s) and Master (%s) share a name. Ensure that these are unique.\n",foreman_name,project_regex); } } //checks disk options make sense if(manual_disk_option > 0 && manual_disk_option <= disk_avail_threshold) { fatal("Disk space specified (%" PRId64 " MB) is less than minimum threshold (%"PRId64 " MB).\n See --disk and --disk-threshold options.", manual_disk_option, disk_avail_threshold); } //checks memory options make sense if(manual_memory_option > 0 && manual_memory_option <= memory_avail_threshold) { fatal("Memory specified (%" PRId64 " MB) is less than minimum threshold (%"PRId64 " MB).\n See --memory and --memory-threshold options.", manual_memory_option, memory_avail_threshold); } if(!project_regex) { if((argc - optind) < 1 || (argc - optind) > 2) { show_help(argv[0]); exit(1); } int default_master_port = (argc - optind) == 2 ? atoi(argv[optind+1]) : 0; master_addresses = parse_master_addresses(argv[optind], default_master_port); if(list_size(master_addresses) < 1) { show_help(argv[0]); fatal("No master has been specified"); } } //Check GPU name char *gpu_name = gpu_name_get(); if(gpu_name) { hash_table_insert(features, gpu_name, (void **) 1); } signal(SIGTERM, handle_abort); signal(SIGQUIT, handle_abort); signal(SIGINT, handle_abort); //Also do cleanup on SIGUSR1 & SIGUSR2 to allow using -notify and -l s_rt= options if submitting //this worker process with SGE qsub. Otherwise task processes are left running when SGE //terminates this process with SIGKILL. signal(SIGUSR1, handle_abort); signal(SIGUSR2, handle_abort); signal(SIGCHLD, handle_sigchld); random_init(); if(!workspace_create()) { fprintf(stderr, "work_queue_worker: failed to setup workspace at %s.\n", workspace); exit(1); } if(!workspace_check()) { return 1; } // set $WORK_QUEUE_SANDBOX to workspace. debug(D_WQ, "WORK_QUEUE_SANDBOX set to %s.\n", workspace); setenv("WORK_QUEUE_SANDBOX", workspace, 0); //get absolute pathnames of port and log file. 
char temp_abs_path[PATH_MAX]; if(port_file) { path_absolute(port_file, temp_abs_path, 0); free(port_file); port_file = xxstrdup(temp_abs_path); } if(foreman_stats_filename) { path_absolute(foreman_stats_filename, temp_abs_path, 0); free(foreman_stats_filename); foreman_stats_filename = xxstrdup(temp_abs_path); } // change to workspace chdir(workspace); if(worker_mode == WORKER_MODE_FOREMAN) { char foreman_string[WORK_QUEUE_LINE_MAX]; free(os_name); //free the os string obtained from uname os_name = xxstrdup("foreman"); string_nformat(foreman_string, sizeof(foreman_string), "%s-foreman", argv[0]); debug_config(foreman_string); foreman_q = work_queue_create(foreman_port); if(!foreman_q) { fprintf(stderr, "work_queue_worker-foreman: failed to create foreman queue. Terminating.\n"); exit(1); } printf( "work_queue_worker-foreman: listening on port %d\n", work_queue_port(foreman_q)); if(port_file) { opts_write_port_file(port_file, work_queue_port(foreman_q)); } if(foreman_name) { work_queue_specify_name(foreman_q, foreman_name); work_queue_specify_master_mode(foreman_q, WORK_QUEUE_MASTER_MODE_CATALOG); } if(password) { work_queue_specify_password(foreman_q,password); } work_queue_specify_estimate_capacity_on(foreman_q, enable_capacity); work_queue_activate_fast_abort(foreman_q, fast_abort_multiplier); work_queue_specify_category_mode(foreman_q, NULL, WORK_QUEUE_ALLOCATION_MODE_FIXED); if(foreman_stats_filename) { work_queue_specify_log(foreman_q, foreman_stats_filename); } } if(container_mode == CONTAINER_MODE_DOCKER && load_from_tar == 1) { char load_cmd[1024]; string_nformat(load_cmd, sizeof(load_cmd), "docker load < %s", tar_fn); system(load_cmd); } if(container_mode == CONTAINER_MODE_DOCKER_PRESERVE) { if (load_from_tar == 1) { char load_cmd[1024]; string_nformat(load_cmd, sizeof(load_cmd), "docker load < %s", tar_fn); system(load_cmd); } string_nformat(container_name, sizeof(container_name), "worker-%d-%d", (int) getuid(), (int) getpid()); char container_mnt_point[1024]; char start_container_cmd[1024]; string_nformat(container_mnt_point, sizeof(container_mnt_point), "%s:%s", workspace, DOCKER_WORK_DIR); string_nformat(start_container_cmd, sizeof(start_container_cmd), "docker run -i -d --name=\"%s\" -v %s -w %s %s", container_name, container_mnt_point, DOCKER_WORK_DIR, img_name); system(start_container_cmd); } procs_running = itable_create(0); procs_table = itable_create(0); procs_waiting = list_create(); procs_complete = itable_create(0); watcher = work_queue_watcher_create(); if(!check_disk_space_for_filesize(".", 0, disk_avail_threshold)) { fprintf(stderr,"work_queue_worker: %s has less than minimum disk space %"PRIu64" MB\n",workspace,disk_avail_threshold); return 1; } local_resources = work_queue_resources_create(); total_resources = work_queue_resources_create(); total_resources_last = work_queue_resources_create(); if(manual_cores_option < 1) { manual_cores_option = load_average_get_cpus(); } int backoff_interval = init_backoff_interval; connect_stoptime = time(0) + connect_timeout; measure_worker_resources(); printf("work_queue_worker: using %"PRId64 " cores, %"PRId64 " MB memory, %"PRId64 " MB disk, %"PRId64 " gpus\n", total_resources->cores.total, total_resources->memory.total, total_resources->disk.total, total_resources->gpus.total); while(1) { int result = 0; measure_worker_resources(); if(!enforce_worker_promises(NULL)) { abort_flag = 1; break; } if(project_regex) { result = serve_master_by_name(catalog_hosts, project_regex); } else { result = 
serve_master_by_hostport_list(master_addresses); } /* If the last attempt was a succesful connection, then reset the backoff_interval, and the connect timeout, then try again if a project name was given. If the connect attempt failed, then slow down the retries. */ if(result) { if(single_shot_mode) { debug(D_DEBUG,"stopping: single shot mode"); break; } backoff_interval = init_backoff_interval; connect_stoptime = time(0) + connect_timeout; if(!project_regex && (time(0)>idle_stoptime)) { debug(D_NOTICE,"stopping: no other masters available"); break; } } else { backoff_interval = MIN(backoff_interval*2,max_backoff_interval); } if(abort_flag) { debug(D_NOTICE,"stopping: abort signal received"); break; } if(time(0)>connect_stoptime) { debug(D_NOTICE,"stopping: could not connect after %d seconds.",connect_timeout); break; } sleep(backoff_interval); } if(container_mode == CONTAINER_MODE_DOCKER_PRESERVE || container_mode == CONTAINER_MODE_DOCKER) { char stop_container_cmd[WORK_QUEUE_LINE_MAX]; char rm_container_cmd[WORK_QUEUE_LINE_MAX]; string_nformat(stop_container_cmd, sizeof(stop_container_cmd), "docker stop %s", container_name); string_nformat(rm_container_cmd, sizeof(rm_container_cmd), "docker rm %s", container_name); if(container_mode == CONTAINER_MODE_DOCKER_PRESERVE) { //1. stop the container system(stop_container_cmd); //2. remove the container system(rm_container_cmd); } } workspace_delete(); return 0; } /* vim: set noexpandtab tabstop=4: */
1
14,861
Let's get more verbose about names: call the struct `wq_conda_environment` if it can really only be used for conda, or `wq_software_environment` if it has potential use outside of conda.
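A purely illustrative sketch of that naming suggestion (the struct's real members are not shown in this record, so the fields below are hypothetical, not taken from the actual patch):

```c
/* Hypothetical sketch only -- field names are illustrative, not from the patch under review. */

/* Option 1: name it for the only tool it can serve. */
struct wq_conda_environment {
	char *name;          /* conda environment name */
	char *tarball_path;  /* path to the packed environment archive */
};

/* Option 2: keep a general name if non-conda environments may be supported later. */
struct wq_software_environment {
	char *name;          /* environment name */
	char *type;          /* e.g. "conda"; other types possible later */
	char *tarball_path;  /* path to the packed environment archive */
};
```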
cooperative-computing-lab-cctools
c
@@ -28,6 +28,8 @@ func TestStore_ListProjects(t *testing.T) { cowProjectString, err := marshal(cowProject) require.NoError(t, err, "Marshal project should not fail") + lastPageInPaginatedResp := false + testCases := map[string]struct { mockGetParametersByPath func(t *testing.T, param *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error)
1
// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 package ssm import ( "fmt" "testing" "github.com/aws/PRIVATE-amazon-ecs-archer/internal/pkg/archer" "github.com/aws/PRIVATE-amazon-ecs-archer/internal/pkg/store" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/service/ssm" "github.com/aws/aws-sdk-go/service/ssm/ssmiface" "github.com/aws/aws-sdk-go/service/sts" "github.com/aws/aws-sdk-go/service/sts/stsiface" "github.com/stretchr/testify/require" ) func TestStore_ListProjects(t *testing.T) { testProject := archer.Project{Name: "chicken", Version: "1.0"} testProjectString, err := marshal(testProject) require.NoError(t, err, "Marshal project should not fail") cowProject := archer.Project{Name: "cow", Version: "1.0"} cowProjectString, err := marshal(cowProject) require.NoError(t, err, "Marshal project should not fail") testCases := map[string]struct { mockGetParametersByPath func(t *testing.T, param *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) wantedProjectNames []string wantedErr error }{ "with multiple existing projects": { mockGetParametersByPath: func(t *testing.T, param *ssm.GetParametersByPathInput) (output *ssm.GetParametersByPathOutput, e error) { require.Equal(t, rootProjectPath, *param.Path) return &ssm.GetParametersByPathOutput{ Parameters: []*ssm.Parameter{ { Name: aws.String("/archer/chicken"), Value: aws.String(testProjectString), }, { Name: aws.String("/archer/cow"), Value: aws.String(cowProjectString), }, }, }, nil }, wantedProjectNames: []string{"chicken", "cow"}, wantedErr: nil, }, "with malfored json": { mockGetParametersByPath: func(t *testing.T, param *ssm.GetParametersByPathInput) (output *ssm.GetParametersByPathOutput, e error) { require.Equal(t, rootProjectPath, *param.Path) return &ssm.GetParametersByPathOutput{ Parameters: []*ssm.Parameter{ { Name: aws.String("/archer/chicken"), Value: aws.String("oops"), }, }, }, nil }, wantedErr: fmt.Errorf("invalid character 'o' looking for beginning of value"), }, "with SSM error": { mockGetParametersByPath: func(t *testing.T, param *ssm.GetParametersByPathInput) (output *ssm.GetParametersByPathOutput, e error) { require.Equal(t, rootProjectPath, *param.Path) return nil, fmt.Errorf("broken") }, wantedProjectNames: nil, wantedErr: fmt.Errorf("broken"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN store := &SSM{ systemManager: &mockSSM{ t: t, mockGetParametersByPath: tc.mockGetParametersByPath, }, } // WHEN projects, err := store.ListProjects() // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } else { var names []string for _, p := range projects { names = append(names, p.Name) } require.Equal(t, tc.wantedProjectNames, names) } }) } } func TestStore_GetProject(t *testing.T) { testProject := archer.Project{Name: "chicken", Version: "1.0"} testProjectString, err := marshal(testProject) testProjectPath := fmt.Sprintf(fmtProjectPath, testProject.Name) require.NoError(t, err, "Marshal project should not fail") testCases := map[string]struct { mockGetParameter func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) mockGetCallerIdentity func(t *testing.T, param *sts.GetCallerIdentityInput) (*sts.GetCallerIdentityOutput, error) wantedProject archer.Project wantedErr error }{ "with existing project": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { 
require.Equal(t, testProjectPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: aws.String(testProjectPath), Value: aws.String(testProjectString), }, }, nil }, wantedProject: testProject, wantedErr: nil, }, "with no existing project": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return nil, awserr.New(ssm.ErrCodeParameterNotFound, "No Parameter", fmt.Errorf("No Parameter")) }, mockGetCallerIdentity: func(t *testing.T, input *sts.GetCallerIdentityInput) (*sts.GetCallerIdentityOutput, error) { return &sts.GetCallerIdentityOutput{ Account: aws.String("12345"), }, nil }, wantedErr: &store.ErrNoSuchProject{ ProjectName: "chicken", AccountID: "12345", Region: "us-west-2", }, }, "with no existing project and failed STS call": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return nil, awserr.New(ssm.ErrCodeParameterNotFound, "No Parameter", fmt.Errorf("No Parameter")) }, mockGetCallerIdentity: func(t *testing.T, input *sts.GetCallerIdentityInput) (*sts.GetCallerIdentityOutput, error) { return nil, fmt.Errorf("Error") }, wantedErr: &store.ErrNoSuchProject{ ProjectName: "chicken", AccountID: "unknown", Region: "us-west-2", }, }, "with malfored json": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: aws.String(testProjectPath), Value: aws.String("oops"), }, }, nil }, wantedErr: fmt.Errorf("invalid character 'o' looking for beginning of value"), }, "with SSM error": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return nil, fmt.Errorf("broken") }, wantedErr: fmt.Errorf("broken"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN store := &SSM{ systemManager: &mockSSM{ t: t, mockGetParameter: tc.mockGetParameter, }, tokenService: &mockSTS{ t: t, mockGetCallerIdentity: tc.mockGetCallerIdentity, }, sessionRegion: "us-west-2", } // WHEN project, err := store.GetProject("chicken") // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } else { require.Equal(t, tc.wantedProject, *project) } }) } } func TestStore_CreateProject(t *testing.T) { testProject := archer.Project{Name: "chicken"} testProjectString := fmt.Sprintf("{\"name\":\"chicken\",\"version\":\"%s\"}", schemaVersion) marshal(testProject) testProjectPath := fmt.Sprintf(fmtProjectPath, testProject.Name) testCases := map[string]struct { mockPutParameter func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) wantedErr error }{ "with no existing project": { mockPutParameter: func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) require.Equal(t, testProjectString, *param.Value) return &ssm.PutParameterOutput{ Version: aws.Int64(1), }, nil }, wantedErr: nil, }, "with existing project": { mockPutParameter: func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return nil, awserr.New(ssm.ErrCodeParameterAlreadyExists, "Already exists", fmt.Errorf("Already Exists")) }, wantedErr: &store.ErrProjectAlreadyExists{ 
ProjectName: "chicken", }, }, "with SSM error": { mockPutParameter: func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return nil, fmt.Errorf("broken") }, wantedErr: fmt.Errorf("broken"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN store := &SSM{ systemManager: &mockSSM{ t: t, mockPutParameter: tc.mockPutParameter, }, } // WHEN err := store.CreateProject(&archer.Project{Name: "chicken", Version: "1.0"}) // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } }) } } func TestStore_ListEnvironments(t *testing.T) { testEnvironment := archer.Environment{Name: "test", AccountID: "12345", Project: "chicken", Region: "us-west-2s"} testEnvironmentString, err := marshal(testEnvironment) testEnvironmentPath := fmt.Sprintf(fmtEnvParamPath, testEnvironment.Project, testEnvironment.Name) require.NoError(t, err, "Marshal environment should not fail") prodEnvironment := archer.Environment{Name: "prod", AccountID: "12345", Project: "chicken", Region: "us-west-2s"} prodEnvironmentString, err := marshal(prodEnvironment) prodEnvironmentPath := fmt.Sprintf(fmtEnvParamPath, prodEnvironment.Project, prodEnvironment.Name) require.NoError(t, err, "Marshal environment should not fail") environmentPath := fmt.Sprintf(rootEnvParamPath, testEnvironment.Project) testCases := map[string]struct { mockGetParametersByPath func(t *testing.T, param *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) wantedEnvironments []archer.Environment wantedErr error }{ "with multiple existing environments": { mockGetParametersByPath: func(t *testing.T, param *ssm.GetParametersByPathInput) (output *ssm.GetParametersByPathOutput, e error) { require.Equal(t, environmentPath, *param.Path) return &ssm.GetParametersByPathOutput{ Parameters: []*ssm.Parameter{ { Name: aws.String(testEnvironmentPath), Value: aws.String(testEnvironmentString), }, { Name: aws.String(prodEnvironmentPath), Value: aws.String(prodEnvironmentString), }, }, }, nil }, wantedEnvironments: []archer.Environment{testEnvironment, prodEnvironment}, wantedErr: nil, }, "with malfored json": { mockGetParametersByPath: func(t *testing.T, param *ssm.GetParametersByPathInput) (output *ssm.GetParametersByPathOutput, e error) { require.Equal(t, environmentPath, *param.Path) return &ssm.GetParametersByPathOutput{ Parameters: []*ssm.Parameter{ { Name: aws.String(testEnvironmentPath), Value: aws.String("oops"), }, }, }, nil }, wantedErr: fmt.Errorf("invalid character 'o' looking for beginning of value"), }, "with SSM error": { mockGetParametersByPath: func(t *testing.T, param *ssm.GetParametersByPathInput) (output *ssm.GetParametersByPathOutput, e error) { require.Equal(t, environmentPath, *param.Path) return nil, fmt.Errorf("broken") }, wantedErr: fmt.Errorf("broken"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN store := &SSM{ systemManager: &mockSSM{ t: t, mockGetParametersByPath: tc.mockGetParametersByPath, }, } // WHEN envPointers, err := store.ListEnvironments("chicken") // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } else { var environments []archer.Environment for _, e := range envPointers { environments = append(environments, *e) } require.Equal(t, tc.wantedEnvironments, environments) } }) } } func TestStore_GetEnvironment(t *testing.T) { testEnvironment := archer.Environment{Name: "test", AccountID: "12345", Project: "chicken", Region: 
"us-west-2s"} testEnvironmentString, err := marshal(testEnvironment) testEnvironmentPath := fmt.Sprintf(fmtEnvParamPath, testEnvironment.Project, testEnvironment.Name) require.NoError(t, err, "Marshal environment should not fail") testCases := map[string]struct { mockGetParameter func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) wantedEnvironment archer.Environment wantedErr error }{ "with existing environment": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testEnvironmentPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: aws.String(testEnvironmentPath), Value: aws.String(testEnvironmentString), }, }, nil }, wantedEnvironment: testEnvironment, wantedErr: nil, }, "with no existing environment": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testEnvironmentPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{}, }, nil }, wantedErr: &store.ErrNoSuchEnvironment{ ProjectName: testEnvironment.Project, EnvironmentName: testEnvironment.Name, }, }, "with malfored json": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testEnvironmentPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: aws.String(testEnvironmentPath), Value: aws.String("oops"), }, }, nil }, wantedErr: fmt.Errorf("invalid character 'o' looking for beginning of value"), }, "with SSM error": { mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { return nil, fmt.Errorf("broken") }, wantedErr: fmt.Errorf("broken"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN store := &SSM{ systemManager: &mockSSM{ t: t, mockGetParameter: tc.mockGetParameter, }, } // WHEN env, err := store.GetEnvironment("chicken", "test") // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } else { require.Equal(t, tc.wantedEnvironment, *env) } }) } } func TestStore_CreateEnvironment(t *testing.T) { testProject := archer.Project{Name: "chicken", Version: "1.0"} testProjectString, err := marshal(testProject) testProjectPath := fmt.Sprintf(fmtProjectPath, testProject.Name) require.NoError(t, err, "Marshal project should not fail") testEnvironment := archer.Environment{Name: "test", Project: testProject.Name, AccountID: "1234", Region: "us-west-2"} testEnvironmentString, err := marshal(testEnvironment) testEnvironmentPath := fmt.Sprintf(fmtEnvParamPath, testEnvironment.Project, testEnvironment.Name) require.NoError(t, err, "Marshal environment should not fail") testCases := map[string]struct { mockGetParameter func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) mockPutParameter func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) wantedErr error }{ "with no existing environment": { mockPutParameter: func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { require.Equal(t, testEnvironmentPath, *param.Name) require.Equal(t, testEnvironmentString, *param.Value) return &ssm.PutParameterOutput{ Version: aws.Int64(1), }, nil }, mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: 
aws.String(testProjectPath), Value: aws.String(testProjectString), }, }, nil }, wantedErr: nil, }, "with existing environment": { mockPutParameter: func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { require.Equal(t, testEnvironmentPath, *param.Name) return nil, awserr.New(ssm.ErrCodeParameterAlreadyExists, "Already exists", fmt.Errorf("Already Exists")) }, mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: aws.String(testProjectPath), Value: aws.String(testProjectString), }, }, nil }, wantedErr: &store.ErrEnvironmentAlreadyExists{ EnvironmentName: testEnvironment.Name, ProjectName: testEnvironment.Project, }, }, "with SSM error": { mockPutParameter: func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { return nil, fmt.Errorf("broken") }, mockGetParameter: func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { require.Equal(t, testProjectPath, *param.Name) return &ssm.GetParameterOutput{ Parameter: &ssm.Parameter{ Name: aws.String(testProjectPath), Value: aws.String(testProjectString), }, }, nil }, wantedErr: fmt.Errorf("broken"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN store := &SSM{ systemManager: &mockSSM{ t: t, mockPutParameter: tc.mockPutParameter, mockGetParameter: tc.mockGetParameter, }, } // WHEN err := store.CreateEnvironment(&archer.Environment{ Name: testEnvironment.Name, Project: testEnvironment.Project, AccountID: testEnvironment.AccountID, Region: testEnvironment.Region}) // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } }) } } type mockSSM struct { ssmiface.SSMAPI t *testing.T mockPutParameter func(t *testing.T, param *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) mockGetParametersByPath func(t *testing.T, param *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) mockGetParameter func(t *testing.T, param *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) } func (m *mockSSM) PutParameter(in *ssm.PutParameterInput) (*ssm.PutParameterOutput, error) { return m.mockPutParameter(m.t, in) } func (m *mockSSM) GetParametersByPath(in *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) { return m.mockGetParametersByPath(m.t, in) } func (m *mockSSM) GetParameter(in *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { return m.mockGetParameter(m.t, in) } type mockSTS struct { stsiface.STSAPI t *testing.T mockGetCallerIdentity func(t *testing.T, input *sts.GetCallerIdentityInput) (*sts.GetCallerIdentityOutput, error) } func (m *mockSTS) GetCallerIdentity(input *sts.GetCallerIdentityInput) (*sts.GetCallerIdentityOutput, error) { return m.mockGetCallerIdentity(m.t, input) }
1
10,445
Should we set this back to `false` inside each `t.Run`, so that we can have more than one test case with paginated responses?
aws-copilot-cli
go
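The review comment above asks for the shared pagination state to be reset inside each `t.Run`. Below is a minimal, self-contained Go sketch of that pattern; the flag name `firstPageReturned` and the standalone test are hypothetical stand-ins for the mock state used in the actual test file, and only the reset-per-subtest idea is the point.

package ssm_test

import (
	"testing"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/ssm"
	"github.com/stretchr/testify/require"
)

// TestPaginatedListEnvironments sketches the reviewer's suggestion: shared state
// that simulates a paginated GetParametersByPath response is reset inside each
// t.Run, so more than one test case can exercise the paginated path.
func TestPaginatedListEnvironments(t *testing.T) {
	var firstPageReturned bool // hypothetical shared flag driving the two-page mock

	// paginatedMock returns a NextToken on the first call and the final page afterwards.
	paginatedMock := func(in *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) {
		if !firstPageReturned {
			firstPageReturned = true
			return &ssm.GetParametersByPathOutput{
				NextToken:  aws.String("page-2"),
				Parameters: []*ssm.Parameter{{Name: aws.String("/p1"), Value: aws.String("one")}},
			}, nil
		}
		return &ssm.GetParametersByPathOutput{
			Parameters: []*ssm.Parameter{{Name: aws.String("/p2"), Value: aws.String("two")}},
		}, nil
	}

	for _, name := range []string{"first paginated case", "second paginated case"} {
		t.Run(name, func(t *testing.T) {
			firstPageReturned = false // reset shared pagination state so this case starts on page one

			var collected []*ssm.Parameter
			for {
				out, err := paginatedMock(&ssm.GetParametersByPathInput{})
				require.NoError(t, err)
				collected = append(collected, out.Parameters...)
				if out.NextToken == nil {
					break
				}
			}
			require.Len(t, collected, 2) // both pages are seen only because the flag was reset
		})
	}
}

Without the reset at the top of each subtest, only the first case to exercise pagination would start from page one; every later case would see the flag already flipped.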
@@ -130,6 +130,9 @@ class BaseSnapshot implements Snapshot { if (dataManifests == null) { this.dataManifests = ImmutableList.copyOf(Iterables.filter(allManifests, manifest -> manifest.content() == ManifestContent.DATA)); + } + + if (deleteManifests == null) { this.deleteManifests = ImmutableList.copyOf(Iterables.filter(allManifests, manifest -> manifest.content() == ManifestContent.DELETES)); }
1
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; import org.apache.iceberg.exceptions.RuntimeIOException; import org.apache.iceberg.io.CloseableIterable; import org.apache.iceberg.io.FileIO; import org.apache.iceberg.relocated.com.google.common.base.MoreObjects; import org.apache.iceberg.relocated.com.google.common.base.Objects; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; class BaseSnapshot implements Snapshot { private static final long INITIAL_SEQUENCE_NUMBER = 0; private final FileIO io; private final long snapshotId; private final Long parentId; private final long sequenceNumber; private final long timestampMillis; private final String manifestListLocation; private final String operation; private final Map<String, String> summary; // lazily initialized private transient List<ManifestFile> allManifests = null; private transient List<ManifestFile> dataManifests = null; private transient List<ManifestFile> deleteManifests = null; private transient List<DataFile> cachedAdds = null; private transient List<DataFile> cachedDeletes = null; /** * For testing only. */ BaseSnapshot(FileIO io, long snapshotId, String... 
manifestFiles) { this(io, snapshotId, null, System.currentTimeMillis(), null, null, Lists.transform(Arrays.asList(manifestFiles), path -> new GenericManifestFile(io.newInputFile(path), 0))); } BaseSnapshot(FileIO io, long sequenceNumber, long snapshotId, Long parentId, long timestampMillis, String operation, Map<String, String> summary, String manifestList) { this.io = io; this.sequenceNumber = sequenceNumber; this.snapshotId = snapshotId; this.parentId = parentId; this.timestampMillis = timestampMillis; this.operation = operation; this.summary = summary; this.manifestListLocation = manifestList; } BaseSnapshot(FileIO io, long snapshotId, Long parentId, long timestampMillis, String operation, Map<String, String> summary, List<ManifestFile> dataManifests) { this(io, INITIAL_SEQUENCE_NUMBER, snapshotId, parentId, timestampMillis, operation, summary, null); this.allManifests = dataManifests; } @Override public long sequenceNumber() { return sequenceNumber; } @Override public long snapshotId() { return snapshotId; } @Override public Long parentId() { return parentId; } @Override public long timestampMillis() { return timestampMillis; } @Override public String operation() { return operation; } @Override public Map<String, String> summary() { return summary; } private void cacheManifests() { if (allManifests == null) { // if manifests isn't set, then the snapshotFile is set and should be read to get the list this.allManifests = ManifestLists.read(io.newInputFile(manifestListLocation)); } if (dataManifests == null) { this.dataManifests = ImmutableList.copyOf(Iterables.filter(allManifests, manifest -> manifest.content() == ManifestContent.DATA)); this.deleteManifests = ImmutableList.copyOf(Iterables.filter(allManifests, manifest -> manifest.content() == ManifestContent.DELETES)); } } @Override public List<ManifestFile> allManifests() { if (allManifests == null) { cacheManifests(); } return allManifests; } @Override public List<ManifestFile> dataManifests() { if (dataManifests == null) { cacheManifests(); } return dataManifests; } @Override public List<ManifestFile> deleteManifests() { if (deleteManifests == null) { cacheManifests(); } return deleteManifests; } @Override public List<DataFile> addedFiles() { if (cachedAdds == null) { cacheChanges(); } return cachedAdds; } @Override public List<DataFile> deletedFiles() { if (cachedDeletes == null) { cacheChanges(); } return cachedDeletes; } @Override public String manifestListLocation() { return manifestListLocation; } private void cacheChanges() { ImmutableList.Builder<DataFile> adds = ImmutableList.builder(); ImmutableList.Builder<DataFile> deletes = ImmutableList.builder(); // read only manifests that were created by this snapshot Iterable<ManifestFile> changedManifests = Iterables.filter(dataManifests(), manifest -> Objects.equal(manifest.snapshotId(), snapshotId)); try (CloseableIterable<ManifestEntry<DataFile>> entries = new ManifestGroup(io, changedManifests) .ignoreExisting() .entries()) { for (ManifestEntry<DataFile> entry : entries) { switch (entry.status()) { case ADDED: adds.add(entry.file().copy()); break; case DELETED: deletes.add(entry.file().copyWithoutStats()); break; default: throw new IllegalStateException( "Unexpected entry status, not added or deleted: " + entry); } } } catch (IOException e) { throw new RuntimeIOException(e, "Failed to close entries while caching changes"); } this.cachedAdds = adds.build(); this.cachedDeletes = deletes.build(); } @Override public String toString() { return MoreObjects.toStringHelper(this) 
.add("id", snapshotId) .add("timestamp_ms", timestampMillis) .add("operation", operation) .add("summary", summary) .add("manifest-list", manifestListLocation) .toString(); } }
1
21,215
What about using `dataManifests == null || deleteManifests == null`?
apache-iceberg
java
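The review comment above suggests collapsing the two null checks introduced by the diff into a single condition. A sketch of that alternative for `BaseSnapshot#cacheManifests`, using only the fields and helpers already present in the file, could look like this:

  private void cacheManifests() {
    if (allManifests == null) {
      // if manifests isn't set, then the snapshotFile is set and should be read to get the list
      this.allManifests = ManifestLists.read(io.newInputFile(manifestListLocation));
    }

    // repopulate both filtered lists whenever either one is missing
    if (dataManifests == null || deleteManifests == null) {
      this.dataManifests = ImmutableList.copyOf(Iterables.filter(allManifests,
          manifest -> manifest.content() == ManifestContent.DATA));
      this.deleteManifests = ImmutableList.copyOf(Iterables.filter(allManifests,
          manifest -> manifest.content() == ManifestContent.DELETES));
    }
  }

This keeps a single guard: if only one of the two lists happens to be set, both are rebuilt from `allManifests`, which is cheap and idempotent.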
@@ -469,7 +469,7 @@ public class IndexSearcher { @Override public TopScoreDocCollector newCollector() throws IOException { - return TopScoreDocCollector.create(cappedNumHits, after, TOTAL_HITS_THRESHOLD); + return TopScoreDocCollector.create(cappedNumHits, after, new GlobalHitsThresholdChecker(TOTAL_HITS_THRESHOLD)); } @Override
1
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.search; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; import org.apache.lucene.index.Terms; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.NIOFSDirectory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.ThreadInterruptedException; /** Implements search over a single IndexReader. * * <p>Applications usually need only call the inherited * {@link #search(Query,int)} method. For * performance reasons, if your index is unchanging, you * should share a single IndexSearcher instance across * multiple searches instead of creating a new one * per-search. If your index has changed and you wish to * see the changes reflected in searching, you should * use {@link DirectoryReader#openIfChanged(DirectoryReader)} * to obtain a new reader and * then create a new IndexSearcher from that. Also, for * low-latency turnaround it's best to use a near-real-time * reader ({@link DirectoryReader#open(IndexWriter)}). * Once you have a new {@link IndexReader}, it's relatively * cheap to create a new IndexSearcher from it. * * <p><b>NOTE</b>: The {@link #search} and {@link #searchAfter} methods are * configured to only count top hits accurately up to {@code 1,000} and may * return a {@link TotalHits.Relation lower bound} of the hit count if the * hit count is greater than or equal to {@code 1,000}. On queries that match * lots of documents, counting the number of hits may take much longer than * computing the top hits so this trade-off allows to get some minimal * information about the hit count without slowing down search too much. The * {@link TopDocs#scoreDocs} array is always accurate however. 
If this behavior * doesn't suit your needs, you should create collectors manually with either * {@link TopScoreDocCollector#create} or {@link TopFieldCollector#create} and * call {@link #search(Query, Collector)}. * * <a name="thread-safety"></a><p><b>NOTE</b>: <code>{@link * IndexSearcher}</code> instances are completely * thread safe, meaning multiple threads can call any of its * methods, concurrently. If your application requires * external synchronization, you should <b>not</b> * synchronize on the <code>IndexSearcher</code> instance; * use your own (non-Lucene) objects instead.</p> */ public class IndexSearcher { static int maxClauseCount = 1024; private static QueryCache DEFAULT_QUERY_CACHE; private static QueryCachingPolicy DEFAULT_CACHING_POLICY = new UsageTrackingQueryCachingPolicy(); static { final int maxCachedQueries = 1000; // min of 32MB or 5% of the heap size final long maxRamBytesUsed = Math.min(1L << 25, Runtime.getRuntime().maxMemory() / 20); DEFAULT_QUERY_CACHE = new LRUQueryCache(maxCachedQueries, maxRamBytesUsed); } /** * By default we count hits accurately up to 1000. This makes sure that we * don't spend most time on computing hit counts */ private static final int TOTAL_HITS_THRESHOLD = 1000; /** * Thresholds for index slice allocation logic. To change the default, extend * <code> IndexSearcher</code> and use custom values */ private static final int MAX_DOCS_PER_SLICE = 250_000; private static final int MAX_SEGMENTS_PER_SLICE = 5; final IndexReader reader; // package private for testing! // NOTE: these members might change in incompatible ways // in the next release protected final IndexReaderContext readerContext; protected final List<LeafReaderContext> leafContexts; /** used with executor - each slice holds a set of leafs executed within one thread */ private final LeafSlice[] leafSlices; // These are only used for multi-threaded search private final Executor executor; // the default Similarity private static final Similarity defaultSimilarity = new BM25Similarity(); private QueryCache queryCache = DEFAULT_QUERY_CACHE; private QueryCachingPolicy queryCachingPolicy = DEFAULT_CACHING_POLICY; /** * Expert: returns a default Similarity instance. * In general, this method is only called to initialize searchers and writers. * User code and query implementations should respect * {@link IndexSearcher#getSimilarity()}. * @lucene.internal */ public static Similarity getDefaultSimilarity() { return defaultSimilarity; } /** * Expert: Get the default {@link QueryCache} or {@code null} if the cache is disabled. * @lucene.internal */ public static QueryCache getDefaultQueryCache() { return DEFAULT_QUERY_CACHE; } /** * Expert: set the default {@link QueryCache} instance. * @lucene.internal */ public static void setDefaultQueryCache(QueryCache defaultQueryCache) { DEFAULT_QUERY_CACHE = defaultQueryCache; } /** * Expert: Get the default {@link QueryCachingPolicy}. * @lucene.internal */ public static QueryCachingPolicy getDefaultQueryCachingPolicy() { return DEFAULT_CACHING_POLICY; } /** * Expert: set the default {@link QueryCachingPolicy} instance. * @lucene.internal */ public static void setDefaultQueryCachingPolicy(QueryCachingPolicy defaultQueryCachingPolicy) { DEFAULT_CACHING_POLICY = defaultQueryCachingPolicy; } /** The Similarity implementation used by this searcher. */ private Similarity similarity = defaultSimilarity; /** Creates a searcher searching the provided index. 
*/ public IndexSearcher(IndexReader r) { this(r, null); } /** Runs searches for each segment separately, using the * provided Executor. NOTE: * if you are using {@link NIOFSDirectory}, do not use * the shutdownNow method of ExecutorService as this uses * Thread.interrupt under-the-hood which can silently * close file descriptors (see <a * href="https://issues.apache.org/jira/browse/LUCENE-2239">LUCENE-2239</a>). * * @lucene.experimental */ public IndexSearcher(IndexReader r, Executor executor) { this(r.getContext(), executor); } /** * Creates a searcher searching the provided top-level {@link IndexReaderContext}. * <p> * Given a non-<code>null</code> {@link Executor} this method runs * searches for each segment separately, using the provided Executor. * NOTE: if you are using {@link NIOFSDirectory}, do not use the shutdownNow method of * ExecutorService as this uses Thread.interrupt under-the-hood which can * silently close file descriptors (see <a * href="https://issues.apache.org/jira/browse/LUCENE-2239">LUCENE-2239</a>). * * @see IndexReaderContext * @see IndexReader#getContext() * @lucene.experimental */ public IndexSearcher(IndexReaderContext context, Executor executor) { assert context.isTopLevel: "IndexSearcher's ReaderContext must be topLevel for reader" + context.reader(); reader = context.reader(); this.executor = executor; this.readerContext = context; leafContexts = context.leaves(); this.leafSlices = executor == null ? null : slices(leafContexts); } /** * Creates a searcher searching the provided top-level {@link IndexReaderContext}. * * @see IndexReaderContext * @see IndexReader#getContext() * @lucene.experimental */ public IndexSearcher(IndexReaderContext context) { this(context, null); } /** Return the maximum number of clauses permitted, 1024 by default. * Attempts to add more than the permitted number of clauses cause {@link * TooManyClauses} to be thrown. * @see #setMaxClauseCount(int) */ public static int getMaxClauseCount() { return maxClauseCount; } /** * Set the maximum number of clauses permitted per Query. * Default value is 1024. */ public static void setMaxClauseCount(int value) { if (value < 1) { throw new IllegalArgumentException("maxClauseCount must be >= 1"); } maxClauseCount = value; } /** * Set the {@link QueryCache} to use when scores are not needed. * A value of {@code null} indicates that query matches should never be * cached. This method should be called <b>before</b> starting using this * {@link IndexSearcher}. * <p>NOTE: When using a query cache, queries should not be modified after * they have been passed to IndexSearcher. * @see QueryCache * @lucene.experimental */ public void setQueryCache(QueryCache queryCache) { this.queryCache = queryCache; } /** * Return the query cache of this {@link IndexSearcher}. This will be either * the {@link #getDefaultQueryCache() default query cache} or the query cache * that was last set through {@link #setQueryCache(QueryCache)}. A return * value of {@code null} indicates that caching is disabled. * @lucene.experimental */ public QueryCache getQueryCache() { return queryCache; } /** * Set the {@link QueryCachingPolicy} to use for query caching. * This method should be called <b>before</b> starting using this * {@link IndexSearcher}. * @see QueryCachingPolicy * @lucene.experimental */ public void setQueryCachingPolicy(QueryCachingPolicy queryCachingPolicy) { this.queryCachingPolicy = Objects.requireNonNull(queryCachingPolicy); } /** * Return the query cache of this {@link IndexSearcher}. 
This will be either * the {@link #getDefaultQueryCachingPolicy() default policy} or the policy * that was last set through {@link #setQueryCachingPolicy(QueryCachingPolicy)}. * @lucene.experimental */ public QueryCachingPolicy getQueryCachingPolicy() { return queryCachingPolicy; } /** * Expert: Creates an array of leaf slices each holding a subset of the given leaves. * Each {@link LeafSlice} is executed in a single thread. By default, segments with more than * MAX_DOCS_PER_SLICE will get their own thread */ protected LeafSlice[] slices(List<LeafReaderContext> leaves) { return slices(leaves, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE); } /** * Static method to segregate LeafReaderContexts amongst multiple slices */ public static LeafSlice[] slices (List<LeafReaderContext> leaves, int maxDocsPerSlice, int maxSegmentsPerSlice) { // Make a copy so we can sort: List<LeafReaderContext> sortedLeaves = new ArrayList<>(leaves); // Sort by maxDoc, descending: Collections.sort(sortedLeaves, Collections.reverseOrder(Comparator.comparingInt(l -> l.reader().maxDoc()))); final List<List<LeafReaderContext>> groupedLeaves = new ArrayList<>(); long docSum = 0; List<LeafReaderContext> group = null; for (LeafReaderContext ctx : sortedLeaves) { if (ctx.reader().maxDoc() > maxDocsPerSlice) { assert group == null; groupedLeaves.add(Collections.singletonList(ctx)); } else { if (group == null) { group = new ArrayList<>(); group.add(ctx); groupedLeaves.add(group); } else { group.add(ctx); } docSum += ctx.reader().maxDoc(); if (group.size() >= maxSegmentsPerSlice || docSum > maxDocsPerSlice) { group = null; docSum = 0; } } } LeafSlice[] slices = new LeafSlice[groupedLeaves.size()]; int upto = 0; for (List<LeafReaderContext> currentLeaf : groupedLeaves) { slices[upto] = new LeafSlice(currentLeaf); ++upto; } return slices; } /** Return the {@link IndexReader} this searches. */ public IndexReader getIndexReader() { return reader; } /** * Sugar for <code>.getIndexReader().document(docID)</code> * @see IndexReader#document(int) */ public Document doc(int docID) throws IOException { return reader.document(docID); } /** * Sugar for <code>.getIndexReader().document(docID, fieldVisitor)</code> * @see IndexReader#document(int, StoredFieldVisitor) */ public void doc(int docID, StoredFieldVisitor fieldVisitor) throws IOException { reader.document(docID, fieldVisitor); } /** * Sugar for <code>.getIndexReader().document(docID, fieldsToLoad)</code> * @see IndexReader#document(int, Set) */ public Document doc(int docID, Set<String> fieldsToLoad) throws IOException { return reader.document(docID, fieldsToLoad); } /** Expert: Set the Similarity implementation used by this IndexSearcher. * */ public void setSimilarity(Similarity similarity) { this.similarity = similarity; } /** Expert: Get the {@link Similarity} to use to compute scores. This returns the * {@link Similarity} that has been set through {@link #setSimilarity(Similarity)} * or the default {@link Similarity} if none has been set explicitly. */ public Similarity getSimilarity() { return similarity; } /** * Count how many documents match the given query. 
*/ public int count(Query query) throws IOException { query = rewrite(query); while (true) { // remove wrappers that don't matter for counts if (query instanceof ConstantScoreQuery) { query = ((ConstantScoreQuery) query).getQuery(); } else { break; } } // some counts can be computed in constant time if (query instanceof MatchAllDocsQuery) { return reader.numDocs(); } else if (query instanceof TermQuery && reader.hasDeletions() == false) { Term term = ((TermQuery) query).getTerm(); int count = 0; for (LeafReaderContext leaf : reader.leaves()) { count += leaf.reader().docFreq(term); } return count; } // general case: create a collecor and count matches final CollectorManager<TotalHitCountCollector, Integer> collectorManager = new CollectorManager<TotalHitCountCollector, Integer>() { @Override public TotalHitCountCollector newCollector() throws IOException { return new TotalHitCountCollector(); } @Override public Integer reduce(Collection<TotalHitCountCollector> collectors) throws IOException { int total = 0; for (TotalHitCountCollector collector : collectors) { total += collector.getTotalHits(); } return total; } }; return search(query, collectorManager); } /** Returns the leaf slices used for concurrent searching, or null if no {@code Executor} was * passed to the constructor. * * @lucene.experimental */ public LeafSlice[] getSlices() { return leafSlices; } /** Finds the top <code>n</code> * hits for <code>query</code> where all results are after a previous * result (<code>after</code>). * <p> * By passing the bottom result from a previous page as <code>after</code>, * this method can be used for efficient 'deep-paging' across potentially * large result sets. * * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. */ public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOException { final int limit = Math.max(1, reader.maxDoc()); if (after != null && after.doc >= limit) { throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc=" + after.doc + " limit=" + limit); } final int cappedNumHits = Math.min(numHits, limit); final CollectorManager<TopScoreDocCollector, TopDocs> manager = new CollectorManager<TopScoreDocCollector, TopDocs>() { @Override public TopScoreDocCollector newCollector() throws IOException { return TopScoreDocCollector.create(cappedNumHits, after, TOTAL_HITS_THRESHOLD); } @Override public TopDocs reduce(Collection<TopScoreDocCollector> collectors) throws IOException { final TopDocs[] topDocs = new TopDocs[collectors.size()]; int i = 0; for (TopScoreDocCollector collector : collectors) { topDocs[i++] = collector.topDocs(); } return TopDocs.merge(0, cappedNumHits, topDocs); } }; return search(query, manager); } /** Finds the top <code>n</code> * hits for <code>query</code>. * * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. */ public TopDocs search(Query query, int n) throws IOException { return searchAfter(null, query, n); } /** Lower-level search API. * * <p>{@link LeafCollector#collect(int)} is called for every matching document. * * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. 
*/ public void search(Query query, Collector results) throws IOException { query = rewrite(query); search(leafContexts, createWeight(query, results.scoreMode(), 1), results); } /** Search implementation with arbitrary sorting, plus * control over whether hit scores and max score * should be computed. Finds * the top <code>n</code> hits for <code>query</code>, and sorting * the hits by the criteria in <code>sort</code>. * If <code>doDocScores</code> is <code>true</code> * then the score of each hit will be computed and * returned. If <code>doMaxScore</code> is * <code>true</code> then the maximum score over all * collected hits will be computed. * * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. */ public TopFieldDocs search(Query query, int n, Sort sort, boolean doDocScores) throws IOException { return searchAfter(null, query, n, sort, doDocScores); } /** * Search implementation with arbitrary sorting. * @param query The query to search for * @param n Return only the top n results * @param sort The {@link org.apache.lucene.search.Sort} object * @return The top docs, sorted according to the supplied {@link org.apache.lucene.search.Sort} instance * @throws IOException if there is a low-level I/O error */ public TopFieldDocs search(Query query, int n, Sort sort) throws IOException { return searchAfter(null, query, n, sort, false); } /** Finds the top <code>n</code> * hits for <code>query</code> where all results are after a previous * result (<code>after</code>). * <p> * By passing the bottom result from a previous page as <code>after</code>, * this method can be used for efficient 'deep-paging' across potentially * large result sets. * * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. */ public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException { return searchAfter(after, query, n, sort, false); } /** Finds the top <code>n</code> * hits for <code>query</code> where all results are after a previous * result (<code>after</code>), allowing control over * whether hit scores and max score should be computed. * <p> * By passing the bottom result from a previous page as <code>after</code>, * this method can be used for efficient 'deep-paging' across potentially * large result sets. If <code>doDocScores</code> is <code>true</code> * then the score of each hit will be computed and * returned. If <code>doMaxScore</code> is * <code>true</code> then the maximum score over all * collected hits will be computed. * * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. 
*/ public TopFieldDocs searchAfter(ScoreDoc after, Query query, int numHits, Sort sort, boolean doDocScores) throws IOException { if (after != null && !(after instanceof FieldDoc)) { // TODO: if we fix type safety of TopFieldDocs we can // remove this throw new IllegalArgumentException("after must be a FieldDoc; got " + after); } return searchAfter((FieldDoc) after, query, numHits, sort, doDocScores); } private TopFieldDocs searchAfter(FieldDoc after, Query query, int numHits, Sort sort, boolean doDocScores) throws IOException { final int limit = Math.max(1, reader.maxDoc()); if (after != null && after.doc >= limit) { throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc=" + after.doc + " limit=" + limit); } final int cappedNumHits = Math.min(numHits, limit); final Sort rewrittenSort = sort.rewrite(this); final CollectorManager<TopFieldCollector, TopFieldDocs> manager = new CollectorManager<TopFieldCollector, TopFieldDocs>() { @Override public TopFieldCollector newCollector() throws IOException { // TODO: don't pay the price for accurate hit counts by default return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, TOTAL_HITS_THRESHOLD); } @Override public TopFieldDocs reduce(Collection<TopFieldCollector> collectors) throws IOException { final TopFieldDocs[] topDocs = new TopFieldDocs[collectors.size()]; int i = 0; for (TopFieldCollector collector : collectors) { topDocs[i++] = collector.topDocs(); } return TopDocs.merge(rewrittenSort, 0, cappedNumHits, topDocs); } }; TopFieldDocs topDocs = search(query, manager); if (doDocScores) { TopFieldCollector.populateScores(topDocs.scoreDocs, this, query); } return topDocs; } /** * Lower-level search API. * Search all leaves using the given {@link CollectorManager}. In contrast * to {@link #search(Query, Collector)}, this method will use the searcher's * {@link Executor} in order to parallelize execution of the collection * on the configured {@link #leafSlices}. 
* @see CollectorManager * @lucene.experimental */ public <C extends Collector, T> T search(Query query, CollectorManager<C, T> collectorManager) throws IOException { if (executor == null || leafSlices.length <= 1) { final C collector = collectorManager.newCollector(); search(query, collector); return collectorManager.reduce(Collections.singletonList(collector)); } else { final List<C> collectors = new ArrayList<>(leafSlices.length); ScoreMode scoreMode = null; for (int i = 0; i < leafSlices.length; ++i) { final C collector = collectorManager.newCollector(); collectors.add(collector); if (scoreMode == null) { scoreMode = collector.scoreMode(); } else if (scoreMode != collector.scoreMode()) { throw new IllegalStateException("CollectorManager does not always produce collectors with the same score mode"); } } if (scoreMode == null) { // no segments scoreMode = ScoreMode.COMPLETE; } query = rewrite(query); final Weight weight = createWeight(query, scoreMode, 1); final List<Future<C>> topDocsFutures = new ArrayList<>(leafSlices.length); for (int i = 0; i < leafSlices.length - 1; ++i) { final LeafReaderContext[] leaves = leafSlices[i].leaves; final C collector = collectors.get(i); FutureTask<C> task = new FutureTask<>(() -> { search(Arrays.asList(leaves), weight, collector); return collector; }); executor.execute(task); topDocsFutures.add(task); } final LeafReaderContext[] leaves = leafSlices[leafSlices.length - 1].leaves; final C collector = collectors.get(leafSlices.length - 1); // execute the last on the caller thread search(Arrays.asList(leaves), weight, collector); topDocsFutures.add(CompletableFuture.completedFuture(collector)); final List<C> collectedCollectors = new ArrayList<>(); for (Future<C> future : topDocsFutures) { try { collectedCollectors.add(future.get()); } catch (InterruptedException e) { throw new ThreadInterruptedException(e); } catch (ExecutionException e) { throw new RuntimeException(e); } } return collectorManager.reduce(collectors); } } /** * Lower-level search API. * * <p> * {@link LeafCollector#collect(int)} is called for every document. <br> * * <p> * NOTE: this method executes the searches on all given leaves exclusively. * To search across all the searchers leaves use {@link #leafContexts}. * * @param leaves * the searchers leaves to execute the searches on * @param weight * to match documents * @param collector * to receive hits * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. */ protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException { // TODO: should we make this // threaded...? the Collector could be sync'd? // always use single thread: for (LeafReaderContext ctx : leaves) { // search each subreader final LeafCollector leafCollector; try { leafCollector = collector.getLeafCollector(ctx); } catch (CollectionTerminatedException e) { // there is no doc of interest in this reader context // continue with the following leaf continue; } BulkScorer scorer = weight.bulkScorer(ctx); if (scorer != null) { try { scorer.score(leafCollector, ctx.reader().getLiveDocs()); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf } } } } /** Expert: called to re-write queries into primitive queries. * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. 
*/ public Query rewrite(Query original) throws IOException { Query query = original; for (Query rewrittenQuery = query.rewrite(reader); rewrittenQuery != query; rewrittenQuery = query.rewrite(reader)) { query = rewrittenQuery; } query.visit(getNumClausesCheckVisitor()); return query; } /** Returns a QueryVisitor which recursively checks the total * number of clauses that a query and its children cumulatively * have and validates that the total number does not exceed * the specified limit */ private static QueryVisitor getNumClausesCheckVisitor() { return new QueryVisitor() { int numClauses; @Override public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { // Return this instance even for MUST_NOT and not an empty QueryVisitor return this; } @Override public void visitLeaf(Query query) { if (numClauses > maxClauseCount) { throw new TooManyClauses(); } ++numClauses; } @Override public void consumeTerms(Query query, Term... terms) { if (numClauses > maxClauseCount) { throw new TooManyClauses(); } ++numClauses; } }; } /** Returns an Explanation that describes how <code>doc</code> scored against * <code>query</code>. * * <p>This is intended to be used in developing Similarity implementations, * and, for good performance, should not be displayed with every hit. * Computing an explanation is as expensive as executing the query over the * entire index. */ public Explanation explain(Query query, int doc) throws IOException { query = rewrite(query); return explain(createWeight(query, ScoreMode.COMPLETE, 1), doc); } /** Expert: low-level implementation method * Returns an Explanation that describes how <code>doc</code> scored against * <code>weight</code>. * * <p>This is intended to be used in developing Similarity implementations, * and, for good performance, should not be displayed with every hit. * Computing an explanation is as expensive as executing the query over the * entire index. * <p>Applications should call {@link IndexSearcher#explain(Query, int)}. * @throws TooManyClauses If a query would exceed * {@link IndexSearcher#getMaxClauseCount()} clauses. */ protected Explanation explain(Weight weight, int doc) throws IOException { int n = ReaderUtil.subIndex(doc, leafContexts); final LeafReaderContext ctx = leafContexts.get(n); int deBasedDoc = doc - ctx.docBase; final Bits liveDocs = ctx.reader().getLiveDocs(); if (liveDocs != null && liveDocs.get(deBasedDoc) == false) { return Explanation.noMatch("Document " + doc + " is deleted"); } return weight.explain(ctx, deBasedDoc); } /** * Creates a {@link Weight} for the given query, potentially adding caching * if possible and configured. * @lucene.experimental */ public Weight createWeight(Query query, ScoreMode scoreMode, float boost) throws IOException { final QueryCache queryCache = this.queryCache; Weight weight = query.createWeight(this, scoreMode, boost); if (scoreMode.needsScores() == false && queryCache != null) { weight = queryCache.doCache(weight, queryCachingPolicy); } return weight; } /** * Returns this searchers the top-level {@link IndexReaderContext}. * @see IndexReader#getContext() */ /* sugar for #getReader().getTopReaderContext() */ public IndexReaderContext getTopReaderContext() { return readerContext; } /** * A class holding a subset of the {@link IndexSearcher}s leaf contexts to be * executed within a single thread. * * @lucene.experimental */ public static class LeafSlice { /** The leaves that make up this slice. 
* * @lucene.experimental */ public final LeafReaderContext[] leaves; public LeafSlice(List<LeafReaderContext> leavesList) { Collections.sort(leavesList, Comparator.comparingInt(l -> l.docBase)); this.leaves = leavesList.toArray(new LeafReaderContext[0]); } } @Override public String toString() { return "IndexSearcher(" + reader + "; executor=" + executor + ")"; } /** * Returns {@link TermStatistics} for a term, or {@code null} if * the term does not exist. * * This can be overridden for example, to return a term's statistics * across a distributed collection. * @lucene.experimental */ public TermStatistics termStatistics(Term term, TermStates context) throws IOException { if (context.docFreq() == 0) { return null; } else { return new TermStatistics(term.bytes(), context.docFreq(), context.totalTermFreq()); } } /** * Returns {@link CollectionStatistics} for a field, or {@code null} if * the field does not exist (has no indexed terms) * * This can be overridden for example, to return a field's statistics * across a distributed collection. * @lucene.experimental */ public CollectionStatistics collectionStatistics(String field) throws IOException { assert field != null; long docCount = 0; long sumTotalTermFreq = 0; long sumDocFreq = 0; for (LeafReaderContext leaf : reader.leaves()) { final Terms terms = leaf.reader().terms(field); if (terms == null) { continue; } docCount += terms.getDocCount(); sumTotalTermFreq += terms.getSumTotalTermFreq(); sumDocFreq += terms.getSumDocFreq(); } if (docCount == 0) { return null; } return new CollectionStatistics(field, reader.maxDoc(), docCount, sumTotalTermFreq, sumDocFreq); } /** * Returns this searchers executor or <code>null</code> if no executor was provided */ public Executor getExecutor() { return executor; } /** Thrown when an attempt is made to add more than {@link * #getMaxClauseCount()} clauses. This typically happens if * a PrefixQuery, FuzzyQuery, WildcardQuery, or TermRangeQuery * is expanded to many terms during search. */ public static class TooManyClauses extends RuntimeException { public TooManyClauses() { super("maxClauseCount is set to " + maxClauseCount); } } }
1
30,094
The `HitsThresholdChecker` should be created once and shared among the collectors? We also don't need to use the `GlobalHitsThresholdChecker` if the executor is null or if there is a single slice.
apache-lucene-solr
java
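The review comment above suggests creating the `HitsThresholdChecker` once and sharing it across the collectors produced by the `CollectorManager`, and skipping the global (synchronized) variant when the search is not parallelized. A sketch of that shape inside `searchAfter` follows; the factory names `HitsThresholdChecker.create` and `HitsThresholdChecker.createShared` are assumptions about the API under review, while the `TopScoreDocCollector.create` overload that accepts a checker comes from the diff above.

    // one checker per search request, created outside newCollector() so all collectors share it;
    // fall back to the non-synchronized variant when there is nothing to parallelize
    final HitsThresholdChecker hitsThresholdChecker = (executor == null || leafSlices.length <= 1)
        ? HitsThresholdChecker.create(TOTAL_HITS_THRESHOLD)        // single thread: no shared counter needed
        : HitsThresholdChecker.createShared(TOTAL_HITS_THRESHOLD); // concurrent slices: one global counter

    final CollectorManager<TopScoreDocCollector, TopDocs> manager =
        new CollectorManager<TopScoreDocCollector, TopDocs>() {

          @Override
          public TopScoreDocCollector newCollector() throws IOException {
            // every collector receives the same checker, so the threshold is enforced globally
            return TopScoreDocCollector.create(cappedNumHits, after, hitsThresholdChecker);
          }

          @Override
          public TopDocs reduce(Collection<TopScoreDocCollector> collectors) throws IOException {
            final TopDocs[] topDocs = new TopDocs[collectors.size()];
            int i = 0;
            for (TopScoreDocCollector collector : collectors) {
              topDocs[i++] = collector.topDocs();
            }
            return TopDocs.merge(0, cappedNumHits, topDocs);
          }
        };

Because every `newCollector()` call returns a collector bound to the same checker instance, the hit-count threshold is tracked across slices rather than per collector, and the synchronized bookkeeping is only paid for when the executor actually runs slices concurrently.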
@@ -15,10 +15,14 @@ package openflow import ( + "antrea.io/antrea/pkg/agent/config" + "antrea.io/antrea/pkg/agent/openflow/cookie" "fmt" + "k8s.io/client-go/tools/cache" "net" "strconv" "strings" + "sync" "k8s.io/apimachinery/pkg/util/intstr" "k8s.io/klog/v2"
1
// Copyright 2019 Antrea Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package openflow import ( "fmt" "net" "strconv" "strings" "k8s.io/apimachinery/pkg/util/intstr" "k8s.io/klog/v2" "antrea.io/antrea/pkg/agent/types" "antrea.io/antrea/pkg/apis/controlplane/v1beta2" crdv1alpha1 "antrea.io/antrea/pkg/apis/crd/v1alpha1" binding "antrea.io/antrea/pkg/ovs/openflow" thirdpartynp "antrea.io/antrea/third_party/networkpolicy" ) var ( MatchDstIP = types.NewMatchKey(binding.ProtocolIP, types.IPAddr, "nw_dst") MatchSrcIP = types.NewMatchKey(binding.ProtocolIP, types.IPAddr, "nw_src") MatchDstIPNet = types.NewMatchKey(binding.ProtocolIP, types.IPNetAddr, "nw_dst") MatchSrcIPNet = types.NewMatchKey(binding.ProtocolIP, types.IPNetAddr, "nw_src") MatchDstIPv6 = types.NewMatchKey(binding.ProtocolIPv6, types.IPAddr, "ipv6_dst") MatchSrcIPv6 = types.NewMatchKey(binding.ProtocolIPv6, types.IPAddr, "ipv6_src") MatchDstIPNetv6 = types.NewMatchKey(binding.ProtocolIPv6, types.IPNetAddr, "ipv6_dst") MatchSrcIPNetv6 = types.NewMatchKey(binding.ProtocolIPv6, types.IPNetAddr, "ipv6_src") MatchDstOFPort = types.NewMatchKey(binding.ProtocolIP, types.OFPortAddr, "reg1[0..31]") MatchSrcOFPort = types.NewMatchKey(binding.ProtocolIP, types.OFPortAddr, "in_port") MatchTCPDstPort = types.NewMatchKey(binding.ProtocolTCP, types.L4PortAddr, "tp_dst") MatchTCPv6DstPort = types.NewMatchKey(binding.ProtocolTCPv6, types.L4PortAddr, "tp_dst") MatchUDPDstPort = types.NewMatchKey(binding.ProtocolUDP, types.L4PortAddr, "tp_dst") MatchUDPv6DstPort = types.NewMatchKey(binding.ProtocolUDPv6, types.L4PortAddr, "tp_dst") MatchSCTPDstPort = types.NewMatchKey(binding.ProtocolSCTP, types.L4PortAddr, "tp_dst") MatchSCTPv6DstPort = types.NewMatchKey(binding.ProtocolSCTPv6, types.L4PortAddr, "tp_dst") MatchTCPSrcPort = types.NewMatchKey(binding.ProtocolTCP, types.L4PortAddr, "tp_src") MatchTCPv6SrcPort = types.NewMatchKey(binding.ProtocolTCPv6, types.L4PortAddr, "tp_src") MatchUDPSrcPort = types.NewMatchKey(binding.ProtocolUDP, types.L4PortAddr, "tp_src") MatchUDPv6SrcPort = types.NewMatchKey(binding.ProtocolUDPv6, types.L4PortAddr, "tp_src") MatchServiceGroupID = types.NewMatchKey(binding.ProtocolIP, types.ServiceGroupIDAddr, "reg7[0..31]") Unsupported = types.NewMatchKey(binding.ProtocolIP, types.UnSupported, "unknown") // metricFlowIdentifier is used to identify metric flows in metric table. // There could be other flows like default flow and Traceflow flows in the table. Only metric flows are supposed to // have normal priority. metricFlowIdentifier = fmt.Sprintf("priority=%d,", priorityNormal) protocolUDP = v1beta2.ProtocolUDP dnsPort = intstr.FromInt(53) ) // IP address calculated from Pod's address. 
type IPAddress net.IP func (a *IPAddress) GetMatchKey(addrType types.AddressType) *types.MatchKey { ipArr := net.IP(*a) switch addrType { case types.SrcAddress: if ipArr.To4() != nil { return MatchSrcIP } return MatchSrcIPv6 case types.DstAddress: if ipArr.To4() != nil { return MatchDstIP } return MatchDstIPv6 default: klog.Errorf("Unknown AddressType %d in IPAddress", addrType) return Unsupported } } func (a *IPAddress) GetMatchValue() string { addr := net.IP(*a) return addr.String() } func (a *IPAddress) GetValue() interface{} { return net.IP(*a) } func NewIPAddress(addr net.IP) *IPAddress { ia := IPAddress(addr) return &ia } // IP block calculated from Pod's address. type IPNetAddress net.IPNet func (a *IPNetAddress) GetMatchKey(addrType types.AddressType) *types.MatchKey { ipAddr := net.IPNet(*a) switch addrType { case types.SrcAddress: if ipAddr.IP.To4() != nil { return MatchSrcIPNet } return MatchSrcIPNetv6 case types.DstAddress: if ipAddr.IP.To4() != nil { return MatchDstIPNet } return MatchDstIPNetv6 default: klog.Errorf("Unknown AddressType %d in IPNetAddress", addrType) return Unsupported } } func (a *IPNetAddress) GetMatchValue() string { addr := net.IPNet(*a) return addr.String() } func (a *IPNetAddress) GetValue() interface{} { return net.IPNet(*a) } func NewIPNetAddress(addr net.IPNet) *IPNetAddress { ia := IPNetAddress(addr) return &ia } // OFPortAddress is the Openflow port of an interface. type OFPortAddress int32 func (a *OFPortAddress) GetMatchKey(addrType types.AddressType) *types.MatchKey { switch addrType { case types.SrcAddress: // in_port is used in egress rule to match packets sent from local Pod. Service traffic is not covered by this // match, and source IP will be matched instead. return MatchSrcOFPort case types.DstAddress: return MatchDstOFPort default: klog.Errorf("Unknown AddressType %d in OFPortAddress", addrType) return Unsupported } } func (a *OFPortAddress) GetMatchValue() string { return fmt.Sprintf("%d", int32(*a)) } func (a *OFPortAddress) GetValue() interface{} { return int32(*a) } func NewOFPortAddress(addr int32) *OFPortAddress { a := OFPortAddress(addr) return &a } type ServiceGroupIDAddress binding.GroupIDType func (a *ServiceGroupIDAddress) GetMatchKey(addrType types.AddressType) *types.MatchKey { return MatchServiceGroupID } func (a *ServiceGroupIDAddress) GetMatchValue() string { return fmt.Sprintf("%d", uint32(*a)) } func (a *ServiceGroupIDAddress) GetValue() interface{} { return uint32(*a) } func NewServiceGroupIDAddress(groupID binding.GroupIDType) *ServiceGroupIDAddress { a := ServiceGroupIDAddress(groupID) return &a } // ConjunctionNotFound is an error response when the specified policyRuleConjunction is not found from the local cache. type ConjunctionNotFound uint32 func (e *ConjunctionNotFound) Error() string { return fmt.Sprintf("policyRuleConjunction with ID %d not found", uint32(*e)) } func newConjunctionNotFound(conjunctionID uint32) *ConjunctionNotFound { err := ConjunctionNotFound(conjunctionID) return &err } // conjunctiveMatch generates match conditions for conjunctive match flow entry, including source or destination // IP address, ofport number of OVS interface, or Service port. When conjunctiveMatch is used to match IP // address or ofport number, matchProtocol is "ip". When conjunctiveMatch is used to match Service // port, matchProtocol is Service protocol. If Service protocol is not set, "tcp" is used by default. 
type conjunctiveMatch struct { tableID uint8 priority *uint16 matchKey *types.MatchKey matchValue interface{} } func (m *conjunctiveMatch) generateGlobalMapKey() string { var valueStr, priorityStr string matchType := m.matchKey switch v := m.matchValue.(type) { case net.IP: // Use the unique format "x.x.x.x/xx" for IP address and IP net, to avoid generating two different global map // keys for IP and IP/mask. Use MatchDstIPNet/MatchSrcIPNet as match type to generate global cache key for both IP // and IPNet. This is because OVS treats IP and IP/$maskLen as the same condition (maskLen=32 for an IPv4 address, // and maskLen=128 for an IPv6 address). If Antrea has two different conjunctive match flow contexts, only one // flow entry is installed on OVS, and the conjunctive actions in the first context wil be overwritten by those // in the second one. var maskLen int if v.To4() != nil { maskLen = net.IPv4len * 8 } else { maskLen = net.IPv6len * 8 } valueStr = fmt.Sprintf("%s/%d", v.String(), maskLen) switch m.matchKey { case MatchDstIP: matchType = MatchDstIPNet case MatchDstIPv6: matchType = MatchDstIPNetv6 case MatchSrcIP: matchType = MatchSrcIPNet case MatchSrcIPv6: matchType = MatchSrcIPNetv6 } case net.IPNet: valueStr = v.String() case types.BitRange: bitRange := m.matchValue.(types.BitRange) if bitRange.Mask != nil { valueStr = fmt.Sprintf("%d/%d", bitRange.Value, *bitRange.Mask) } else { // To normalize the key, set full mask while a single port is provided. valueStr = fmt.Sprintf("%d/65535", bitRange.Value) } default: // The default cases include the matchValue is an ofport Number. valueStr = fmt.Sprintf("%s", m.matchValue) } if m.priority == nil { priorityStr = strconv.Itoa(int(priorityNormal)) } else { priorityStr = strconv.Itoa(int(*m.priority)) } return fmt.Sprintf("table:%d,priority:%s,type:%v,value:%s", m.tableID, priorityStr, matchType, valueStr) } // changeType is generally used to describe the change type of a conjMatchFlowContext. It is also used in "flowChange" // to describe the expected OpenFlow operation which needs to be applied on the OVS bridge, and used in "actionChange" // to describe the policyRuleConjunction is expected to be added to or removed from conjMatchFlowContext's actions. // The value of changeType could be creation, modification, and deletion. type changeType int const ( insertion changeType = iota modification deletion ) // flowChange stores the expected OpenFlow entry and flow operation type which need to be applied on the OVS bridge. // The "flow" in flowChange should be nil if there is no change on the OpenFlow entry. A possible case is that a // DENY-ALL rule is required by a policyRuleConjunction, the flowChange will update the in-memory cache, but will not // change on OVS. type flowChange struct { flow binding.Flow changeType changeType } // actionChange stores the changed action of the conjunctive match flow, and the change type. // The "action" in actionChange is not nil. type actionChange struct { action *conjunctiveAction changeType changeType } // conjunctiveAction generates the policyRuleConjunction action in Openflow entry. The flow action is like // policyRuleConjunction(conjID,clauseID/nClause) when it has been realized on the switch. type conjunctiveAction struct { conjID uint32 clauseID uint8 nClause uint8 } // conjMatchFlowContext generates conjunctive match flow entries for conjunctions share the same match conditions. // One conjMatchFlowContext is responsible for one specific conjunctive match flow entry. 
As the match condition // of the flow entry can be shared by different conjunctions, the realized Openflow entry might have multiple // conjunctive actions. If the dropTable is not nil, conjMatchFlowContext also installs a drop flow in the dropTable. type conjMatchFlowContext struct { // conjunctiveMatch describes the match condition of conjunctive match flow entry. *conjunctiveMatch // actions is a map from policyRuleConjunction ID to conjunctiveAction. It records all the conjunctive actions in // the conjunctive match flow. When the number of actions is reduced to 0, the conjMatchFlowContext.flow is // uninstalled from the switch. actions map[uint32]*conjunctiveAction // denyAllRules is a set to cache the "DENY-ALL" rules that is applied to the matching address in this context. denyAllRules map[uint32]bool client *client // flow is the conjunctive match flow built from this context. flow needs to be updated if actions are changed. flow binding.Flow // dropflow is the default drop flow built from this context to drop packets in the AppliedToGroup but not pass the // NetworkPolicy rule. dropFlow is installed on the switch as long as either actions or denyAllRules is not // empty, and uninstalled when both two are empty. When the dropFlow is uninstalled from the switch, the // conjMatchFlowContext is removed from the cache. dropFlow binding.Flow } // createOrUpdateConjunctiveMatchFlow creates or updates the conjunctive match flow with the latest actions. It returns // the flowChange including the changed OpenFlow entry and the expected operation which need to be applied on the OVS bridge. func (ctx *conjMatchFlowContext) createOrUpdateConjunctiveMatchFlow(actions []*conjunctiveAction) *flowChange { // Check if flow is already installed. If not, create a new flow. if ctx.flow == nil { // Check the number of valid conjunctiveActions, and return nil immediately if it is 0. It happens when the match // condition is used only for matching AppliedToGroup, but no From or To is defined in the NetworkPolicy rule. if len(actions) == 0 { return nil } // Create the conjunctive match flow entry. The actions here should not be empty for either add or update case. // The expected operation for a new Openflow entry should be "insertion". flow := ctx.client.conjunctiveMatchFlow(ctx.tableID, ctx.matchKey, ctx.matchValue, ctx.priority, actions) return &flowChange{ flow: flow, changeType: insertion, } } // Modify the existing Openflow entry and reset the actions. flowBuilder := ctx.flow.CopyToBuilder(0, false) for _, act := range actions { flowBuilder.Action().Conjunction(act.conjID, act.clauseID, act.nClause) } // The expected operation for an existing Openflow entry should be "modification". return &flowChange{ flow: flowBuilder.Done(), changeType: modification, } } // deleteAction deletes the specified policyRuleConjunction from conjunctiveMatchFlow's actions, and then returns the // flowChange. func (ctx *conjMatchFlowContext) deleteAction(conjID uint32) *flowChange { // If the specified conjunctive action is the last one, delete the conjunctive match flow entry from the OVS bridge. // No need to check if the conjunction ID of the only conjunctive action is the specified ID or not, as it // has been checked in the caller. if len(ctx.actions) == 1 && ctx.flow != nil { return &flowChange{ flow: ctx.flow, changeType: deletion, } } // Modify the Openflow entry and reset the other conjunctive actions. 
var actions []*conjunctiveAction for _, act := range ctx.actions { if act.conjID != conjID { actions = append(actions, act) } } return ctx.createOrUpdateConjunctiveMatchFlow(actions) } // addAction adds the specified policyRuleConjunction into conjunctiveMatchFlow's actions, and then returns the flowChange. func (ctx *conjMatchFlowContext) addAction(action *conjunctiveAction) *flowChange { // Check if the conjunction exists in conjMatchFlowContext actions or not. If yes, return nil immediately. _, found := ctx.actions[action.conjID] if found { return nil } // Append current conjunctive action to the existing actions, and then calculate the conjunctive match flow changes. actions := []*conjunctiveAction{action} for _, act := range ctx.actions { actions = append(actions, act) } return ctx.createOrUpdateConjunctiveMatchFlow(actions) } func (ctx *conjMatchFlowContext) addDenyAllRule(ruleID uint32) { if ctx.denyAllRules == nil { ctx.denyAllRules = make(map[uint32]bool) } ctx.denyAllRules[ruleID] = true } func (ctx *conjMatchFlowContext) delDenyAllRule(ruleID uint32) { // Delete the DENY-ALL rule if it is in context denyAllRules. _, found := ctx.denyAllRules[ruleID] if found { delete(ctx.denyAllRules, ruleID) } } // conjMatchFlowContextChange describes the changes of a conjMatchFlowContext. It is generated when a policyRuleConjunction // is added, deleted, or the addresses in an existing policyRuleConjunction are changed. The changes are calculated first, // and then applied on the OVS bridge using a single Bundle, and lastly the local cache is updated. The local cahce // is updated only if conjMatchFlowContextChange is applied on the OVS bridge successfully. type conjMatchFlowContextChange struct { // context is the changed conjMatchFlowContext, which needs to be updated after the OpenFlow entries are applied to // the OVS bridge. context is not nil. context *conjMatchFlowContext // ctxChangeType is the changed type of the conjMatchFlowContext. The possible values are "creation", "modification" // and "deletion". Add the context into the globalConjMatchFlowCache if the ctxChangeType is "insertion", and remove // from the globalConjMatchFlowCache if it is "deletion". ctxChangeType changeType // matchFlow is the changed conjunctive match flow which needs to be realized on the OVS bridge. It is used to update // conjMatchFlowContext.flow. matchFlow is set if the conjunctive match flow needs to be updated on the OVS bridge, or // a DENY-ALL rule change is required by the policyRuleConjunction. matchFlow is nil if the policyRuleConjunction // is already added/removed in the conjMatchFlowContext's actions or denyAllRules. matchFlow *flowChange // dropFlow is the changed drop flow which needs to be realized on the OVS bridge. It is used to update // conjMatchFlowContext.dropFlow. dropFlow is set when the default drop flow needs to be added or removed on the OVS // bridge, and it is nil in other cases. dropFlow *flowChange // clause is the policyRuleConjunction's clause having current conjMatchFlowContextChange. It is used to update the // mapping relations between the policyRuleConjunction and the conjMatchFlowContext. Update the clause.matches after // the conjMatchFlowContextChange is realized on the OVS bridge. clause is not nil. clause *clause // actChange is the changed conjunctive action. It is used to update the conjMatchFlowContext's actions. actChange // is not nil. 
actChange *actionChange } // updateContextStatus changes conjMatchFlowContext's status, including, // 1) reset flow and dropFlow after the flow changes have been applied to the OVS bridge, // 2) modify the actions with the changed action, // 3) update the mapping of denyAllRules and corresponding policyRuleConjunction, // 4) add the new conjMatchFlowContext into the globalConjMatchFlowCache, or remove the deleted conjMatchFlowContext // from the globalConjMatchFlowCache. func (c *conjMatchFlowContextChange) updateContextStatus() { matcherKey := c.context.generateGlobalMapKey() // Update clause.matches with the conjMatchFlowContext, and update conjMatchFlowContext.actions with the changed // conjunctive action. changedAction := c.actChange.action switch c.actChange.changeType { case insertion: c.clause.matches[matcherKey] = c.context if changedAction != nil { c.context.actions[changedAction.conjID] = changedAction } case deletion: delete(c.clause.matches, matcherKey) if changedAction != nil { delete(c.context.actions, changedAction.conjID) } } // Update the match flow in the conjMatchFlowContext. There are two kinds of possible changes on the match flow: // 1) A conjunctive match flow change required by the policyRuleConjunction. // 2) A DENY-ALL rule required by the policyRuleConjunction. // For 1), conjMatchFlowContext.Flow should be updated with the conjMatchFlowContextChange.matchFlow.flow. // For 2), append or delete the conjunction ID from the conjMatchFlowContext's denyAllRules. if c.matchFlow != nil { switch c.matchFlow.changeType { case insertion: fallthrough case modification: if c.matchFlow.flow != nil { c.context.flow = c.matchFlow.flow } else { switch c.actChange.changeType { case insertion: c.context.addDenyAllRule(c.clause.action.conjID) case deletion: c.context.delDenyAllRule(c.clause.action.conjID) } } case deletion: if c.matchFlow.flow != nil { c.context.flow = nil } else { c.context.delDenyAllRule(c.clause.action.conjID) } } } // Update conjMatchFlowContext.dropFlow. if c.dropFlow != nil { switch c.dropFlow.changeType { case insertion: c.context.dropFlow = c.dropFlow.flow case deletion: c.context.dropFlow = nil } } // Update globalConjMatchFlowCache. Add the conjMatchFlowContext into the globalConjMatchFlowCache if the ctxChangeType // is "insertion", or delete from the globalConjMatchFlowCache if the ctxChangeType is "deletion". switch c.ctxChangeType { case insertion: c.context.client.globalConjMatchFlowCache[matcherKey] = c.context case deletion: delete(c.context.client.globalConjMatchFlowCache, matcherKey) } } // policyRuleConjunction is responsible to build Openflow entries for Pods that are in a NetworkPolicy rule's AppliedToGroup. // The Openflow entries include conjunction action flows, conjunctive match flows, and default drop flows in the dropTable. // NetworkPolicyController will make sure only one goroutine operates on a policyRuleConjunction. // 1) Conjunction action flows use policyRuleConjunction ID as match condition. policyRuleConjunction ID is the single // match condition for conjunction action flows to allow packets. If the NetworkPolicy rule has also configured excepts // in From or To, Openflow entries are installed only for diff IPBlocks between From/To and Excepts. These are added as // conjunctive match flows as described below. // 2) Conjunctive match flows adds conjunctive actions in Openflow entry, and they are grouped by clauses. 
The match // condition in one clause is one of these three types: from address(for fromClause), or to address(for toClause), or // service ports(for serviceClause) configured in the NetworkPolicy rule. Each conjunctive match flow entry is // maintained by one specific conjMatchFlowContext which is stored in globalConjMatchFlowCache, and shared by clauses // if they have the same match conditions. clause adds or deletes conjunctive action to conjMatchFlowContext actions. // A clause is hit if the packet matches any conjunctive match flow that are grouped by this clause. Conjunction // action flow is hit only if all clauses in the policyRuleConjunction are hit. // 3) Default drop flows are also maintained by conjMatchFlowContext. It is used to drop packets sent from or to the // AppliedToGroup but not pass the Network Policy rule. type policyRuleConjunction struct { id uint32 fromClause *clause toClause *clause serviceClause *clause actionFlows []binding.Flow metricFlows []binding.Flow // NetworkPolicy reference information for debugging usage. npRef *v1beta2.NetworkPolicyReference ruleTableID uint8 } // clause groups conjunctive match flows. Matches in a clause represent source addresses(for fromClause), or destination // addresses(for toClause) or service ports(for serviceClause) in a NetworkPolicy rule. When the new address or service // port is added into the clause, it adds a new conjMatchFlowContext into globalConjMatchFlowCache (or finds the // existing one from globalConjMatchFlowCache), and then update the key of the conjunctiveMatch into its own matches. // When address is deleted from the clause, it deletes the conjunctive action from the conjMatchFlowContext, // and then deletes the key of conjunctiveMatch from its own matches. type clause struct { action *conjunctiveAction // matches is a map from the unique string generated from the conjunctiveMatch to conjMatchFlowContext. It is used // to cache conjunctive match conditions in the same clause. matches map[string]*conjMatchFlowContext // ruleTable is where to install conjunctive match flows. ruleTable binding.Table // dropTable is where to install Openflow entries to drop the packet sent to or from the AppliedToGroup but does not // satisfy any conjunctive match conditions. It should be nil, if the clause is used for matching service port. 
dropTable binding.Table } func (c *client) NewDNSpacketInConjunction(id uint32) error { existingConj := c.getPolicyRuleConjunction(id) if existingConj != nil { klog.InfoS("DNS Conjunction has already been added to cache", "id", id) return nil } conj := &policyRuleConjunction{ id: id, ruleTableID: AntreaPolicyIngressRuleTable.GetID(), actionFlows: []binding.Flow{c.dnsPacketInFlow(id), c.dnsResponseBypassPacketInFlow(), c.dnsResponseBypassConntrackFlow()}, } if err := c.ofEntryOperations.AddAll(conj.actionFlows); err != nil { return fmt.Errorf("error when adding action flows for the DNS conjunction: %w", err) } udpService := v1beta2.Service{ Protocol: &protocolUDP, Port: &dnsPort, } dnsPriority := priorityDNSIntercept conj.serviceClause = conj.newClause(1, 2, getTableByID(conj.ruleTableID), nil) conj.toClause = conj.newClause(2, 2, getTableByID(conj.ruleTableID), nil) c.conjMatchFlowLock.Lock() defer c.conjMatchFlowLock.Unlock() ctxChanges := conj.serviceClause.addServiceFlows(c, []v1beta2.Service{udpService}, &dnsPriority, true) if err := c.applyConjunctiveMatchFlows(ctxChanges); err != nil { return err } // Add the policyRuleConjunction into policyCache c.policyCache.Add(conj) return nil } func (c *client) AddAddressToDNSConjunction(id uint32, addrs []types.Address) error { dnsPriority := priorityDNSIntercept return c.AddPolicyRuleAddress(id, types.DstAddress, addrs, &dnsPriority) } func (c *client) DeleteAddressFromDNSConjunction(id uint32, addrs []types.Address) error { dnsPriority := priorityDNSIntercept return c.DeletePolicyRuleAddress(id, types.DstAddress, addrs, &dnsPriority) } func (c *clause) addConjunctiveMatchFlow(client *client, match *conjunctiveMatch) *conjMatchFlowContextChange { matcherKey := match.generateGlobalMapKey() _, found := c.matches[matcherKey] if found { klog.V(2).Infof("Conjunctive match flow with matcher %s is already added in rule: %d", matcherKey, c.action.conjID) return nil } var context *conjMatchFlowContext ctxType := modification var dropFlow *flowChange // Get conjMatchFlowContext from globalConjMatchFlowCache. If it doesn't exist, create a new one and add into the cache. context, found = client.globalConjMatchFlowCache[matcherKey] if !found { context = &conjMatchFlowContext{ conjunctiveMatch: match, actions: make(map[uint32]*conjunctiveAction), client: client, } ctxType = insertion // Generate the default drop flow if dropTable is not nil and the default drop flow is not set yet. if c.dropTable != nil && context.dropFlow == nil { dropFlow = &flowChange{ flow: context.client.defaultDropFlow(c.dropTable, match.matchKey, match.matchValue), changeType: insertion, } } } // Calculate the change on the conjMatchFlowContext. ctxChanges := &conjMatchFlowContextChange{ context: context, ctxChangeType: ctxType, clause: c, actChange: &actionChange{ changeType: insertion, }, dropFlow: dropFlow, } if c.action.nClause > 1 { // Append the conjunction to conjunctiveFlowContext's actions, and add the changed flow into the conjMatchFlowContextChange. flowChange := context.addAction(c.action) if flowChange != nil { ctxChanges.matchFlow = flowChange ctxChanges.actChange.action = c.action } } else { // Set the flowChange type as "insertion" but do not set flowChange.Flow. In this case, the policyRuleConjunction should // be added into conjunctiveFlowContext's denyAllRules. 
ctxChanges.matchFlow = &flowChange{ changeType: insertion, } } return ctxChanges } func generateAddressConjMatch(ruleTableID uint8, addr types.Address, addrType types.AddressType, priority *uint16) *conjunctiveMatch { matchKey := addr.GetMatchKey(addrType) matchValue := addr.GetValue() match := &conjunctiveMatch{ tableID: ruleTableID, matchKey: matchKey, matchValue: matchValue, priority: priority, } return match } func getServiceMatchType(protocol *v1beta2.Protocol, ipv4Enabled, ipv6Enabled, matchSrc bool) []*types.MatchKey { var matchKeys []*types.MatchKey switch *protocol { case v1beta2.ProtocolTCP: if !matchSrc { if ipv4Enabled { matchKeys = append(matchKeys, MatchTCPDstPort) } if ipv6Enabled { matchKeys = append(matchKeys, MatchTCPv6DstPort) } } else { if ipv4Enabled { matchKeys = append(matchKeys, MatchTCPSrcPort) } if ipv6Enabled { matchKeys = append(matchKeys, MatchTCPv6SrcPort) } } case v1beta2.ProtocolUDP: if !matchSrc { if ipv4Enabled { matchKeys = append(matchKeys, MatchUDPDstPort) } if ipv6Enabled { matchKeys = append(matchKeys, MatchUDPv6DstPort) } } else { if ipv4Enabled { matchKeys = append(matchKeys, MatchUDPSrcPort) } if ipv6Enabled { matchKeys = append(matchKeys, MatchUDPv6SrcPort) } } case v1beta2.ProtocolSCTP: if ipv4Enabled { matchKeys = append(matchKeys, MatchSCTPDstPort) } if ipv6Enabled { matchKeys = append(matchKeys, MatchSCTPv6DstPort) } default: matchKeys = []*types.MatchKey{MatchTCPDstPort} } return matchKeys } func generateServicePortConjMatches(ruleTableID uint8, service v1beta2.Service, priority *uint16, ipv4Enabled, ipv6Enabled, matchSrc bool) []*conjunctiveMatch { matchKeys := getServiceMatchType(service.Protocol, ipv4Enabled, ipv6Enabled, matchSrc) ovsBitRanges := serviceToBitRanges(service) var matches []*conjunctiveMatch for _, matchKey := range matchKeys { for _, ovsBitRange := range ovsBitRanges { matches = append(matches, &conjunctiveMatch{ tableID: ruleTableID, matchKey: matchKey, matchValue: ovsBitRange, priority: priority, }) } } return matches } // serviceToBitRanges converts a Service to a list of BitRange. func serviceToBitRanges(service v1beta2.Service) []types.BitRange { var ovsBitRanges []types.BitRange // If `EndPort` is equal to `Port`, then treat it as single port case. if service.EndPort != nil && *service.EndPort > service.Port.IntVal { // Add several antrea range services based on a port range. portRange := thirdpartynp.PortRange{Start: uint16(service.Port.IntVal), End: uint16(*service.EndPort)} bitRanges, err := portRange.BitwiseMatch() if err != nil { klog.Errorf("Error when getting BitRanges from %v: %v", portRange, err) return ovsBitRanges } for _, bitRange := range bitRanges { curBitRange := bitRange ovsBitRanges = append(ovsBitRanges, types.BitRange{ Value: curBitRange.Value, Mask: &curBitRange.Mask, }) } } else if service.Port != nil { // Add single antrea service based on a single port. ovsBitRanges = append(ovsBitRanges, types.BitRange{ Value: uint16(service.Port.IntVal), }) } else { // Match all ports with the given protocol type if `Port` and `EndPort` are not // specified (value is 0). ovsBitRanges = append(ovsBitRanges, types.BitRange{ Value: uint16(0), }) } return ovsBitRanges } // addAddrFlows translates the specified addresses to conjunctiveMatchFlows, and returns the corresponding changes on the // conjunctiveMatchFlows. 
func (c *clause) addAddrFlows(client *client, addrType types.AddressType, addresses []types.Address, priority *uint16) []*conjMatchFlowContextChange { var conjMatchFlowContextChanges []*conjMatchFlowContextChange // Calculate Openflow changes for the added addresses. for _, addr := range addresses { match := generateAddressConjMatch(c.ruleTable.GetID(), addr, addrType, priority) ctxChange := c.addConjunctiveMatchFlow(client, match) if ctxChange != nil { conjMatchFlowContextChanges = append(conjMatchFlowContextChanges, ctxChange) } } return conjMatchFlowContextChanges } // addServiceFlows translates the specified NetworkPolicyPorts to conjunctiveMatchFlow, and returns corresponding // conjMatchFlowContextChange. func (c *clause) addServiceFlows(client *client, ports []v1beta2.Service, priority *uint16, matchSrc bool) []*conjMatchFlowContextChange { var conjMatchFlowContextChanges []*conjMatchFlowContextChange for _, port := range ports { matches := generateServicePortConjMatches(c.ruleTable.GetID(), port, priority, client.IsIPv4Enabled(), client.IsIPv6Enabled(), matchSrc) for _, match := range matches { ctxChange := c.addConjunctiveMatchFlow(client, match) conjMatchFlowContextChanges = append(conjMatchFlowContextChanges, ctxChange) } } return conjMatchFlowContextChanges } // deleteConjunctiveMatchFlow deletes the specific conjunctiveAction from existing flow. func (c *clause) deleteConjunctiveMatchFlow(flowContextKey string) *conjMatchFlowContextChange { context, found := c.matches[flowContextKey] // Match is not located in clause cache. It happens if the conjMatchFlowContext is already deleted from clause local cache. if !found { return nil } ctxChange := &conjMatchFlowContextChange{ context: context, clause: c, ctxChangeType: modification, actChange: &actionChange{ changeType: deletion, }, } conjID := c.action.conjID expectedConjunctiveActions := len(context.actions) expectedDenyAllRules := len(context.denyAllRules) if c.action.nClause > 1 { // Delete the conjunctive action if it is in context actions. action, found := context.actions[conjID] if found { ctxChange.matchFlow = context.deleteAction(conjID) ctxChange.actChange.action = action expectedConjunctiveActions-- } } else { // Delete the DENY-ALL rule if it is in context denyAllRules. ctxChange.matchFlow = &flowChange{ changeType: deletion, } expectedDenyAllRules-- } // Uninstall default drop flow if the deleted conjunctiveAction is the last action or the rule is the last one in // the denyAllRules. if expectedConjunctiveActions == 0 && expectedDenyAllRules == 0 { if context.dropFlow != nil { ctxChange.dropFlow = &flowChange{ flow: context.dropFlow, changeType: deletion, } } // Remove the context from global cache if the match condition is not used by either DENEY-ALL or the conjunctive // match flow. ctxChange.ctxChangeType = deletion } return ctxChange } // deleteAddrFlows deletes conjunctiveMatchFlow relevant to the specified addresses from local cache, // and uninstalls Openflow entry. 
func (c *clause) deleteAddrFlows(addrType types.AddressType, addresses []types.Address, priority *uint16) []*conjMatchFlowContextChange { var ctxChanges []*conjMatchFlowContextChange for _, addr := range addresses { match := generateAddressConjMatch(c.ruleTable.GetID(), addr, addrType, priority) contextKey := match.generateGlobalMapKey() ctxChange := c.deleteConjunctiveMatchFlow(contextKey) if ctxChange != nil { ctxChanges = append(ctxChanges, ctxChange) } } return ctxChanges } // deleteAllMatches deletes all conjunctiveMatchFlow in the clause, and removes Openflow entry. deleteAllMatches // is always invoked when NetworkPolicy rule is deleted. func (c *clause) deleteAllMatches() []*conjMatchFlowContextChange { var ctxChanges []*conjMatchFlowContextChange for key := range c.matches { ctxChange := c.deleteConjunctiveMatchFlow(key) if ctxChange != nil { ctxChanges = append(ctxChanges, ctxChange) } } return ctxChanges } func (c *policyRuleConjunction) getAddressClause(addrType types.AddressType) *clause { switch addrType { case types.SrcAddress: return c.fromClause case types.DstAddress: return c.toClause default: klog.Errorf("no address clause use AddressType %d", addrType) return nil } } // InstallPolicyRuleFlows installs flows for a new NetworkPolicy rule. Rule should include all fields in the // NetworkPolicy rule. Each ingress/egress policy rule installs Openflow entries on two tables, one for ruleTable and // the other for dropTable. If a packet does not pass the ruleTable, it will be dropped by the dropTable. // NetworkPolicyController will make sure only one goroutine operates on a PolicyRule and addresses in the rule. // For a normal NetworkPolicy rule, these Openflow entries are installed: 1) 1 conjunction action flow; 2) multiple // conjunctive match flows, the flow number depends on addresses in rule.From and rule.To, or if // rule.FromExcepts/rule.ToExcepts are present, flow number is equal to diff of addresses between rule.From and // rule.FromExcepts, and diff addresses between rule.To and rule.ToExcepts, and in addition number includes service ports // in rule.Service; and 3) multiple default drop flows, the number is dependent on the addresses in rule.From for // an egress rule, and addresses in rule.To for an ingress rule. // For ALLOW-ALL rule, the Openflow entries installed on the switch are similar to a normal rule. The differences include, // 1) rule.Service is nil; and 2) rule.To has only one address "0.0.0.0/0" for egress rule, and rule.From is "0.0.0.0/0" // for ingress rule. // For DENY-ALL rule, only the default drop flow is installed for the addresses in rule.From for egress rule, or // addresses in rule.To for ingress rule. No conjunctive match flow or conjunction action except flows are installed. // A DENY-ALL rule is configured with rule.ID, rule.Direction, and either rule.From(egress rule) or rule.To(ingress rule). // Other fields in the rule should be nil. // If there is an error in any clause's addAddrFlows or addServiceFlows, the conjunction action flow will never be hit. // If the default drop flow is already installed before this error, all packets will be dropped by the default drop flow, // Otherwise all packets will be allowed. 
func (c *client) InstallPolicyRuleFlows(rule *types.PolicyRule) error { c.replayMutex.RLock() defer c.replayMutex.RUnlock() conj := c.calculateActionFlowChangesForRule(rule) c.conjMatchFlowLock.Lock() defer c.conjMatchFlowLock.Unlock() ctxChanges := c.calculateMatchFlowChangesForRule(conj, rule) if err := c.ofEntryOperations.AddAll(conj.metricFlows); err != nil { return err } if err := c.ofEntryOperations.AddAll(conj.actionFlows); err != nil { return err } if err := c.applyConjunctiveMatchFlows(ctxChanges); err != nil { return err } // Add the policyRuleConjunction into policyCache c.policyCache.Add(conj) return nil } // calculateActionFlowChangesForRule calculates and updates the actionFlows for the conjunction corresponded to the ofPolicyRule. func (c *client) calculateActionFlowChangesForRule(rule *types.PolicyRule) *policyRuleConjunction { ruleOfID := rule.FlowID // Check if the policyRuleConjunction is added into cache or not. If yes, return nil. conj := c.getPolicyRuleConjunction(ruleOfID) if conj != nil { klog.V(2).Infof("PolicyRuleConjunction %d is already added in cache", ruleOfID) return nil } conj = &policyRuleConjunction{ id: ruleOfID, npRef: rule.PolicyRef, } nClause, ruleTable, dropTable := conj.calculateClauses(rule, c) conj.ruleTableID = rule.TableID _, isEgress := egressTables[rule.TableID] isIngress := !isEgress // Conjunction action flows are installed only if the number of clauses in the conjunction is > 1. It should be a rule // to drop all packets. If the number is 1, no conjunctive match flows or conjunction action flows are installed, // but the default drop flow is installed. if nClause > 1 { // Install action flows. var actionFlows []binding.Flow var metricFlows []binding.Flow if rule.IsAntreaNetworkPolicyRule() && *rule.Action == crdv1alpha1.RuleActionDrop { metricFlows = append(metricFlows, c.denyRuleMetricFlow(ruleOfID, isIngress)) actionFlows = append(actionFlows, c.conjunctionActionDenyFlow(ruleOfID, ruleTable, rule.Priority, DispositionDrop, rule.EnableLogging)) } else if rule.IsAntreaNetworkPolicyRule() && *rule.Action == crdv1alpha1.RuleActionReject { metricFlows = append(metricFlows, c.denyRuleMetricFlow(ruleOfID, isIngress)) actionFlows = append(actionFlows, c.conjunctionActionDenyFlow(ruleOfID, ruleTable, rule.Priority, DispositionRej, rule.EnableLogging)) } else { metricFlows = append(metricFlows, c.allowRulesMetricFlows(ruleOfID, isIngress)...) actionFlows = append(actionFlows, c.conjunctionActionFlow(ruleOfID, ruleTable, dropTable.GetNext(), rule.Priority, rule.EnableLogging)...) } conj.actionFlows = actionFlows conj.metricFlows = metricFlows } return conj } // calculateMatchFlowChangesForRule calculates the contextChanges for the policyRule, and updates the context status in case of batch install. func (c *client) calculateMatchFlowChangesForRule(conj *policyRuleConjunction, rule *types.PolicyRule) []*conjMatchFlowContextChange { // Calculate the conjMatchFlowContext changes. The changed Openflow entries are included in the conjMatchFlowContext change. ctxChanges := conj.calculateChangesForRuleCreation(c, rule) return ctxChanges } // addRuleToConjunctiveMatch adds a rule's clauses to corresponding conjunctive match contexts. // Unlike calculateMatchFlowChangesForRule, it updates the context status directly and doesn't calculate flow changes. // It's used in initial batch install where we first add all rules then calculates flows change based on final state. 
func (c *client) addRuleToConjunctiveMatch(conj *policyRuleConjunction, rule *types.PolicyRule) { if conj.fromClause != nil { for _, addr := range rule.From { match := generateAddressConjMatch(conj.fromClause.ruleTable.GetID(), addr, types.SrcAddress, rule.Priority) c.addActionToConjunctiveMatch(conj.fromClause, match) } } if conj.toClause != nil { for _, addr := range rule.To { match := generateAddressConjMatch(conj.toClause.ruleTable.GetID(), addr, types.DstAddress, rule.Priority) c.addActionToConjunctiveMatch(conj.toClause, match) } } if conj.serviceClause != nil { for _, port := range rule.Service { matches := generateServicePortConjMatches(conj.serviceClause.ruleTable.GetID(), port, rule.Priority, c.IsIPv4Enabled(), c.IsIPv6Enabled(), false) for _, match := range matches { c.addActionToConjunctiveMatch(conj.serviceClause, match) } } } } // addActionToConjunctiveMatch adds a clause to corresponding conjunctive match context. // It updates the context status directly and doesn't calculate the match flow, which is supposed to be calculated after // all actions are added. It's used in initial batch install only. func (c *client) addActionToConjunctiveMatch(clause *clause, match *conjunctiveMatch) { matcherKey := match.generateGlobalMapKey() _, found := clause.matches[matcherKey] if found { klog.V(2).InfoS("Conjunctive match flow is already added for rule", "matcherKey", matcherKey, "ruleID", clause.action.conjID) return } var context *conjMatchFlowContext // Get conjMatchFlowContext from globalConjMatchFlowCache. If it doesn't exist, create a new one and add into the cache. context, found = c.globalConjMatchFlowCache[matcherKey] if !found { context = &conjMatchFlowContext{ conjunctiveMatch: match, actions: make(map[uint32]*conjunctiveAction), client: c, } // Generate the default drop flow if dropTable is not nil. if clause.dropTable != nil { context.dropFlow = context.client.defaultDropFlow(clause.dropTable, match.matchKey, match.matchValue) } c.globalConjMatchFlowCache[matcherKey] = context } clause.matches[matcherKey] = context if clause.action.nClause > 1 { // Add the conjunction to the conjunctiveFlowContext's actions. context.actions[clause.action.conjID] = clause.action } else { // Add the conjunction ID to the conjunctiveFlowContext's denyAllRules. context.addDenyAllRule(clause.action.conjID) } } // BatchInstallPolicyRuleFlows installs flows for NetworkPolicy rules in case of agent restart. It calculates and // accumulates all Openflow entry updates required and installs all of them on OVS bridge in one bundle. // It resets the global conjunctive match flow cache upon failure, and should NOT be used after any rule is installed // via the InstallPolicyRuleFlows method. Otherwise the cache would be out of sync. func (c *client) BatchInstallPolicyRuleFlows(ofPolicyRules []*types.PolicyRule) error { c.replayMutex.RLock() defer c.replayMutex.RUnlock() var allFlows []binding.Flow var conjunctions []*policyRuleConjunction for _, rule := range ofPolicyRules { conj := c.calculateActionFlowChangesForRule(rule) c.addRuleToConjunctiveMatch(conj, rule) allFlows = append(allFlows, conj.actionFlows...) allFlows = append(allFlows, conj.metricFlows...) conjunctions = append(conjunctions, conj) } for _, ctx := range c.globalConjMatchFlowCache { // In theory there must be at least one action but InstallPolicyRuleFlows currently handles the 1 clause case // and we do the same in addRuleToConjunctiveMatch. The check is added only for consistency. 
Later we should // return error if clients install a rule with only 1 clause, and should remove the extra code for processing it. if len(ctx.actions) > 0 { actions := make([]*conjunctiveAction, 0, len(ctx.actions)) for _, action := range ctx.actions { actions = append(actions, action) } ctx.flow = c.conjunctiveMatchFlow(ctx.tableID, ctx.matchKey, ctx.matchValue, ctx.priority, actions) allFlows = append(allFlows, ctx.flow) } if ctx.dropFlow != nil { allFlows = append(allFlows, ctx.dropFlow) } } // Send the changed Openflow entries to the OVS bridge. if err := c.ofEntryOperations.AddAll(allFlows); err != nil { // Reset the global conjunctive match flow cache since the OpenFlow bundle, which contains // all the match flows to be installed, was not applied successfully. c.globalConjMatchFlowCache = map[string]*conjMatchFlowContext{} return err } // Update conjMatchFlowContexts as the expected status. for _, conj := range conjunctions { // Add the policyRuleConjunction into policyCache c.policyCache.Add(conj) } return nil } // applyConjunctiveMatchFlows installs OpenFlow entries on the OVS bridge, and then updates the conjMatchFlowContext. func (c *client) applyConjunctiveMatchFlows(flowChanges []*conjMatchFlowContextChange) error { // Send the OpenFlow entries to the OVS bridge. if err := c.sendConjunctiveFlows(flowChanges, []binding.Flow{}); err != nil { return err } // Update conjunctiveMatchContext. for _, ctxChange := range flowChanges { ctxChange.updateContextStatus() } return nil } // sendConjunctiveFlows sends all the changed OpenFlow entries to the OVS bridge in a single Bundle. func (c *client) sendConjunctiveFlows(changes []*conjMatchFlowContextChange, flows []binding.Flow) error { var addFlows, modifyFlows, deleteFlows []binding.Flow var flowChanges []*flowChange addFlows = flows for _, flowChange := range changes { if flowChange.matchFlow != nil { flowChanges = append(flowChanges, flowChange.matchFlow) } if flowChange.dropFlow != nil { flowChanges = append(flowChanges, flowChange.dropFlow) } } // Retrieve the OpenFlow entries from the flowChanges. for _, fc := range flowChanges { switch fc.changeType { case insertion: addFlows = append(addFlows, fc.flow) case modification: modifyFlows = append(modifyFlows, fc.flow) case deletion: deleteFlows = append(deleteFlows, fc.flow) } } return c.bridge.AddFlowsInBundle(addFlows, modifyFlows, deleteFlows) } // ActionFlowPriorities returns the OF priorities of the actionFlows in the policyRuleConjunction func (c *policyRuleConjunction) ActionFlowPriorities() []string { priorities := make([]string, 0, len(c.actionFlows)) for _, flow := range c.actionFlows { priorityStr := strconv.Itoa(int(flow.FlowPriority())) priorities = append(priorities, priorityStr) } return priorities } func (c *policyRuleConjunction) newClause(clauseID uint8, nClause uint8, ruleTable, dropTable binding.Table) *clause { return &clause{ ruleTable: ruleTable, dropTable: dropTable, matches: make(map[string]*conjMatchFlowContext, 0), action: &conjunctiveAction{ conjID: c.id, clauseID: clauseID, nClause: nClause, }, } } // calculateClauses configures the policyRuleConjunction's clauses according to the PolicyRule. The Openflow entries are // not installed on the OVS bridge when calculating the clauses. 
func (c *policyRuleConjunction) calculateClauses(rule *types.PolicyRule, clnt *client) (uint8, binding.Table, binding.Table) { var dropTable binding.Table var isEgressRule = false switch rule.Direction { case v1beta2.DirectionOut: dropTable = EgressDefaultTable isEgressRule = true default: dropTable = IngressDefaultTable } ruleTable := getTableByID(rule.TableID) var fromID, toID, serviceID, nClause uint8 // Calculate clause IDs and the total number of clauses. if rule.From != nil { nClause++ fromID = nClause } if rule.To != nil { nClause++ toID = nClause } if rule.Service != nil { nClause++ serviceID = nClause } var defaultTable binding.Table if rule.From != nil { // deny rule does not need to be created for ClusterNetworkPolicies if !isEgressRule || rule.IsAntreaNetworkPolicyRule() { defaultTable = nil } else { defaultTable = dropTable } c.fromClause = c.newClause(fromID, nClause, ruleTable, defaultTable) } if rule.To != nil { if isEgressRule || rule.IsAntreaNetworkPolicyRule() { defaultTable = nil } else { defaultTable = dropTable } c.toClause = c.newClause(toID, nClause, ruleTable, defaultTable) } if rule.Service != nil { c.serviceClause = c.newClause(serviceID, nClause, ruleTable, nil) } return nClause, ruleTable, dropTable } // calculateChangesForRuleCreation returns the conjMatchFlowContextChanges of the new policyRuleConjunction. It // will calculate the expected conjMatchFlowContext status, and the changed Openflow entries. func (c *policyRuleConjunction) calculateChangesForRuleCreation(clnt *client, rule *types.PolicyRule) []*conjMatchFlowContextChange { var ctxChanges []*conjMatchFlowContextChange if c.fromClause != nil { ctxChanges = append(ctxChanges, c.fromClause.addAddrFlows(clnt, types.SrcAddress, rule.From, rule.Priority)...) } if c.toClause != nil { ctxChanges = append(ctxChanges, c.toClause.addAddrFlows(clnt, types.DstAddress, rule.To, rule.Priority)...) } if c.serviceClause != nil { ctxChanges = append(ctxChanges, c.serviceClause.addServiceFlows(clnt, rule.Service, rule.Priority, false)...) } return ctxChanges } // calculateChangesForRuleDeletion returns the conjMatchFlowContextChanges of the deleted policyRuleConjunction. It // will calculate the expected conjMatchFlowContext status, and the changed Openflow entries. func (c *policyRuleConjunction) calculateChangesForRuleDeletion() []*conjMatchFlowContextChange { var ctxChanges []*conjMatchFlowContextChange if c.fromClause != nil { ctxChanges = append(ctxChanges, c.fromClause.deleteAllMatches()...) } if c.toClause != nil { ctxChanges = append(ctxChanges, c.toClause.deleteAllMatches()...) } if c.serviceClause != nil { ctxChanges = append(ctxChanges, c.serviceClause.deleteAllMatches()...) } return ctxChanges } // getAllFlowKeys returns the matching strings of actions flows of // policyRuleConjunction, as well as matching flows of all its clauses. func (c *policyRuleConjunction) getAllFlowKeys() []string { flowKeys := []string{} dropFlowKeys := []string{} for _, flow := range c.actionFlows { flowKeys = append(flowKeys, flow.MatchString()) } addClauseFlowKeys := func(clause *clause) { if clause == nil { return } for _, ctx := range clause.matches { if ctx.flow != nil { flowKeys = append(flowKeys, ctx.flow.MatchString()) } if ctx.dropFlow != nil { dropFlowKeys = append(dropFlowKeys, ctx.dropFlow.MatchString()) } } } addClauseFlowKeys(c.fromClause) addClauseFlowKeys(c.toClause) addClauseFlowKeys(c.serviceClause) // Add flows in the order of action flows, conjunctive match flows, drop flows. 
return append(flowKeys, dropFlowKeys...) } func (c *client) getPolicyRuleConjunction(ruleID uint32) *policyRuleConjunction { conj, found, _ := c.policyCache.GetByKey(fmt.Sprint(ruleID)) if !found { return nil } return conj.(*policyRuleConjunction) } func (c *client) GetPolicyInfoFromConjunction(ruleID uint32) (string, string) { conjunction := c.getPolicyRuleConjunction(ruleID) if conjunction == nil { return "", "" } priorities := conjunction.ActionFlowPriorities() if len(priorities) == 0 { return "", "" } return conjunction.npRef.ToString(), priorities[0] } // UninstallPolicyRuleFlows removes the Openflow entry relevant to the specified NetworkPolicy rule. // It also returns a slice of stale ofPriorities used by ClusterNetworkPolicies. // UninstallPolicyRuleFlows will do nothing if no Openflow entry for the rule is installed. func (c *client) UninstallPolicyRuleFlows(ruleID uint32) ([]string, error) { c.replayMutex.RLock() defer c.replayMutex.RUnlock() conj := c.getPolicyRuleConjunction(ruleID) if conj == nil { klog.V(2).Infof("policyRuleConjunction with ID %d not found", ruleID) return nil, nil } staleOFPriorities := c.getStalePriorities(conj) // Delete action flows from the OVS bridge. if err := c.ofEntryOperations.DeleteAll(conj.actionFlows); err != nil { return nil, err } if err := c.ofEntryOperations.DeleteAll(conj.metricFlows); err != nil { return nil, err } c.conjMatchFlowLock.Lock() defer c.conjMatchFlowLock.Unlock() // Get the conjMatchFlowContext changes. ctxChanges := conj.calculateChangesForRuleDeletion() // Send the changed OpenFlow entries to the OVS bridge and update the conjMatchFlowContext. if err := c.applyConjunctiveMatchFlows(ctxChanges); err != nil { return nil, err } c.policyCache.Delete(conj) return staleOFPriorities, nil } // getStalePriorities returns the ofPriorities that will be stale on the rule table where the // policyRuleConjunction is installed, after the deletion of that policyRuleConjunction. func (c *client) getStalePriorities(conj *policyRuleConjunction) (staleOFPriorities []string) { var ofPrioritiesPotentiallyStale []string if conj.ruleTableID != IngressRuleTable.GetID() && conj.ruleTableID != EgressRuleTable.GetID() { ofPrioritiesPotentiallyStale = conj.ActionFlowPriorities() } klog.V(4).Infof("Potential stale ofpriority %v found", ofPrioritiesPotentiallyStale) for _, p := range ofPrioritiesPotentiallyStale { // Filter out all the policyRuleConjuctions created at the ofPriority across all CNP tables. conjs, _ := c.policyCache.ByIndex(priorityIndex, p) priorityStale := true for i := 0; i < len(conjs); i++ { conjFiltered := conjs[i].(*policyRuleConjunction) if conj.id != conjFiltered.id && conj.ruleTableID == conjFiltered.ruleTableID { // There are other policyRuleConjuctions in the same table created with this // ofPriority. The ofPriority is thus not stale and cannot be released. 
priorityStale = false break } } if priorityStale { klog.V(2).Infof("ofPriority %v is now stale", p) staleOFPriorities = append(staleOFPriorities, p) } } return staleOFPriorities } func (c *client) replayPolicyFlows() { var flows []binding.Flow addActionFlows := func(conj *policyRuleConjunction) { for _, flow := range conj.actionFlows { flow.Reset() flows = append(flows, flow) } } addMetricFlows := func(conj *policyRuleConjunction) { for _, flow := range conj.metricFlows { flow.Reset() flows = append(flows, flow) } } for _, conj := range c.policyCache.List() { addActionFlows(conj.(*policyRuleConjunction)) addMetricFlows(conj.(*policyRuleConjunction)) } addMatchFlows := func(ctx *conjMatchFlowContext) { if ctx.dropFlow != nil { ctx.dropFlow.Reset() flows = append(flows, ctx.dropFlow) } if ctx.flow != nil { ctx.flow.Reset() flows = append(flows, ctx.flow) } } for _, ctx := range c.globalConjMatchFlowCache { addMatchFlows(ctx) } if err := c.ofEntryOperations.AddAll(flows); err != nil { klog.Errorf("Error when replaying flows: %v", err) } } // AddPolicyRuleAddress adds one or multiple addresses to the specified NetworkPolicy rule. If addrType is srcAddress, the // addresses are added to PolicyRule.From, else to PolicyRule.To. func (c *client) AddPolicyRuleAddress(ruleID uint32, addrType types.AddressType, addresses []types.Address, priority *uint16) error { c.replayMutex.RLock() defer c.replayMutex.RUnlock() conj := c.getPolicyRuleConjunction(ruleID) // If policyRuleConjunction doesn't exist in client's policyCache return not found error. It should not happen, since // NetworkPolicyController will guarantee the policyRuleConjunction is created before this method is called. The check // here is for safety. if conj == nil { return newConjunctionNotFound(ruleID) } var clause = conj.getAddressClause(addrType) // Check if the clause is nil or not. The clause is nil if the addrType is an unsupported type. if clause == nil { return fmt.Errorf("no clause is using addrType %d", addrType) } c.conjMatchFlowLock.Lock() defer c.conjMatchFlowLock.Unlock() flowChanges := clause.addAddrFlows(c, addrType, addresses, priority) return c.applyConjunctiveMatchFlows(flowChanges) } // DeletePolicyRuleAddress removes addresses from the specified NetworkPolicy rule. If addrType is srcAddress, the addresses // are removed from PolicyRule.From, else from PolicyRule.To. func (c *client) DeletePolicyRuleAddress(ruleID uint32, addrType types.AddressType, addresses []types.Address, priority *uint16) error { c.replayMutex.RLock() defer c.replayMutex.RUnlock() conj := c.getPolicyRuleConjunction(ruleID) // If policyRuleConjunction doesn't exist in client's policyCache return not found error. It should not happen, since // NetworkPolicyController will guarantee the policyRuleConjunction is created before this method is called. The check // here is for safety. if conj == nil { return newConjunctionNotFound(ruleID) } var clause = conj.getAddressClause(addrType) // Check if the clause is nil or not. The clause is nil if the addrType is an unsupported type. if clause == nil { return fmt.Errorf("no clause is using addrType %d", addrType) } c.conjMatchFlowLock.Lock() defer c.conjMatchFlowLock.Unlock() // Remove policyRuleConjunction to actions of conjunctive match using specific address. changes := clause.deleteAddrFlows(addrType, addresses, priority) // Update the Openflow entries on the OVS bridge, and update local cache. 
return c.applyConjunctiveMatchFlows(changes) } func (c *client) GetNetworkPolicyFlowKeys(npName, npNamespace string) []string { flowKeys := []string{} // Hold replayMutex write lock to protect flows from being modified by // NetworkPolicy updates and replayPolicyFlows. This is more for logic // cleanliness, as: for now flow updates do not impact the matching string // generation; NetworkPolicy updates do not change policyRuleConjunction.actionFlows; // and last for protection of clause flows, conjMatchFlowLock is good enough. c.replayMutex.Lock() defer c.replayMutex.Unlock() for _, conjObj := range c.policyCache.List() { conj := conjObj.(*policyRuleConjunction) if conj.npRef.Name == npName && conj.npRef.Namespace == npNamespace { // There can be duplicated flows added due to conjunctive matches // shared by multiple policy rules (clauses). flowKeys = append(flowKeys, conj.getAllFlowKeys()...) } } return flowKeys } // flowUpdates stores updates to the actionFlows and matchFlows in a policyRuleConjunction. type flowUpdates struct { newActionFlows []binding.Flow newPriority uint16 } // getMatchFlowUpdates calculates the update for conjuctiveMatchFlows in a policyRuleConjunction to be // installed on a new priority. func getMatchFlowUpdates(conj *policyRuleConjunction, newPriority uint16) (add, del []binding.Flow) { allClause := []*clause{conj.fromClause, conj.toClause, conj.serviceClause} for _, c := range allClause { if c == nil { continue } for _, ctx := range c.matches { f := ctx.flow updatedFlow := f.CopyToBuilder(newPriority, true).Done() add = append(add, updatedFlow) del = append(del, f) } } return add, del } // processFlowUpdates identifies the update cases in flow adds and deletes. // For conjunctiveMatchFlow updates, the following scenario is possible: // A flow {priority=100,ip,reg1=0x1f action=conjunction(1,1/3)} need to be re-assigned priority=99. // In this case, an addFlow of <priority=99,ip,reg1=0x1f> and delFlow <priority=100,ip,reg1=0x1f> will be issued. // At the same time, another flow {priority=99,ip,reg1=0x1f action=conjunction(2,1/3)} exists and now needs to // be re-assigned priority 98. This operation will issue a delFlow <priority=99,ip,reg1=0x1f>, which // would essentially void the add flow for conj=1. // In this case, we remove the conflicting delFlow and set addFlow as a modifyFlow. func (c *client) processFlowUpdates(addFlows, delFlows []binding.Flow) (add, update, del []binding.Flow) { for _, a := range addFlows { matched := false for i := 0; i < len(delFlows); i++ { if a.FlowPriority() == delFlows[i].FlowPriority() && a.MatchString() == delFlows[i].MatchString() { matched = true // treat the addFlow as update update = append(update, a) // remove the delFlow from the list delFlows = append(delFlows[:i], delFlows[i+1:]...) // reset list index as delFlows[i] is removed i-- } } if !matched { add = append(add, a) } } del = delFlows return add, update, del } // updateConjunctionActionFlows constructs a new policyRuleConjunction with actionFlows updated to be // stored in the policyCache. 
func (c *client) updateConjunctionActionFlows(conj *policyRuleConjunction, updates flowUpdates) *policyRuleConjunction { newActionFlows := make([]binding.Flow, len(conj.actionFlows)) copy(newActionFlows, updates.newActionFlows) newConj := &policyRuleConjunction{ id: conj.id, fromClause: conj.fromClause, toClause: conj.toClause, serviceClause: conj.serviceClause, actionFlows: newActionFlows, npRef: conj.npRef, ruleTableID: conj.ruleTableID, } return newConj } // updateConjunctionMatchFlows updates the conjuctiveMatchFlows in a policyRuleConjunction. func (c *client) updateConjunctionMatchFlows(conj *policyRuleConjunction, newPriority uint16) { allClause := []*clause{conj.fromClause, conj.toClause, conj.serviceClause} for _, cl := range allClause { if cl == nil { continue } for i, ctx := range cl.matches { delete(c.globalConjMatchFlowCache, ctx.generateGlobalMapKey()) f := ctx.flow updatedFlow := f.CopyToBuilder(newPriority, true).Done() cl.matches[i].flow = updatedFlow cl.matches[i].priority = &newPriority } // update the globalConjMatchFlowCache so that the keys are updated for _, ctx := range cl.matches { c.globalConjMatchFlowCache[ctx.generateGlobalMapKey()] = ctx } } } // calculateFlowUpdates calculates the flow updates required for the priority re-assignments specified in the input map. func (c *client) calculateFlowUpdates(updates map[uint16]uint16, table uint8) (addFlows, delFlows []binding.Flow, conjFlowUpdates map[uint32]flowUpdates) { conjFlowUpdates = map[uint32]flowUpdates{} for original, newPriority := range updates { originalPriorityStr := strconv.Itoa(int(original)) conjs, _ := c.policyCache.ByIndex(priorityIndex, originalPriorityStr) for _, conjObj := range conjs { conj := conjObj.(*policyRuleConjunction) // Only re-assign flow priorities for flows in the table specified. if conj.ruleTableID != table { klog.V(4).Infof("Conjunction %v with the same actionFlow priority is from a different table %v", conj.id, conj.ruleTableID) continue } for _, actionFlow := range conj.actionFlows { flowPriority := actionFlow.FlowPriority() if flowPriority == original { // The OF flow was created at the priority which need to be re-installed // at the NewPriority now updatedFlow := actionFlow.CopyToBuilder(newPriority, true).Done() addFlows = append(addFlows, updatedFlow) delFlows = append(delFlows, actionFlow) // Store the actionFlow update to the policyRuleConjunction and update all // policyRuleConjunctions if flow installation is successful. conjFlowUpdates[conj.id] = flowUpdates{ append(conjFlowUpdates[conj.id].newActionFlows, updatedFlow), newPriority, } } } matchFlowAdd, matchFlowDel := getMatchFlowUpdates(conj, newPriority) addFlows = append(addFlows, matchFlowAdd...) delFlows = append(delFlows, matchFlowDel...) } } return addFlows, delFlows, conjFlowUpdates } // ReassignFlowPriorities takes a list of priority updates, and update the actionFlows to replace // the old priority with the desired one, for each priority update. func (c *client) ReassignFlowPriorities(updates map[uint16]uint16, table uint8) error { addFlows, delFlows, conjFlowUpdates := c.calculateFlowUpdates(updates, table) add, update, del := c.processFlowUpdates(addFlows, delFlows) // Commit the flows updates calculated. 
err := c.bridge.AddFlowsInBundle(add, update, del) if err != nil { return err } for conjID, actionUpdates := range conjFlowUpdates { originalConj, _, _ := c.policyCache.GetByKey(fmt.Sprint(conjID)) conj := originalConj.(*policyRuleConjunction) updatedConj := c.updateConjunctionActionFlows(conj, actionUpdates) c.updateConjunctionMatchFlows(updatedConj, actionUpdates.newPriority) c.policyCache.Update(updatedConj) } return nil } func parseDropFlow(flowMap map[string]string) (uint32, types.RuleMetric) { m := types.RuleMetric{} pkts, _ := strconv.ParseUint(flowMap["n_packets"], 10, 64) m.Packets = pkts m.Sessions = pkts bytes, _ := strconv.ParseUint(flowMap["n_bytes"], 10, 64) m.Bytes = bytes reg3 := flowMap["reg3"] id, _ := strconv.ParseUint(reg3[:strings.Index(reg3, " ")], 0, 32) return uint32(id), m } func parseAllowFlow(flowMap map[string]string) (uint32, types.RuleMetric) { m := types.RuleMetric{} pkts, _ := strconv.ParseUint(flowMap["n_packets"], 10, 64) m.Packets = pkts if strings.Contains(flowMap["ct_state"], "+") { // ct_state=+new m.Sessions = pkts } bytes, _ := strconv.ParseUint(flowMap["n_bytes"], 10, 64) m.Bytes = bytes ct_label := flowMap["ct_label"] idRaw := ct_label[strings.Index(ct_label, "0x")+2 : strings.Index(ct_label, "/")] if len(idRaw) > 8 { // only 32 bits are valid. idRaw = idRaw[:len(idRaw)-8] } id, _ := strconv.ParseUint(idRaw, 16, 32) return uint32(id), m } func parseFlowToMap(flow string) map[string]string { split := strings.Split(flow, ",") flowMap := make(map[string]string) for _, seg := range split { equalIndex := strings.Index(seg, "=") // Some substrings spilt by "," may have no "=", for instance, if "resubmit(,70)" is present. if equalIndex == -1 { continue } flowMap[strings.TrimSpace(seg[:equalIndex])] = strings.TrimSpace(seg[equalIndex+1:]) } return flowMap } func parseMetricFlow(flow string) (uint32, types.RuleMetric) { dropIdentifier := "reg0" flowMap := parseFlowToMap(flow) // example allow flow format: // table=101, n_packets=0, n_bytes=0, priority=200,ct_state=-new,ct_label=0x1/0xffffffff,ip actions=goto_table:105 // example drop flow format: // table=101, n_packets=9, n_bytes=666, priority=200,reg0=0x100000/0x100000,reg3=0x5 actions=drop if _, ok := flowMap[dropIdentifier]; ok { return parseDropFlow(flowMap) } return parseAllowFlow(flowMap) } func (c *client) NetworkPolicyMetrics() map[uint32]*types.RuleMetric { result := map[uint32]*types.RuleMetric{} egressFlows, _ := c.ovsctlClient.DumpTableFlows(EgressMetricTable.GetID()) ingressFlows, _ := c.ovsctlClient.DumpTableFlows(IngressMetricTable.GetID()) collectMetricsFromFlows := func(flows []string) { for _, flow := range flows { if !strings.Contains(flow, metricFlowIdentifier) { continue } ruleID, metric := parseMetricFlow(flow) if accMetric, ok := result[ruleID]; ok { accMetric.Merge(&metric) } else { result[ruleID] = &metric } } } // We have two flows for each allow rule. One matches 'ct_state=+new' // and counts the number of first packets, which is also the number // of sessions (this is the reason why we have 2 flows). The other // matches 'ct_state=-new' and is used to count all subsequent // packets in the session. We need to merge metrics from these 2 // flows to get the correct number of total packets. collectMetricsFromFlows(egressFlows) collectMetricsFromFlows(ingressFlows) return result }
1
48,686
move this to below `antrea.io` import section
antrea-io-antrea
go
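The review comment on this record asks for an import to be placed below the `antrea.io` import section. The hunk being discussed is not shown in this part of the record, so the sketch below only illustrates the grouping convention being requested; the package paths are placeholders, and blank imports are used so the example file still compiles.

package main

import (
	// Standard library imports first.
	"fmt"

	// Third-party imports next (blank imports keep this file compilable even
	// though nothing from them is called; paths are placeholders).
	_ "k8s.io/klog/v2"

	// antrea.io imports grouped together at the end, which is the section the
	// review comment asks the moved import to join.
	_ "antrea.io/antrea/pkg/agent/types"
	_ "antrea.io/antrea/pkg/ovs/openflow"
)

func main() {
	fmt.Println("import grouping example only")
}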
@@ -80,6 +80,8 @@ namespace OpenTelemetry.Trace /// </summary> /// <param name="activity">Activity instance.</param> /// <param name="kind">Activity execution kind.</param> + /// <remarks>This extension method should only be used on <see cref="Activity"/> instances that were created + /// via older means that predate <see cref="ActivitySource"/>.</remarks> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void SetKind(this Activity activity, ActivityKind kind) {
1
// <copyright file="ActivityExtensions.cs" company="OpenTelemetry Authors"> // Copyright The OpenTelemetry Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> using System; using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; using OpenTelemetry.Internal; namespace OpenTelemetry.Trace { /// <summary> /// Extension methods on Activity. /// </summary> public static class ActivityExtensions { /// <summary> /// Sets the status of activity execution. /// Activity class in .NET does not support 'Status'. /// This extension provides a workaround to store Status as special tags with key name of otel.status_code and otel.status_description. /// Read more about SetStatus here https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/trace/api.md#set-status. /// </summary> /// <param name="activity">Activity instance.</param> /// <param name="status">Activity execution status.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1062:Validate arguments of public methods", Justification = "ActivityProcessor is hot path")] public static void SetStatus(this Activity activity, Status status) { Debug.Assert(activity != null, "Activity should not be null"); activity.SetTag(SpanAttributeConstants.StatusCodeKey, SpanHelper.GetCachedCanonicalCodeString(status.CanonicalCode)); if (!string.IsNullOrEmpty(status.Description)) { activity.SetTag(SpanAttributeConstants.StatusDescriptionKey, status.Description); } } /// <summary> /// Gets the status of activity execution. /// Activity class in .NET does not support 'Status'. /// This extension provides a workaround to retrieve Status from special tags with key name otel.status_code and otel.status_description. /// </summary> /// <param name="activity">Activity instance.</param> /// <returns>Activity execution status.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1062:Validate arguments of public methods", Justification = "ActivityProcessor is hot path")] public static Status GetStatus(this Activity activity) { Debug.Assert(activity != null, "Activity should not be null"); var statusCanonicalCode = activity.Tags.FirstOrDefault(k => k.Key == SpanAttributeConstants.StatusCodeKey).Value; var statusDescription = activity.Tags.FirstOrDefault(d => d.Key == SpanAttributeConstants.StatusDescriptionKey).Value; var status = SpanHelper.ResolveCanonicalCodeToStatus(statusCanonicalCode); if (status.IsValid && !string.IsNullOrEmpty(statusDescription)) { return status.WithDescription(statusDescription); } return status; } /// <summary> /// Sets the kind of activity execution. 
/// </summary> /// <param name="activity">Activity instance.</param> /// <param name="kind">Activity execution kind.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void SetKind(this Activity activity, ActivityKind kind) { Debug.Assert(activity != null, "Activity should not be null"); SetKindProperty(activity, kind); } /// <summary> /// Record Exception. /// </summary> /// <param name="activity">Activity instance.</param> /// <param name="ex">Exception to be recorded.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void RecordException(this Activity activity, Exception ex) { if (ex == null) { return; } var tagsCollection = new ActivityTagsCollection { { SemanticConventions.AttributeExceptionType, ex.GetType().Name }, { SemanticConventions.AttributeExceptionStacktrace, ex.ToInvariantString() }, }; if (!string.IsNullOrWhiteSpace(ex.Message)) { tagsCollection.Add(SemanticConventions.AttributeExceptionMessage, ex.Message); } activity?.AddEvent(new ActivityEvent(SemanticConventions.AttributeExceptionEventName, default, tagsCollection)); } #pragma warning disable SA1201 // Elements should appear in the correct order private static readonly Action<Activity, ActivityKind> SetKindProperty = CreateActivityKindSetter(); #pragma warning restore SA1201 // Elements should appear in the correct order private static Action<Activity, ActivityKind> CreateActivityKindSetter() { ParameterExpression instance = Expression.Parameter(typeof(Activity), "instance"); ParameterExpression propertyValue = Expression.Parameter(typeof(ActivityKind), "propertyValue"); var body = Expression.Assign(Expression.Property(instance, "Kind"), propertyValue); return Expression.Lambda<Action<Activity, ActivityKind>>(body, instance, propertyValue).Compile(); } } }
1
16,704
Seems likely to confuse people. What if we moved it into ActivitySourceAdapter and made it private?
open-telemetry-opentelemetry-dotnet
.cs
@@ -320,6 +320,18 @@ func (b *EthAPIBackend) SendTx(ctx context.Context, signedTx *types.Transaction) if len(signedTx.Data()) > b.MaxCallDataSize { return fmt.Errorf("Calldata cannot be larger than %d, sent %d", b.MaxCallDataSize, len(signedTx.Data())) } + // The gas price must be a multiple of a gwei + if new(big.Int).Mod(signedTx.GasPrice(), big.NewInt(1e6)).Cmp(common.Big0) != 0 { + return errors.New("Gas price must be a multiple of 1,000,000 wei") + } + // If there is a value field set then reject transactions that + // contain calldata. The feature of sending transactions with value + // and calldata will be added in the future. + if signedTx.Value().Cmp(common.Big0) != 0 { + if len(signedTx.Data()) > 0 { + return errors.New("Cannot send transactions with value and calldata") + } + } } return b.eth.syncService.ApplyTransaction(signedTx) }
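The hunk above adds two validations to SendTx: the gas price must be a multiple of 1,000,000 wei (the inline comment says "a gwei", but the modulus used is 1e6 and 1 gwei is 1e9 wei, so the error string is the accurate description), and transactions carrying both a non-zero value and calldata are rejected. The following is a standalone sketch of the same checks with a hypothetical function name and signature; only the logic mirrors the diff.

package main

import (
	"errors"
	"fmt"
	"math/big"
)

// validateRollupTx mirrors the two checks added in the patch above; the name
// and signature here are illustrative, not from the repository.
func validateRollupTx(gasPrice, value *big.Int, data []byte) error {
	// Reject gas prices that are not a multiple of 1,000,000 wei, matching the
	// new(big.Int).Mod(..., big.NewInt(1e6)) check in the diff.
	if new(big.Int).Mod(gasPrice, big.NewInt(1e6)).Sign() != 0 {
		return errors.New("gas price must be a multiple of 1,000,000 wei")
	}
	// Reject transactions that carry both a non-zero value and calldata.
	if value.Sign() != 0 && len(data) > 0 {
		return errors.New("cannot send transactions with value and calldata")
	}
	return nil
}

func main() {
	fmt.Println(validateRollupTx(big.NewInt(2_000_000), big.NewInt(0), nil))          // <nil>
	fmt.Println(validateRollupTx(big.NewInt(1_500_001), big.NewInt(0), nil))          // gas price error
	fmt.Println(validateRollupTx(big.NewInt(1_000_000), big.NewInt(1), []byte{0x01})) // value+calldata error
}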
1
// Copyright 2015 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package eth import ( "context" "errors" "fmt" "math/big" "github.com/ethereum/go-ethereum/accounts" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/math" "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/core/bloombits" "github.com/ethereum/go-ethereum/core/rawdb" "github.com/ethereum/go-ethereum/core/state" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" "github.com/ethereum/go-ethereum/diffdb" "github.com/ethereum/go-ethereum/eth/downloader" "github.com/ethereum/go-ethereum/eth/gasprice" "github.com/ethereum/go-ethereum/ethdb" "github.com/ethereum/go-ethereum/event" "github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/params" "github.com/ethereum/go-ethereum/rpc" ) // EthAPIBackend implements ethapi.Backend for full nodes type EthAPIBackend struct { extRPCEnabled bool eth *Ethereum gpo *gasprice.Oracle l1gpo *gasprice.L1Oracle verifier bool gasLimit uint64 UsingOVM bool MaxCallDataSize int } func (b *EthAPIBackend) IsVerifier() bool { return b.verifier } func (b *EthAPIBackend) IsSyncing() bool { return b.eth.syncService.IsSyncing() } func (b *EthAPIBackend) GasLimit() uint64 { return b.gasLimit } func (b *EthAPIBackend) GetEthContext() (uint64, uint64) { bn := b.eth.syncService.GetLatestL1BlockNumber() ts := b.eth.syncService.GetLatestL1Timestamp() return bn, ts } func (b *EthAPIBackend) GetRollupContext() (uint64, uint64, uint64) { index := uint64(0) queueIndex := uint64(0) verifiedIndex := uint64(0) if latest := b.eth.syncService.GetLatestIndex(); latest != nil { index = *latest } if latest := b.eth.syncService.GetLatestEnqueueIndex(); latest != nil { queueIndex = *latest } if latest := b.eth.syncService.GetLatestVerifiedIndex(); latest != nil { verifiedIndex = *latest } return index, queueIndex, verifiedIndex } // ChainConfig returns the active chain configuration. 
func (b *EthAPIBackend) ChainConfig() *params.ChainConfig { return b.eth.blockchain.Config() } func (b *EthAPIBackend) CurrentBlock() *types.Block { return b.eth.blockchain.CurrentBlock() } func (b *EthAPIBackend) GetDiff(block *big.Int) (diffdb.Diff, error) { return b.eth.blockchain.GetDiff(block) } func (b *EthAPIBackend) SetHead(number uint64) { if number == 0 { log.Info("Cannot reset to genesis") return } if !b.UsingOVM { b.eth.protocolManager.downloader.Cancel() } b.eth.blockchain.SetHead(number) // Make sure to reset the LatestL1{Timestamp,BlockNumber} block := b.eth.blockchain.CurrentBlock() txs := block.Transactions() if len(txs) == 0 { log.Error("No transactions found in block", "number", number) return } tx := txs[0] blockNumber := tx.L1BlockNumber() if blockNumber == nil { log.Error("No L1BlockNumber found in transaction", "number", number) return } b.eth.syncService.SetLatestL1Timestamp(tx.L1Timestamp()) b.eth.syncService.SetLatestL1BlockNumber(blockNumber.Uint64()) } func (b *EthAPIBackend) HeaderByNumber(ctx context.Context, number rpc.BlockNumber) (*types.Header, error) { // Pending block is only known by the miner if number == rpc.PendingBlockNumber { block := b.eth.miner.PendingBlock() return block.Header(), nil } // Otherwise resolve and return the block if number == rpc.LatestBlockNumber { return b.eth.blockchain.CurrentBlock().Header(), nil } return b.eth.blockchain.GetHeaderByNumber(uint64(number)), nil } func (b *EthAPIBackend) HeaderByNumberOrHash(ctx context.Context, blockNrOrHash rpc.BlockNumberOrHash) (*types.Header, error) { if blockNr, ok := blockNrOrHash.Number(); ok { return b.HeaderByNumber(ctx, blockNr) } if hash, ok := blockNrOrHash.Hash(); ok { header := b.eth.blockchain.GetHeaderByHash(hash) if header == nil { return nil, errors.New("header for hash not found") } if blockNrOrHash.RequireCanonical && b.eth.blockchain.GetCanonicalHash(header.Number.Uint64()) != hash { return nil, errors.New("hash is not currently canonical") } return header, nil } return nil, errors.New("invalid arguments; neither block nor hash specified") } func (b *EthAPIBackend) HeaderByHash(ctx context.Context, hash common.Hash) (*types.Header, error) { return b.eth.blockchain.GetHeaderByHash(hash), nil } func (b *EthAPIBackend) BlockByNumber(ctx context.Context, number rpc.BlockNumber) (*types.Block, error) { // Pending block is only known by the miner if number == rpc.PendingBlockNumber { block := b.eth.miner.PendingBlock() return block, nil } // Otherwise resolve and return the block if number == rpc.LatestBlockNumber { return b.eth.blockchain.CurrentBlock(), nil } return b.eth.blockchain.GetBlockByNumber(uint64(number)), nil } func (b *EthAPIBackend) BlockByHash(ctx context.Context, hash common.Hash) (*types.Block, error) { return b.eth.blockchain.GetBlockByHash(hash), nil } func (b *EthAPIBackend) BlockByNumberOrHash(ctx context.Context, blockNrOrHash rpc.BlockNumberOrHash) (*types.Block, error) { if blockNr, ok := blockNrOrHash.Number(); ok { return b.BlockByNumber(ctx, blockNr) } if hash, ok := blockNrOrHash.Hash(); ok { header := b.eth.blockchain.GetHeaderByHash(hash) if header == nil { return nil, errors.New("header for hash not found") } if blockNrOrHash.RequireCanonical && b.eth.blockchain.GetCanonicalHash(header.Number.Uint64()) != hash { return nil, errors.New("hash is not currently canonical") } block := b.eth.blockchain.GetBlock(hash, header.Number.Uint64()) if block == nil { return nil, errors.New("header found, but block body is missing") } return block, nil } return 
nil, errors.New("invalid arguments; neither block nor hash specified") } func (b *EthAPIBackend) StateAndHeaderByNumber(ctx context.Context, number rpc.BlockNumber) (*state.StateDB, *types.Header, error) { // Pending state is only known by the miner if number == rpc.PendingBlockNumber { block, state := b.eth.miner.Pending() return state, block.Header(), nil } // Otherwise resolve the block number and return its state header, err := b.HeaderByNumber(ctx, number) if err != nil { return nil, nil, err } if header == nil { return nil, nil, errors.New("header not found") } stateDb, err := b.eth.BlockChain().StateAt(header.Root) return stateDb, header, err } func (b *EthAPIBackend) StateAndHeaderByNumberOrHash(ctx context.Context, blockNrOrHash rpc.BlockNumberOrHash) (*state.StateDB, *types.Header, error) { if blockNr, ok := blockNrOrHash.Number(); ok { return b.StateAndHeaderByNumber(ctx, blockNr) } if hash, ok := blockNrOrHash.Hash(); ok { header, err := b.HeaderByHash(ctx, hash) if err != nil { return nil, nil, err } if header == nil { return nil, nil, errors.New("header for hash not found") } if blockNrOrHash.RequireCanonical && b.eth.blockchain.GetCanonicalHash(header.Number.Uint64()) != hash { return nil, nil, errors.New("hash is not currently canonical") } stateDb, err := b.eth.BlockChain().StateAt(header.Root) return stateDb, header, err } return nil, nil, errors.New("invalid arguments; neither block nor hash specified") } func (b *EthAPIBackend) GetReceipts(ctx context.Context, hash common.Hash) (types.Receipts, error) { return b.eth.blockchain.GetReceiptsByHash(hash), nil } func (b *EthAPIBackend) GetLogs(ctx context.Context, hash common.Hash) ([][]*types.Log, error) { receipts := b.eth.blockchain.GetReceiptsByHash(hash) if receipts == nil { return nil, nil } logs := make([][]*types.Log, len(receipts)) for i, receipt := range receipts { logs[i] = receipt.Logs } return logs, nil } func (b *EthAPIBackend) GetTd(blockHash common.Hash) *big.Int { return b.eth.blockchain.GetTdByHash(blockHash) } func (b *EthAPIBackend) GetEVM(ctx context.Context, msg core.Message, state *state.StateDB, header *types.Header) (*vm.EVM, func() error, error) { state.SetBalance(msg.From(), math.MaxBig256) vmError := func() error { return nil } context := core.NewEVMContext(msg, header, b.eth.BlockChain(), nil) return vm.NewEVM(context, state, b.eth.blockchain.Config(), *b.eth.blockchain.GetVMConfig()), vmError, nil } func (b *EthAPIBackend) SubscribeRemovedLogsEvent(ch chan<- core.RemovedLogsEvent) event.Subscription { return b.eth.BlockChain().SubscribeRemovedLogsEvent(ch) } func (b *EthAPIBackend) SubscribePendingLogsEvent(ch chan<- []*types.Log) event.Subscription { return b.eth.miner.SubscribePendingLogs(ch) } func (b *EthAPIBackend) SubscribeChainEvent(ch chan<- core.ChainEvent) event.Subscription { return b.eth.BlockChain().SubscribeChainEvent(ch) } func (b *EthAPIBackend) SubscribeChainHeadEvent(ch chan<- core.ChainHeadEvent) event.Subscription { return b.eth.BlockChain().SubscribeChainHeadEvent(ch) } func (b *EthAPIBackend) SubscribeChainSideEvent(ch chan<- core.ChainSideEvent) event.Subscription { return b.eth.BlockChain().SubscribeChainSideEvent(ch) } func (b *EthAPIBackend) SubscribeLogsEvent(ch chan<- []*types.Log) event.Subscription { return b.eth.BlockChain().SubscribeLogsEvent(ch) } // Transactions originating from the RPC endpoints are added to remotes so that // a lock can be used around the remotes for when the sequencer is reorganizing. 
func (b *EthAPIBackend) SendTx(ctx context.Context, signedTx *types.Transaction) error { if b.UsingOVM { // The value field is not rolled up so it must be set to 0 if signedTx.Value().Cmp(new(big.Int)) != 0 { return fmt.Errorf("Cannot send transaction with non-zero value. Use WETH.transfer()") } to := signedTx.To() if to != nil { if *to == (common.Address{}) { return errors.New("Cannot send transaction to zero address") } // Prevent transactions from being submitted if the gas limit too high if signedTx.Gas() >= b.gasLimit { return fmt.Errorf("Transaction gasLimit (%d) is greater than max gasLimit (%d)", signedTx.Gas(), b.gasLimit) } // Prevent QueueOriginSequencer transactions that are too large to // be included in a batch. The `MaxCallDataSize` should be set to // the layer one consensus max transaction size in bytes minus the // constant sized overhead of a batch. This will prevent // a layer two transaction from not being able to be batch submitted // to layer one. if len(signedTx.Data()) > b.MaxCallDataSize { return fmt.Errorf("Calldata cannot be larger than %d, sent %d", b.MaxCallDataSize, len(signedTx.Data())) } } return b.eth.syncService.ApplyTransaction(signedTx) } // OVM Disabled return b.eth.txPool.AddLocal(signedTx) } func (b *EthAPIBackend) SetTimestamp(timestamp int64) { b.eth.blockchain.SetCurrentTimestamp(timestamp) } func (b *EthAPIBackend) GetPoolTransactions() (types.Transactions, error) { pending, err := b.eth.txPool.Pending() if err != nil { return nil, err } var txs types.Transactions for _, batch := range pending { txs = append(txs, batch...) } return txs, nil } func (b *EthAPIBackend) GetPoolTransaction(hash common.Hash) *types.Transaction { return b.eth.txPool.Get(hash) } func (b *EthAPIBackend) GetTransaction(ctx context.Context, txHash common.Hash) (*types.Transaction, common.Hash, uint64, uint64, error) { tx, blockHash, blockNumber, index := rawdb.ReadTransaction(b.eth.ChainDb(), txHash) return tx, blockHash, blockNumber, index, nil } func (b *EthAPIBackend) GetPoolNonce(ctx context.Context, addr common.Address) (uint64, error) { return b.eth.txPool.Nonce(addr), nil } func (b *EthAPIBackend) Stats() (pending int, queued int) { return b.eth.txPool.Stats() } func (b *EthAPIBackend) TxPoolContent() (map[common.Address]types.Transactions, map[common.Address]types.Transactions) { return b.eth.TxPool().Content() } func (b *EthAPIBackend) SubscribeNewTxsEvent(ch chan<- core.NewTxsEvent) event.Subscription { return b.eth.TxPool().SubscribeNewTxsEvent(ch) } func (b *EthAPIBackend) Downloader() *downloader.Downloader { return b.eth.Downloader() } func (b *EthAPIBackend) ProtocolVersion() int { return b.eth.EthVersion() } func (b *EthAPIBackend) SuggestPrice(ctx context.Context) (*big.Int, error) { return b.gpo.SuggestPrice(ctx) } func (b *EthAPIBackend) SuggestDataPrice(ctx context.Context) (*big.Int, error) { return b.l1gpo.SuggestDataPrice(ctx) } func (b *EthAPIBackend) SetL1GasPrice(ctx context.Context, gasPrice *big.Int) { b.l1gpo.SetL1GasPrice(gasPrice) } func (b *EthAPIBackend) ChainDb() ethdb.Database { return b.eth.ChainDb() } func (b *EthAPIBackend) EventMux() *event.TypeMux { return b.eth.EventMux() } func (b *EthAPIBackend) AccountManager() *accounts.Manager { return b.eth.AccountManager() } func (b *EthAPIBackend) ExtRPCEnabled() bool { return b.extRPCEnabled } func (b *EthAPIBackend) RPCGasCap() *big.Int { return b.eth.config.RPCGasCap } func (b *EthAPIBackend) BloomStatus() (uint64, uint64) { sections, _, _ := b.eth.bloomIndexer.Sections() return 
params.BloomBitsBlocks, sections } func (b *EthAPIBackend) ServiceFilter(ctx context.Context, session *bloombits.MatcherSession) { for i := 0; i < bloomFilterThreads; i++ { go session.Multiplex(bloomRetrievalBatch, bloomRetrievalWait, b.eth.bloomRequests) } }
1
15,602
The gas price doesn't need to be a multiple anymore, since we now support all gas prices.
ethereum-optimism-optimism
go
@@ -81,5 +81,10 @@ namespace Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.Interfaces
         /// Send the request to abort the test run
         /// </summary>
         void SendTestRunAbort();
+
+        /// <summary>
+        /// handle client process exit
+        /// </summary>
+        void OnClientProcessExit();
     }
 }
1
// Copyright (c) Microsoft. All rights reserved. namespace Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.Interfaces { using System; using System.Collections.Generic; using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; /// <summary> /// Defines contract to send test platform requests to test host /// </summary> public interface ITestRequestSender : IDisposable { /// <summary> /// Initializes the communication for sending requests /// </summary> /// <returns>Port Number of the communication channel</returns> int InitializeCommunication(); /// <summary> /// Waits for Request Handler to be connected /// </summary> /// <param name="connectionTimeout">Time to wait for connection</param> /// <returns>True, if Handler is connected</returns> bool WaitForRequestHandlerConnection(int connectionTimeout); /// <summary> /// Close the Sender /// </summary> void Close(); /// <summary> /// Initializes the Discovery /// </summary> /// <param name="pathToAdditionalExtensions">Paths to check for additional extensions</param> /// <param name="loadOnlyWellKnownExtensions">Load only well only extensions</param> void InitializeDiscovery(IEnumerable<string> pathToAdditionalExtensions, bool loadOnlyWellKnownExtensions); /// <summary> /// Initializes the Execution /// </summary> /// <param name="pathToAdditionalExtensions">Paths to check for additional extensions</param> /// <param name="loadOnlyWellKnownExtensions">Load only well only extensions</param> void InitializeExecution(IEnumerable<string> pathToAdditionalExtensions, bool loadOnlyWellKnownExtensions); /// <summary> /// Discovers the tests /// </summary> /// <param name="discoveryCriteria">DiscoveryCriteria for discovery</param> /// <param name="eventHandler">EventHandler for discovery events</param> void DiscoverTests(DiscoveryCriteria discoveryCriteria, ITestDiscoveryEventsHandler eventHandler); /// <summary> /// Starts the TestRun with given sources and criteria /// </summary> /// <param name="runCriteria">RunCriteria for test run</param> /// <param name="eventHandler">EventHandler for test run events</param> void StartTestRun(TestRunCriteriaWithSources runCriteria, ITestRunEventsHandler eventHandler); /// <summary> /// Starts the TestRun with given test cases and criteria /// </summary> /// <param name="runCriteria">RunCriteria for test run</param> /// <param name="eventHandler">EventHandler for test run events</param> void StartTestRun(TestRunCriteriaWithTests runCriteria, ITestRunEventsHandler eventHandler); /// <summary> /// Ends the Session /// </summary> void EndSession(); /// <summary> /// Send the request to cancel the test run /// </summary> void SendTestRunCancel(); /// <summary> /// Send the request to abort the test run /// </summary> void SendTestRunAbort(); } }
1
11,242
We are exposing implementation details in the interface. What if there are no processes involved in an implementation of `ITestRequestSender`?
microsoft-vstest
.cs
@@ -1167,6 +1167,8 @@ def main(args,
                   'address.\n')
         else:
             print('Error occurred on the server side, message: {}'.format(e))
+    except Exception as e:  # pylint: disable=broad-except
+        print ('Error occurred, message: {}'.format(e.message))
     return config
1
# Copyright 2017 The Forseti Security Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Forseti CLI.""" from argparse import ArgumentParser import json import os import sys from google.protobuf.json_format import MessageToJson import grpc from google.cloud.forseti.services import client as iam_client from google.cloud.forseti.common.util import file_loader from google.cloud.forseti.common.util import logger LOGGER = logger.get_logger(__name__) # pylint: disable=too-many-lines class DefaultParser(ArgumentParser): """Default parser, when error is triggered, instead of printing error message, it will print the help message (-h). """ def error(self, message=None): """This method will be triggered when error occurred. Args: message (str): Error message. """ if message: sys.stderr.write('Argument error: %s.\n' % message) self.print_usage() sys.exit(2) def define_inventory_parser(parent): """Define the inventory service parser. Args: parent (argparser): Parent parser to hook into. """ service_parser = parent.add_parser('inventory', help='inventory service') action_subparser = service_parser.add_subparsers( title='action', dest='action') create_inventory_parser = action_subparser.add_parser( 'create', help='Start a new inventory') create_inventory_parser.add_argument( '--import_as', metavar=('MODEL_NAME',), help='Import the inventory when complete, requires a model name') create_inventory_parser.add_argument( '--background', '-b', action='store_true', help='Execute inventory in background', ) create_inventory_parser.add_argument( '--enable_debug', action='store_true', help='Emit additional information for debugging.', ) delete_inventory_parser = action_subparser.add_parser( 'delete', help='Delete an inventory') delete_inventory_parser.add_argument( 'id', help='Inventory id to delete') purge_inventory_parser = action_subparser.add_parser( 'purge', help='Purge all inventory data older than the retention days.') purge_inventory_parser.add_argument( 'retention_days', default=None, nargs='?', help=('Optional. Number of days to retain the data. If not ' 'specified, then the value in forseti config yaml file will ' 'be used.')) _ = action_subparser.add_parser( 'list', help='List all inventory') get_inventory_parser = action_subparser.add_parser( 'get', help='Get a particular inventory') get_inventory_parser.add_argument( 'id', help='Inventory id to get') def define_config_parser(parent): """Define the config service parser. Args: parent (argparser): Parent parser to hook into. 
""" service_parser = parent.add_parser( 'config', help=('config service, persist and modify the' 'client configuration in ~/.forseti')) action_subparser = service_parser.add_subparsers( title='action', dest='action') _ = action_subparser.add_parser( 'show', help='Show the config') _ = action_subparser.add_parser( 'reset', help='Reset the config to its default values') delete_config_parser = action_subparser.add_parser( 'delete', help='Deletes an item from the config') delete_config_parser.add_argument( 'key', type=str, help='Key to delete from config') set_endpoint_config_parser = action_subparser.add_parser( 'endpoint', help='Configure the client endpoint') set_endpoint_config_parser.add_argument( 'hostport', type=str, help='Server endpoint in host:port format') set_model_config_parser = action_subparser.add_parser( 'model', help='Configure the model to use') set_model_config_parser.add_argument( 'name', type=str, help='Handle of the model to use, as hexlified sha1sum') set_format_config_parser = action_subparser.add_parser( 'format', help='Configure the output format') set_format_config_parser.add_argument( 'name', choices=['json', 'text'], help='Configure the CLI output format') def define_server_parser(parent): """Define the server config service parser. Args: parent (argparser): Parent parser to hook into. """ service_parser = parent.add_parser( 'server', help='Server config service') action_subparser = service_parser.add_subparsers( title='action', dest='action') log_level_parser = action_subparser.add_parser( 'log_level', help='Log level of the server.') log_level_subparser = log_level_parser.add_subparsers( title='subaction', dest='subaction') set_log_level = log_level_subparser.add_parser( 'set', help='Set the log level of the server.' ) set_log_level.add_argument( 'log_level', choices=['debug', 'info', 'warning', 'error']) _ = log_level_subparser.add_parser( 'get', help='Get the log level of the server.') config_parser = action_subparser.add_parser( 'configuration', help='Server configuration.') config_subparser = config_parser.add_subparsers( title='subaction', dest='subaction') _ = config_subparser.add_parser( 'get', help='Get the server configuration.' ) reload_config = config_subparser.add_parser( 'reload', help='Load the server configuration.' ) reload_config.add_argument( 'config_file_path', nargs='?', type=str, help=('Forseti configuration file path. If not specified, ' 'the default path will be used. Note: Please specify ' 'a path that the server has access to (e.g. a path in ' 'the server vm or a gcs path starts with gs://).') ) def define_model_parser(parent): """Define the model service parser. Args: parent (argparser): Parent parser to hook into. 
""" service_parser = parent.add_parser('model', help='model service') action_subparser = service_parser.add_subparsers( title='action', dest='action') use_model_parser = action_subparser.add_parser( 'use', help='Context switch into the model.') use_model_parser.add_argument( 'model', help='Model to switch to, either handle or name' ) _ = action_subparser.add_parser( 'list', help='List all available models') get_model_parser = action_subparser.add_parser( 'get', help='Get the details of a model by name or handle') get_model_parser.add_argument( 'model', help='Model to get') delete_model_parser = action_subparser.add_parser( 'delete', help='Deletes an entire model') delete_model_parser.add_argument( 'model', help='Model to delete, either handle or name') create_model_parser = action_subparser.add_parser( 'create', help='Create a model') create_model_parser.add_argument( 'name', help='Human readable name for this model') create_model_parser.add_argument( '--inventory_index_id', default='', help='Inventory id to import from' ) create_model_parser.add_argument( '--background', '-b', default=False, action='store_true', help='Run import in background' ) def define_scanner_parser(parent): """Define the scanner service parser. Args: parent (argparser): Parent parser to hook into. """ service_parser = parent.add_parser('scanner', help='scanner service') action_subparser = service_parser.add_subparsers( title='action', dest='action') action_subparser.add_parser( 'run', help='Run the scanner') def define_notifier_parser(parent): """Define the notifier service parser. Args: parent (argparser): Parent parser to hook into. """ service_parser = parent.add_parser('notifier', help='notifier service') action_subparser = service_parser.add_subparsers( title='action', dest='action') create_notifier_parser = action_subparser.add_parser( 'run', help='Run the notifier') create_notifier_parser.add_argument( '--inventory_index_id', default=0, help=('Id of the inventory index to send violation notifications. ' 'If this is not specified, then the last inventory index id ' 'will be used.') ) def define_explainer_parser(parent): """Define the explainer service parser. Args: parent (argparser): Parent parser to hook into. """ service_parser = parent.add_parser('explainer', help='explain service') action_subparser = service_parser.add_subparsers( title='action', dest='action') list_resource_parser = action_subparser.add_parser( 'list_resources', help='List resources') list_resource_parser.add_argument( '--prefix', default='', help='Resource full name prefix to filter for ' '(e.g. 
organization/1234567890/folder/my-folder-id)') list_members_parser = action_subparser.add_parser( 'list_members', help='List members by prefix') list_members_parser.add_argument( '--prefix', default='', help='Member prefix to filter for') list_roles_parser = action_subparser.add_parser( 'list_roles', help='List roles by prefix') list_roles_parser.add_argument( '--prefix', default='', help='Role prefix to filter for') perms_by_roles_parser = action_subparser.add_parser( 'list_permissions', help='List permissions by role(s)') perms_by_roles_parser.add_argument( '--roles', nargs='*', default=[], help='Role names') perms_by_roles_parser.add_argument( '--role_prefixes', nargs='*', default=[], help='Role prefixes') get_policy = action_subparser.add_parser( 'get_policy', help='Get a resource\'s direct policy') get_policy.add_argument( 'resource', help='Resource to get policy for') check_policy = action_subparser.add_parser( 'check_policy', help='Check if a member has access to a resource') check_policy.add_argument( 'resource', help='Resource to check on') check_policy.add_argument( 'permission', help='Permissions to check on') check_policy.add_argument( 'member', help='Member to check access for') explain_granted_parser = action_subparser.add_parser( 'why_granted', help='Explain why a role or permission is' ' granted for a member on a resource') explain_granted_parser.add_argument( 'member', help='Member to query') explain_granted_parser.add_argument( 'resource', help='Resource to query') explain_granted_parser.add_argument( '--role', default=None, help='Query for a role') explain_granted_parser.add_argument( '--permission', default=None, help='Query for a permission') explain_denied_parser = action_subparser.add_parser( 'why_denied', help='Explain why a set of roles or permissions ' 'is denied for a member on a resource') explain_denied_parser.add_argument( 'member', help='Member to query') explain_denied_parser.add_argument( 'resources', nargs='+', help='Resource to query') explain_denied_parser.add_argument( '--roles', nargs='*', default=[], help='Query for roles') explain_denied_parser.add_argument( '--permissions', nargs='*', default=[], help='Query for permissions') query_access_by_member = action_subparser.add_parser( 'access_by_member', help='List access by member and permissions') query_access_by_member.add_argument( 'member', help='Member to query') query_access_by_member.add_argument( 'permissions', default=[], nargs='*', help='Permissions to query for') query_access_by_member.add_argument( '--expand_resources', type=bool, default=False, help='Expand the resource hierarchy') query_access_by_authz = action_subparser.add_parser( 'access_by_authz', help='List access by role or permission') query_access_by_authz.add_argument( '--permission', default=None, nargs='?', help='Permission to query') query_access_by_authz.add_argument( '--role', default=None, nargs='?', help='Role to query') query_access_by_authz.add_argument( '--expand_groups', type=bool, default=False, help='Expand groups to their members') query_access_by_authz.add_argument( '--expand_resources', type=bool, default=False, help='Expand resources to their children') query_access_by_resource = action_subparser.add_parser( 'access_by_resource', help='List access by member and permissions') query_access_by_resource.add_argument( 'resource', help='Resource to query') query_access_by_resource.add_argument( 'permissions', default=[], nargs='*', help='Permissions to query for') query_access_by_resource.add_argument( '--expand_groups', 
type=bool, default=False, help='Expand groups to their members') def read_env(var_key, default): """Read an environment variable with a default value. Args: var_key (str): Environment key get. default (str): Default value if variable is not set. Returns: string: return environment value or default """ var_value = os.environ[var_key] if var_key in os.environ else default LOGGER.info('reading environment variable %s = %s', var_key, var_value) return var_value def define_parent_parser(parser_cls, config_env): """Define the parent parser. Args: parser_cls (type): Class to instantiate parser from. config_env (object): Configuration environment. Returns: argparser: The parent parser which has been defined. """ LOGGER.debug('parser_cls = %s, config_env = %s', parser_cls, config_env) parent_parser = parser_cls(prog='forseti') parent_parser.add_argument( '--endpoint', default=config_env['endpoint'], help='Server endpoint') parent_parser.add_argument( '--use_model', default=config_env['model'], help='Model to operate on') parent_parser.add_argument( '--out-format', default=config_env['format'], choices=['text', 'json']) return parent_parser def create_parser(parser_cls, config_env): """Create argument parser hierarchy. Args: parser_cls (cls): Class to instantiate parser from. config_env (object): Configuration environment Returns: argparser: The argument parser hierarchy which is created. """ main_parser = define_parent_parser(parser_cls, config_env) service_subparsers = main_parser.add_subparsers( title='service', dest='service') define_explainer_parser(service_subparsers) define_inventory_parser(service_subparsers) define_config_parser(service_subparsers) define_model_parser(service_subparsers) define_scanner_parser(service_subparsers) define_notifier_parser(service_subparsers) define_server_parser(service_subparsers) return main_parser class Output(object): """Output base interface.""" def write(self, obj): """Writes an object to the output channel. Args: obj (object): Object to write Raises: NotImplementedError: Always """ raise NotImplementedError() class TextOutput(Output): """Text output for result objects.""" def write(self, obj): """Writes text representation. Args: obj (object): Object to write as string """ print obj class JsonOutput(Output): """Raw output for result objects.""" def write(self, obj): """Writes json representation. Args: obj (object): Object to write as json """ print MessageToJson(obj) def run_config(_, config, output, config_env): """Run config commands. Args: _ (iam_client.ClientComposition): Unused. config (object): argparser namespace to use. output (Output): output writer to use. config_env (object): Configuration environment. 
""" def do_show_config(): """Show the current config.""" if isinstance(output, TextOutput): output.write(config_env) else: print config_env def do_set_endpoint(): """Set a config item.""" config_env['endpoint'] = config.hostport DefaultConfigParser.persist(config_env) do_show_config() def do_set_model(): """Set a config item.""" config_env['model'] = config.name DefaultConfigParser.persist(config_env) do_show_config() def do_set_output(): """Set a config item.""" config_env['format'] = config.name DefaultConfigParser.persist(config_env) do_show_config() def do_delete_config(): """Delete a config item.""" del config_env[config.key] DefaultConfigParser.persist(config_env) do_show_config() def do_reset_config(): """Reset the config to default values.""" for key in config_env: del config_env[key] DefaultConfigParser.persist(config_env) do_show_config() actions = { 'show': do_show_config, 'model': do_set_model, 'endpoint': do_set_endpoint, 'format': do_set_output, 'reset': do_reset_config, 'delete': do_delete_config} actions[config.action]() def run_scanner(client, config, output, _): """Run scanner commands. Args: client (iam_client.ClientComposition): client to use for requests. config (object): argparser namespace to use. output (Output): output writer to use. _ (object): Configuration environment. """ client = client.scanner def do_run(): """Run a scanner.""" for progress in client.run(): output.write(progress) actions = { 'run': do_run} actions[config.action]() def run_server(client, config, output, _): """Run scanner commands. Args: client (iam_client.ClientComposition): client to use for requests. config (object): argparser namespace to use. output (Output): output writer to use. _ (object): Configuration environment. """ client = client.server_config def do_get_log_level(): """Get the log level of the server.""" output.write(client.get_log_level()) def do_set_log_level(): """Set the log level of the server.""" output.write(client.set_log_level(config.log_level)) def do_reload_configuration(): """Reload the configuration of the server.""" output.write(client.reload_server_configuration( config.config_file_path)) def do_get_configuration(): """Get the configuration of the server.""" output.write(client.get_server_configuration()) actions = { 'log_level': { 'get': do_get_log_level, 'set': do_set_log_level }, 'configuration': { 'get': do_get_configuration, 'reload': do_reload_configuration } } actions[config.action][config.subaction]() def run_notifier(client, config, output, _): """Run notifier commands. Args: client (iam_client.ClientComposition): client to use for requests. config (object): argparser namespace to use. output (Output): output writer to use. _ (object): Configuration environment. """ client = client.notifier def do_run(): """Run the notifier.""" for progress in client.run(int(config.inventory_index_id)): output.write(progress) actions = { 'run': do_run} actions[config.action]() def run_model(client, config, output, config_env): """Run model commands. Args: client (iam_client.ClientComposition): client to use for requests. config (object): argparser namespace to use. output (Output): output writer to use. config_env (object): Configuration environment. 
""" client = client.model def do_list_models(): """List models.""" for model in client.list_models(): output.write(model) def do_get_model(): """Get details of a model.""" result = client.get_model(config.model) output.write(result) def do_delete_model(): """Delete a model.""" model = client.get_model(config.model) result = client.delete_model(model.handle) output.write(result) def do_create_model(): """Create a model.""" result = client.new_model('inventory', config.name, int(config.inventory_index_id), config.background) output.write(result) def do_use_model(): """Use a model. Raises: Warning: When the specified model is not usable or not existed """ model = client.get_model(config.model) if model and model.status in ['SUCCESS', 'PARTIAL_SUCCESS']: config_env['model'] = model.handle else: raise Warning('use_model failed, the specified model is ' 'either not existed or not usable.') DefaultConfigParser.persist(config_env) actions = { 'create': do_create_model, 'list': do_list_models, 'get': do_get_model, 'delete': do_delete_model, 'use': do_use_model} actions[config.action]() def run_inventory(client, config, output, _): """Run inventory commands. Args: client (iam_client.ClientComposition): client to use for requests. config (object): argparser namespace to use. output (Output): output writer to use. _ (object): Unused. """ client = client.inventory def do_create_inventory(): """Create an inventory.""" for progress in client.create(config.background, config.import_as, config.enable_debug): output.write(progress) def do_list_inventory(): """List an inventory.""" for inventory in client.list(): output.write(inventory) def do_get_inventory(): """Get an inventory.""" result = client.get(int(config.id)) output.write(result) def do_delete_inventory(): """Delete an inventory.""" result = client.delete(int(config.id)) output.write(result) def do_purge_inventory(): """Purge all inventory data older than the retention days.""" result = client.purge(config.retention_days) output.write(result) actions = { 'create': do_create_inventory, 'list': do_list_inventory, 'get': do_get_inventory, 'delete': do_delete_inventory, 'purge': do_purge_inventory} actions[config.action]() def run_explainer(client, config, output, _): """Run explain commands. Args: client (iam_client.ClientComposition): client to use for requests. config (object): argparser namespace to use. output (Output): output writer to use. _ (object): Unused. """ client = client.explain def do_list_resources(): """List resources by prefix""" result = client.list_resources(config.prefix) output.write(result) def do_list_members(): """List resources by prefix""" result = client.list_members(config.prefix) output.write(result) def do_list_roles(): """List roles by prefix""" result = client.list_roles(config.prefix) output.write(result) def do_list_permissions(): """List permissions by roles or role prefixes. 
Raises: ValueError: if neither a role nor a role prefix is set """ if not any([config.roles, config.role_prefixes]): raise ValueError('please specify either a role or a role prefix') result = client.query_permissions_by_roles(config.roles, config.role_prefixes) output.write(result) def do_get_policy(): """Get access""" result = client.get_iam_policy(config.resource) output.write(result) def do_check_policy(): """Check access""" result = client.check_iam_policy(config.resource, config.permission, config.member) output.write(result) def do_why_granted(): """Explain why a permission or role is granted.""" result = client.explain_granted(config.member, config.resource, config.role, config.permission) output.write(result) def do_why_not_granted(): """Explain why a permission or a role is NOT granted.""" result = client.explain_denied(config.member, config.resources, config.roles, config.permissions) output.write(result) def do_query_access_by_member(): """Query access by member and permissions""" result = client.query_access_by_members(config.member, config.permissions, config.expand_resources) output.write(result) def do_query_access_by_resource(): """Query access by resource and permissions""" result = client.query_access_by_resources(config.resource, config.permissions, config.expand_groups) output.write(result) def do_query_access_by_authz(): """Query access by role or permission Raises: ValueError: if neither a role nor a permission is set """ if not any([config.role, config.permission]): raise ValueError('please specify either a role or a permission') for access in ( client.query_access_by_permissions(config.role, config.permission, config.expand_groups, config.expand_resources)): output.write(access) actions = { 'list_resources': do_list_resources, 'list_members': do_list_members, 'list_roles': do_list_roles, 'get_policy': do_get_policy, 'check_policy': do_check_policy, 'why_granted': do_why_granted, 'why_denied': do_why_not_granted, 'list_permissions': do_list_permissions, 'access_by_member': do_query_access_by_member, 'access_by_resource': do_query_access_by_resource, 'access_by_authz': do_query_access_by_authz} actions[config.action]() OUTPUTS = { 'text': TextOutput, 'json': JsonOutput, } SERVICES = { 'explainer': run_explainer, 'inventory': run_inventory, 'config': run_config, 'model': run_model, 'scanner': run_scanner, 'notifier': run_notifier, 'server': run_server } class DefaultConfigParser(object): """Handles creation and persistence of DefaultConfig""" @classmethod def persist(cls, config): """Save a configuration file. Args: config (obj): Configuration to store. """ with file(get_config_path(), 'w+') as outfile: json.dump(config, outfile) @classmethod def load(cls): """Open configuration file and create an instance from it. Returns: object: DefaultConfig. """ try: with file(get_config_path()) as infile: return DefaultConfig(json.load(infile)) except IOError: LOGGER.warn('IOError - trying to open configuration' ' file located at %s', get_config_path()) return DefaultConfig() class DefaultConfig(dict): """Represents the configuration.""" DEFAULT_ENDPOINT = 'localhost:50051' DEFAULT = { 'endpoint': '', 'model': '', 'format': 'text', } def __init__(self, *args, **kwargs): """Constructor. Args: *args (list): Forwarded to base class. **kwargs (dict): Forwarded to base class. 
""" super(DefaultConfig, self).__init__(*args, **kwargs) self.DEFAULT['endpoint'] = self.get_default_endpoint() # Initialize default values for key, value in self.DEFAULT.iteritems(): if key not in self: self[key] = value def get_default_endpoint(self): """Get server address. Returns: str: Forseti server endpoint """ try: conf_path = os.environ['FORSETI_CLIENT_CONFIG'] configs = file_loader.read_and_parse_file(conf_path) server_ip = configs.get('server_ip') if server_ip: return '{}:50051'.format(server_ip) except (KeyError, IOError) as err: LOGGER.warn(err) return self.DEFAULT_ENDPOINT def __getitem__(self, key): """Get item by key. Args: key (object): Key to get value for. Returns: object: Returns base classe setitem result. Raises: KeyError: If configuration key is unknown. """ if key not in self.DEFAULT: error_message = 'Configuration key unknown: {}'.format(key) LOGGER.error(error_message) raise KeyError(error_message) return dict.__getitem__(self, key) def __setitem__(self, key, value): """Set item by key. Args: key (object): Key to set value for. value (object): Value to set. Returns: object: Returns base classe setitem result. Raises: KeyError: If configuration key is unknown. """ if key not in self.DEFAULT: error_message = 'Configuration key unknown: {}'.format(key) LOGGER.error(error_message) raise KeyError(error_message) return dict.__setitem__(self, key, value) def __delitem__(self, key): """Delete item by key. Args: key (object): Key to delete value for Raises: KeyError: If configuration key is unknown. """ if key not in self.DEFAULT: error_message = 'Configuration key unknown: {}'.format(key) LOGGER.error(error_message) raise KeyError(error_message) self[key] = self.DEFAULT[key] def main(args, config_env, client=None, outputs=None, parser_cls=DefaultParser, services=None): """Main function. Args: args (list): Command line arguments without argv[0]. config_env (obj): Configuration environment. client (obj): API client to use. outputs (list): Supported output formats. parser_cls (type): Argument parser type to instantiate. services (list): Supported Forseti Server services. Returns: object: Environment configuration. """ parser = create_parser(parser_cls, config_env) config = parser.parse_args(args) if not client: client = iam_client.ClientComposition(config.endpoint) client.switch_model(config.use_model) if not outputs: outputs = OUTPUTS if not services: services = SERVICES output = outputs[config.out_format]() try: services[config.service](client, config, output, config_env) except ValueError as e: parser.error(e.message) except grpc.RpcError as e: grpc_status_code = e.code() if grpc_status_code == grpc.StatusCode.UNAVAILABLE: print('Error communicating to the Forseti server.\n' 'Please check the status of the server and make sure it\'s ' 'running.\n' 'If you are accessing from a client VM, make sure the ' '`server_ip` field inside the client configuration file in ' 'the Forseti client GCS bucket contains the right IP ' 'address.\n') else: print('Error occurred on the server side, message: {}'.format(e)) return config def get_config_path(): """Get configuration file path. Returns: str: Configuration path. """ default_path = os.path.join(os.getenv('HOME'), '.forseti') config_path = read_env('FORSETI_SERVER_CONFIG', default_path) return config_path if __name__ == '__main__': ENV_CONFIG = DefaultConfigParser.load() main(sys.argv[1:], ENV_CONFIG)
1
30,281
It would be awesome not to handle the broad exception here. Instead, raise a custom exception, something like `ModelNotSetException`, from `require_model()`, and then handle it here with a nicer message telling the user to set the model first (a sketch of this pattern follows this record).
forseti-security-forseti-security
py
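A minimal Python sketch of the pattern suggested in the forseti-security review comment above, assuming a hypothetical `ModelNotSetException` raised from `require_model()`; the function names, bodies, and messages are illustrative, not Forseti's actual implementation:

class ModelNotSetException(Exception):
    """Raised when a command needs an active model but none is configured."""


def require_model(model_handle):
    """Guard called by commands that require a model to be set (illustrative)."""
    if not model_handle:
        raise ModelNotSetException(
            'No model is set. Run `forseti config model <MODEL>` first.')
    return model_handle


def dispatch(service_handler, client, config, output, config_env):
    """Dispatch a CLI command and turn the specific error into a user hint."""
    try:
        service_handler(client, config, output, config_env)
    except ModelNotSetException as e:
        # Catch only the narrow, expected failure instead of a broad except.
        print('Error: {}'.format(e))

This keeps the dispatcher free of a broad `except Exception` while still giving the user an actionable message.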
@@ -18,7 +18,8 @@ return [
             'widget_title_default' => 'Website',
             'online' => 'Online',
             'maintenance' => 'In maintenance',
-            'manage_themes' => 'Manage themes'
+            'manage_themes' => 'Manage themes',
+            'customize_theme' => 'Customize Theme'
         ]
     ],
     'theme' => [
1
<?php return [ 'cms_object' => [ 'invalid_file' => 'Invalid file name: :name. File names can contain only alphanumeric symbols, underscores, dashes and dots. Some examples of correct file names: page.htm, page, subdirectory/page', 'invalid_property' => "The property ':name' cannot be set", 'file_already_exists' => "File ':name' already exists.", 'error_saving' => "Error saving file ':name'. Please check write permissions.", 'error_creating_directory' => 'Error creating directory :name. Please check write permissions.', 'invalid_file_extension'=>'Invalid file extension: :invalid. Allowed extensions are: :allowed.', 'error_deleting' => "Error deleting the template file ':name'. Please check write permissions.", 'delete_success' => 'Templates deleted: :count.', 'file_name_required' => 'The File Name field is required.', 'safe_mode_enabled' => 'Safe mode is currently enabled.' ], 'dashboard' => [ 'active_theme' => [ 'widget_title_default' => 'Website', 'online' => 'Online', 'maintenance' => 'In maintenance', 'manage_themes' => 'Manage themes' ] ], 'theme' => [ 'not_found_name' => "The theme ':name' is not found.", 'active' => [ 'not_set' => 'The active theme is not set.', 'not_found' => 'The active theme is not found.' ], 'edit' => [ 'not_set' => 'The edit theme is not set.', 'not_found' => 'The edit theme is not found.', 'not_match' => "The object you're trying to access doesn't belong to the theme being edited. Please reload the page." ], 'settings_menu' => 'Front-end theme', 'settings_menu_description' => 'Preview the list of installed themes and select an active theme.', 'default_tab' => 'Properties', 'name_label' => 'Name', 'name_create_placeholder' => 'New theme name', 'author_label' => 'Author', 'author_placeholder' => 'Person or company name', 'description_label' => 'Description', 'description_placeholder' => 'Theme description', 'homepage_label' => 'Homepage', 'homepage_placeholder' => 'Website URL', 'code_label' => 'Code', 'code_placeholder' => 'A unique code for this theme used for distribution', 'preview_image_label' => 'Preview image', 'preview_image_placeholder' => 'The path of theme preview image.', 'dir_name_label' => 'Directory name', 'dir_name_create_label' => 'The destination theme directory', 'theme_label' => 'Theme', 'theme_title' => 'Themes', 'activate_button' => 'Activate', 'active_button' => 'Activate', 'customize_theme' => 'Customize Theme', 'customize_button' => 'Customize', 'duplicate_button' => 'Duplicate', 'duplicate_title' => 'Duplicate theme', 'duplicate_theme_success' => 'Theme duplicated!', 'manage_button' => 'Manage', 'manage_title' => 'Manage theme', 'edit_properties_title' => 'Theme', 'edit_properties_button' => 'Edit properties', 'save_properties' => 'Save properties', 'import_button' => 'Import', 'import_title' => 'Import theme', 'import_theme_success' => 'Theme imported!', 'import_uploaded_file' => 'Theme archive file', 'import_overwrite_label' => 'Overwrite existing files', 'import_overwrite_comment' => 'Untick this box to only import new files', 'import_folders_label' => 'Folders', 'import_folders_comment' => 'Please select the theme folders you would like to import', 'export_button' => 'Export', 'export_title' => 'Export theme', 'export_folders_label' => 'Folders', 'export_folders_comment' => 'Please select the theme folders you would like to export', 'delete_button' => 'Delete', 'delete_confirm' => 'Are you sure you want to delete this theme? 
It cannot be undone!', 'delete_active_theme_failed' => 'Cannot delete the active theme, try making another theme active first.', 'delete_theme_success' => 'Theme deleted!', 'create_title' => 'Create theme', 'create_button' => 'Create', 'create_new_blank_theme' => 'Create a new blank theme', 'create_theme_success' => 'Theme created!', 'create_theme_required_name' => 'Please specify a name for the theme.', 'new_directory_name_label' => 'Theme directory', 'new_directory_name_comment' => 'Provide a new directory name for the duplicated theme.', 'dir_name_invalid' => 'Name can contain only digits, Latin letters and the following symbols: _-', 'dir_name_taken' => 'Desired theme directory already exists.', 'find_more_themes' => 'Find more themes', 'saving' => 'Saving theme...', 'return' => 'Return to themes list' ], 'maintenance' => [ 'settings_menu' => 'Maintenance mode', 'settings_menu_description' => 'Configure the maintenance mode page and toggle the setting.', 'is_enabled' => 'Enable maintenance mode', 'is_enabled_comment' => 'Select the page to show when maintenance mode is activated.', 'hint' => 'Maintenance mode will display the maintenance page to visitors who are not signed in to the back-end area.' ], 'page' => [ 'not_found_name' => "The page ':name' is not found", 'not_found' => [ 'label' => 'Page not found', 'help' => 'The requested page cannot be found.' ], 'custom_error' => [ 'label' => 'Page error', 'help' => "We're sorry, but something went wrong and the page cannot be displayed." ], 'menu_label' => 'Pages', 'unsaved_label' => 'Unsaved page(s)', 'no_list_records' => 'No pages found', 'new' => 'New page', 'invalid_url' => 'Invalid URL format. The URL should start with the forward slash symbol and can contain digits, Latin letters and the following symbols: ._-[]:?|/+*^$', 'delete_confirm_multiple' => 'Delete selected pages?', 'delete_confirm_single' => 'Delete this page?', 'no_layout' => '-- no layout --', 'cms_page' => 'CMS page', 'title' => 'Page title', 'url' => 'Page URL', 'file_name' => 'Page file name' ], 'layout' => [ 'not_found_name' => "The layout ':name' is not found", 'menu_label' => 'Layouts', 'unsaved_label' => 'Unsaved layout(s)', 'no_list_records' => 'No layouts found', 'new' => 'New layout', 'delete_confirm_multiple' => 'Delete selected layouts?', 'delete_confirm_single' => 'Delete this layout?' ], 'partial' => [ 'not_found_name' => "The partial ':name' is not found.", 'invalid_name' => 'Invalid partial name: :name.', 'menu_label' => 'Partials', 'unsaved_label' => 'Unsaved partial(s)', 'no_list_records' => 'No partials found', 'delete_confirm_multiple' => 'Delete selected partials?', 'delete_confirm_single' => 'Delete this partial?', 'new' => 'New partial' ], 'content' => [ 'not_found_name' => "The content file ':name' is not found.", 'menu_label' => 'Content', 'unsaved_label' => 'Unsaved content', 'no_list_records' => 'No content files found', 'delete_confirm_multiple' => 'Delete selected content files or directories?', 'delete_confirm_single' => 'Delete this content file?', 'new' => 'New content file' ], 'ajax_handler' => [ 'invalid_name' => 'Invalid AJAX handler name: :name.', 'not_found' => "AJAX handler ':name' was not found." ], 'cms' => [ 'menu_label' => 'CMS' ], 'sidebar' => [ 'add' => 'Add', 'search' => 'Search...' 
], 'editor' => [ 'settings' => 'Settings', 'title' => 'Title', 'new_title' => 'New page title', 'url' => 'URL', 'filename' => 'File Name', 'layout' => 'Layout', 'description' => 'Description', 'preview' => 'Preview', 'meta' => 'Meta', 'meta_title' => 'Meta Title', 'meta_description' => 'Meta Description', 'markup' => 'Markup', 'code' => 'Code', 'content' => 'Content', 'hidden' => 'Hidden', 'hidden_comment' => 'Hidden pages are accessible only by logged-in back-end users.', 'enter_fullscreen' => 'Enter fullscreen mode', 'exit_fullscreen' => 'Exit fullscreen mode', 'open_searchbox' => 'Open Search box', 'close_searchbox' => 'Close Search box', 'open_replacebox' => 'Open Replace box', 'close_replacebox' => 'Close Replace box' ], 'asset' => [ 'menu_label' => 'Assets', 'unsaved_label' => 'Unsaved asset(s)', 'drop_down_add_title' => 'Add...', 'drop_down_operation_title' => 'Action...', 'upload_files' => 'Upload file(s)', 'create_file' => 'Create file', 'create_directory' => 'Create directory', 'directory_popup_title' => 'New directory', 'directory_name' => 'Directory name', 'rename' => 'Rename', 'delete' => 'Delete', 'move' => 'Move', 'select' => 'Select', 'new' => 'New file', 'rename_popup_title' => 'Rename', 'rename_new_name' => 'New name', 'invalid_path' => 'Path can contain only digits, Latin letters, spaces and the following symbols: ._-/', 'error_deleting_file' => 'Error deleting file :name.', 'error_deleting_dir_not_empty' => 'Error deleting directory :name. The directory is not empty.', 'error_deleting_dir' => 'Error deleting directory :name.', 'invalid_name' => 'Name can contain only digits, Latin letters, spaces and the following symbols: ._-', 'original_not_found' => 'Original file or directory not found', 'already_exists' => 'File or directory with this name already exists', 'error_renaming' => 'Error renaming the file or directory', 'name_cant_be_empty' => 'The name cannot be empty', 'too_large' => 'The uploaded file is too large. The maximum allowed file size is :max_size', 'type_not_allowed' => 'Only the following file types are allowed: :allowed_types', 'file_not_valid' => 'File is not valid', 'error_uploading_file' => "Error uploading file ':name': :error", 'move_please_select' => 'please select', 'move_destination' => 'Destination directory', 'move_popup_title' => 'Move assets', 'move_button' => 'Move', 'selected_files_not_found' => 'Selected files not found', 'select_destination_dir' => 'Please select a destination directory', 'destination_not_found' => 'Destination directory is not found', 'error_moving_file' => 'Error moving file :file', 'error_moving_directory' => 'Error moving directory :dir', 'error_deleting_directory' => 'Error deleting the original directory :dir', 'no_list_records' => 'No files found', 'delete_confirm' => 'Delete selected files or directories?', 'path' => 'Path' ], 'component' => [ 'menu_label' => 'Components', 'unnamed' => 'Unnamed', 'no_description' => 'No description provided', 'alias' => 'Alias', 'alias_description' => 'A unique name given to this component when using it in the page or layout code.', 'validation_message' => 'Component aliases are required and can contain only Latin symbols, digits, and underscores. The aliases should start with a Latin symbol.', 'invalid_request' => 'The template cannot be saved because of invalid component data.', 'no_records' => 'No components found', 'not_found' => "The component ':name' is not found.", 'method_not_found' => "The component ':name' does not contain a method ':method'." 
], 'template' => [ 'invalid_type' => 'Unknown template type.', 'not_found' => 'Template not found.', 'saved' => 'Template saved.', 'no_list_records' => 'No records found', 'delete_confirm' => 'Delete selected templates?', 'order_by' => 'Order by' ], 'permissions' => [ 'name' => 'CMS', 'manage_content' => 'Manage website content files', 'manage_assets' => 'Manage website assets - images, JavaScript files, CSS files', 'manage_pages' => 'Create, modify and delete website pages', 'manage_layouts' => 'Create, modify and delete CMS layouts', 'manage_partials' => 'Create, modify and delete CMS partials', 'manage_themes' => 'Activate, deactivate and configure CMS themes', 'manage_media' => 'Upload and manage media contents - images, videos, sounds, documents' ], 'mediafinder' => [ 'label' => 'Media Finder', 'default_prompt' => 'Click the %s button to find a media item' ], 'media' => [ 'invalid_path' => "Invalid file path specified: ':path'.", 'menu_label' => 'Media', 'upload' => 'Upload', 'move' => 'Move', 'delete' => 'Delete', 'add_folder' => 'Add folder', 'search' => 'Search', 'display' => 'Display', 'filter_everything' => 'Everything', 'filter_images' => 'Images', 'filter_video' => 'Video', 'filter_audio' => 'Audio', 'filter_documents' => 'Documents', 'library' => 'Library', 'folder_size_items' => 'item(s)', 'size' => 'Size', 'title' => 'Title', 'last_modified' => 'Last modified', 'public_url' => 'Public URL', 'click_here' => 'Click here', 'thumbnail_error' => 'Error generating thumbnail.', 'return_to_parent' => 'Return to the parent folder', 'return_to_parent_label' => 'Go up ..', 'nothing_selected' => 'Nothing is selected.', 'multiple_selected' => 'Multiple items selected.', 'uploading_file_num' => 'Uploading :number file(s)...', 'uploading_complete' => 'Upload complete', 'uploading_error' => 'Upload failed', 'type_blocked' => 'The file type used is blocked for security reasons.', 'order_by' => 'Order by', 'folder' => 'Folder', 'no_files_found' => 'No files found by your request.', 'delete_empty' => 'Please select items to delete.', 'delete_confirm' => 'Delete the selected item(s)?', 'error_renaming_file' => 'Error renaming the item.', 'new_folder_title' => 'New folder', 'folder_name' => 'Folder name', 'error_creating_folder' => 'Error creating folder', 'folder_or_file_exist' => 'A folder or file with the specified name already exists.', 'move_empty' => 'Please select items to move.', 'move_popup_title' => 'Move files or folders', 'move_destination' => 'Destination folder', 'please_select_move_dest' => 'Please select a destination folder.', 'move_dest_src_match' => 'Please select another destination folder.', 'empty_library' => 'The Media Library is empty. 
Upload files or create folders to get started.', 'insert' => 'Insert', 'crop_and_insert' => 'Crop & Insert', 'select_single_image' => 'Please select a single image.', 'selection_not_image' => 'The selected item is not an image.', 'restore' => 'Undo all changes', 'resize' => 'Resize...', 'selection_mode_normal' => 'Normal', 'selection_mode_fixed_ratio' => 'Fixed ratio', 'selection_mode_fixed_size' => 'Fixed size', 'height' => 'Height', 'width' => 'Width', 'selection_mode' => 'Selection mode', 'resize_image' => 'Resize image', 'image_size' => 'Image size:', 'selected_size' => 'Selected:' ], 'theme_log' => [ 'hint' => 'This log displays any changes made to the theme by administrators in the back-end area.', 'menu_label' => 'Theme log', 'menu_description' => 'View changes made to the active theme.', 'empty_link' => 'Empty theme log', 'empty_loading' => 'Emptying theme log...', 'empty_success' => 'Theme log emptied', 'return_link' => 'Return to theme log', 'id' => 'ID', 'id_label' => 'Log ID', 'created_at' => 'Date & Time', 'user' => 'User', 'type' => 'Type', 'type_create' => 'Create', 'type_update' => 'Update', 'type_delete' => 'Delete', 'theme_name' => 'Theme', 'theme_code' => 'Theme code', 'old_template' => 'Template (Old)', 'new_template' => 'Template (New)', 'template' => 'Template', 'diff' => 'Changes', 'old_value' => 'Old value', 'new_value' => 'New value', 'preview_title' => 'Template changes', 'template_updated' => 'Template was updated', 'template_created' => 'Template was created', 'template_deleted' => 'Template was deleted', ], ];
1
11,990
This already exists under the `theme` lang key; please remove it.
octobercms-october
php
@@ -223,6 +223,10 @@ class SetupUsingGCP extends Component { return ( <Fragment> <Header /> + { /* + Note: this component doesn't use hooks and thus can't access the + feature flags, so we don't render the HelpMenu here. + */ } <div className="googlesitekit-wizard"> <div className="mdc-layout-grid"> <div className="mdc-layout-grid__inner">
1
/** * SetupUsingGCP component. * * Site Kit by Google, Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * External dependencies */ import { delay } from 'lodash'; /** * WordPress dependencies */ import { __ } from '@wordpress/i18n'; import { Component, Fragment } from '@wordpress/element'; /** * Internal dependencies */ import Header from '../Header'; import Button from '../Button'; import Layout from '../layout/Layout'; import data, { TYPE_CORE } from '../data'; import { trackEvent, clearWebStorage, getSiteKitAdminURL } from '../../util'; import STEPS from './wizard-steps'; import WizardProgressStep from './wizard-progress-step'; class SetupUsingGCP extends Component { constructor( props ) { super( props ); const { connectURL } = global._googlesitekitLegacyData.admin; const { isAuthenticated, hasSearchConsoleProperty, isSiteKitConnected, isVerified, needReauthenticate, } = global._googlesitekitLegacyData.setup; const { canSetup } = global._googlesitekitLegacyData.permissions; this.state = { canSetup, isAuthenticated, isVerified, needReauthenticate, hasSearchConsoleProperty, hasSearchConsolePropertyFromTheStart: hasSearchConsoleProperty, connectURL, errorMsg: '', isSiteKitConnected, completeSetup: false, }; this.siteConnectedSetup = this.siteConnectedSetup.bind( this ); this.siteVerificationSetup = this.siteVerificationSetup.bind( this ); this.searchConsoleSetup = this.searchConsoleSetup.bind( this ); this.resetAndRestart = this.resetAndRestart.bind( this ); this.completeSetup = this.completeSetup.bind( this ); this.setErrorMessage = this.setErrorMessage.bind( this ); } async resetAndRestart() { await data.set( TYPE_CORE, 'site', 'reset' ); clearWebStorage(); this.setState( { isSiteKitConnected: false, isAuthenticated: false, isVerified: false, hasSearchConsoleProperty: false, completeSetup: false, errorMsg: '', } ); } completeSetup() { this.setState( { completeSetup: true, } ); } siteConnectedSetup( status ) { this.setState( { isSiteKitConnected: status, } ); } siteVerificationSetup( status ) { this.setState( { isVerified: status, } ); } searchConsoleSetup( status ) { this.setState( { hasSearchConsoleProperty: status, } ); } isSetupFinished() { const { isSiteKitConnected, isAuthenticated, isVerified, hasSearchConsoleProperty, completeSetup, } = this.state; return isSiteKitConnected && isAuthenticated && isVerified && hasSearchConsoleProperty && completeSetup; } setErrorMessage( errorMsg ) { this.setState( { errorMsg, } ); } getApplicableSteps() { const applicableSteps = STEPS; const slugs = Object.keys( applicableSteps ); let i; for ( i = 0; i < slugs.length; i++ ) { if ( ! applicableSteps[ slugs[ i ] ].isApplicable( this.state ) ) { delete applicableSteps[ slugs[ i ] ]; } } return applicableSteps; } currentStep( applicableSteps ) { const slugs = Object.keys( applicableSteps ); // Iterate through all steps (except the last one) and return the first one that is not completed. let i; for ( i = 0; i < slugs.length - 1; i++ ) { if ( ! 
applicableSteps[ slugs[ i ] ].isCompleted( this.state ) ) { return slugs[ i ]; } } // Return the last step only if all other steps are completed. return slugs[ i ]; } stepStatus( applicableSteps, step ) { if ( applicableSteps[ step ].isCompleted( this.state ) ) { return 'completed'; } const currentStep = this.currentStep( applicableSteps ); if ( step === currentStep ) { return 'inprogress'; } return ''; } render() { const { canSetup, isAuthenticated, isVerified, needReauthenticate, hasSearchConsoleProperty, connectURL, isSiteKitConnected, } = this.state; if ( this.isSetupFinished() ) { const redirectURL = getSiteKitAdminURL( 'googlesitekit-dashboard', { notification: 'authentication_success', }, ); delay( function() { global.location.replace( redirectURL ); }, 500, 'later' ); } const progressSteps = this.getApplicableSteps(); const currentStep = this.currentStep( progressSteps ); const WizardStepComponent = progressSteps[ currentStep ].Component; const wizardStepComponent = <WizardStepComponent siteConnectedSetup={ this.siteConnectedSetup } connectURL={ connectURL } siteVerificationSetup={ this.siteVerificationSetup } searchConsoleSetup={ this.searchConsoleSetup } completeSetup={ this.completeSetup } isSiteKitConnected={ isSiteKitConnected } isAuthenticated={ isAuthenticated } isVerified={ isVerified } needReauthenticate={ needReauthenticate } hasSearchConsoleProperty={ hasSearchConsoleProperty } setErrorMessage={ this.setErrorMessage } resetAndRestart={ progressSteps.clientCredentials ? this.resetAndRestart : undefined } />; const showVerificationSteps = canSetup; const showAuthenticateButton = ! showVerificationSteps && ! isAuthenticated; return ( <Fragment> <Header /> <div className="googlesitekit-wizard"> <div className="mdc-layout-grid"> <div className="mdc-layout-grid__inner"> <div className=" mdc-layout-grid__cell mdc-layout-grid__cell--span-12 "> <Layout> <section className="googlesitekit-wizard-progress"> <div className="mdc-layout-grid"> <div className="mdc-layout-grid__inner"> { showVerificationSteps && <div className=" mdc-layout-grid__cell mdc-layout-grid__cell--span-12 "> <div className="googlesitekit-wizard-progress__steps"> { Object.keys( progressSteps ).map( ( step, stepIndex ) => { return ( <WizardProgressStep key={ progressSteps[ step ].title } currentStep={ currentStep === step } title={ progressSteps[ step ].title } step={ stepIndex + 1 } status={ this.stepStatus( progressSteps, step ) } warning={ progressSteps[ step ].warning } error={ progressSteps[ step ].error } stepKey={ step } /> ); } ) } </div> </div> } </div> </div> { showAuthenticateButton && <div className="googlesitekit-setup__footer"> <div className="mdc-layout-grid"> <div className="mdc-layout-grid__inner"> <div className=" mdc-layout-grid__cell mdc-layout-grid__cell--span-12 "> <h1 className="googlesitekit-setup__title"> { __( 'Authenticate Site Kit', 'google-site-kit' ) } </h1> <p className="googlesitekit-setup__description"> { __( 'Please sign into your Google account to begin.', 'google-site-kit' ) } </p> <Button href="#" onClick={ async () => { await trackEvent( 'plugin_setup', 'signin_with_google' ); document.location = connectURL; } } >{ __( 'Sign in with Google', 'google-site-kit' ) }</Button> </div> </div> </div> </div> } </section> { showVerificationSteps && wizardStepComponent } </Layout> </div> </div> </div> </div> </Fragment> ); } } export default SetupUsingGCP;
1
36,664
Let's add a simple inline HOC around the default export below to provide the value as a prop (no need to introduce a reusable function for this yet).
google-site-kit-wp
js
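A minimal sketch of what the reviewer's inline-HOC suggestion above could look like for a class component such as `SetupUsingGCP`, replacing the file's existing `export default SetupUsingGCP;` line. The `useFeature` hook, its import path, and the `helpVisible` prop name are assumptions for illustration, not the actual Site Kit change.

```js
// Sketch only: an inline HOC around the default export so a hook-derived
// value can be passed to the class component as a prop.
// `useFeature`, its import path and the prop name are assumed here.
import { useFeature } from '../../hooks/useFeature';

const withFeatureProps = ( WrappedComponent ) => ( props ) => {
	const helpVisible = useFeature( 'helpVisibility' );
	return <WrappedComponent { ...props } helpVisible={ helpVisible } />;
};

export default withFeatureProps( SetupUsingGCP );
```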
@@ -29,7 +29,7 @@ module Beaker v_file << " v.vm.box = '#{host['box']}'\n" v_file << " v.vm.box_url = '#{host['box_url']}'\n" unless host['box_url'].nil? v_file << " v.vm.base_mac = '#{randmac}'\n" - v_file << " v.vm.network :private_network, ip: \"#{host['ip'].to_s}\", :netmask => \"255.255.0.0\"\n" + v_file << " v.vm.network :private_network, ip: \"#{host['ip'].to_s}\", :netmask => \"#{host['netmask'] ||= "255.255.0.0"}\"\n" v_file << " end\n" @logger.debug "created Vagrantfile for VagrantHost #{host.name}" end
1
require 'open3' module Beaker class Vagrant < Beaker::Hypervisor # Return a random mac address # # @return [String] a random mac address def randmac "080027" + (1..3).map{"%0.2X"%rand(256)}.join end def rand_chunk (2 + rand(252)).to_s #don't want a 0, 1, or a 255 end def randip "10.255.#{rand_chunk}.#{rand_chunk}" end def make_vfile hosts #HACK HACK HACK - add checks here to ensure that we have box + box_url #generate the VagrantFile v_file = "Vagrant.configure(\"2\") do |c|\n" hosts.each do |host| host['ip'] ||= randip #use the existing ip, otherwise default to a random ip v_file << " c.vm.define '#{host.name}' do |v|\n" v_file << " v.vm.hostname = '#{host.name}'\n" v_file << " v.vm.box = '#{host['box']}'\n" v_file << " v.vm.box_url = '#{host['box_url']}'\n" unless host['box_url'].nil? v_file << " v.vm.base_mac = '#{randmac}'\n" v_file << " v.vm.network :private_network, ip: \"#{host['ip'].to_s}\", :netmask => \"255.255.0.0\"\n" v_file << " end\n" @logger.debug "created Vagrantfile for VagrantHost #{host.name}" end v_file << " c.vm.provider :virtualbox do |vb|\n" v_file << " vb.customize [\"modifyvm\", :id, \"--memory\", \"1024\"]\n" v_file << " end\n" v_file << "end\n" File.open(@vagrant_file, 'w') do |f| f.write(v_file) end end def hack_etc_hosts hosts etc_hosts = "127.0.0.1\tlocalhost localhost.localdomain\n" hosts.each do |host| etc_hosts += "#{host['ip'].to_s}\t#{host.name}\n" end hosts.each do |host| set_etc_hosts(host, etc_hosts) end end def copy_ssh_to_root host #make is possible to log in as root by copying the ssh dir to root's account @logger.debug "Give root a copy of vagrant's keys" if host['platform'] =~ /windows/ host.exec(Command.new('sudo su -c "cp -r .ssh /home/Administrator/."')) else host.exec(Command.new('sudo su -c "cp -r .ssh /root/."')) end end def set_ssh_config host, user f = Tempfile.new("#{host.name}") ssh_config = Dir.chdir(@vagrant_path) do stdin, stdout, stderr, wait_thr = Open3.popen3('vagrant', 'ssh-config', host.name) if not wait_thr.value.success? 
raise "Failed to 'vagrant ssh-config' for #{host.name}" end stdout.read end #replace hostname with ip ssh_config = ssh_config.gsub(/#{host.name}/, host['ip']) unless not host['ip'] #set the user ssh_config = ssh_config.gsub(/User vagrant/, "User #{user}") f.write(ssh_config) f.rewind host['ssh'] = {:config => f.path()} host['user'] = user @temp_files << f end def initialize(vagrant_hosts, options) require 'tempfile' @options = options @logger = options[:logger] @temp_files = [] @vagrant_hosts = vagrant_hosts @vagrant_path = File.expand_path(File.join(File.basename(__FILE__), '..', '.vagrant', 'beaker_vagrant_files', File.basename(options[:hosts_file]))) FileUtils.mkdir_p(@vagrant_path) @vagrant_file = File.expand_path(File.join(@vagrant_path, "Vagrantfile")) end def provision if @options[:provision] #setting up new vagrant hosts #make sure that any old boxes are dead dead dead vagrant_cmd("destroy --force") if File.file?(@vagrant_file) make_vfile @vagrant_hosts vagrant_cmd("up") end @logger.debug "configure vagrant boxes (set ssh-config, switch to root user, hack etc/hosts)" @vagrant_hosts.each do |host| default_user = host['user'] set_ssh_config host, 'vagrant' copy_ssh_to_root host #shut down connection, will reconnect on next exec host.close set_ssh_config host, default_user end hack_etc_hosts @vagrant_hosts end def cleanup @logger.debug "removing temporory ssh-config files per-vagrant box" @temp_files.each do |f| f.close() end @logger.notify "Destroying vagrant boxes" vagrant_cmd("destroy --force") FileUtils.rm_rf(@vagrant_path) end def vagrant_cmd(args) Dir.chdir(@vagrant_path) do exit_status = 1 Open3.popen3("vagrant #{args}") {|stdin, stdout, stderr, wait_thr| while line = stdout.gets @logger.debug(line) end if not wait_thr.value.success? raise "Failed to exec 'vagrant #{args}'" end exit_status = wait_thr.value } if exit_status != 0 raise "Failed to execute vagrant_cmd ( #{args} )" end end end end end
1
4,927
I believe that you end up printing out the result of the assignment here instead of the netmask.
voxpupuli-beaker
rb
@@ -0,0 +1,11 @@ +export const CREDENTIALS = { + user: 'test', + password: 'test' +}; + +export const TARBALL = 'tarball-blahblah-file.name'; +export const PORT_SERVER_APP = '55550'; +export const PORT_SERVER_1 = '55551'; +export const PORT_SERVER_2 = '55552'; +export const PORT_SERVER_3 = '55553'; +export const DOMAIN_SERVERS = 'localhost';
1
1
18,071
We have to update the filename here.
verdaccio-verdaccio
js
@@ -121,6 +121,11 @@ std::string FlatCompiler::GetUsageString(const char *program_name) const { " (see the --cpp-str-flex-ctor option to change this behavior).\n" " --cpp-str-flex-ctor Don't construct custom string types by passing std::string\n" " from Flatbuffers, but (char* + length).\n" + " --cpp-field-case STYLE Generate C++ fields using selected case style.\n" + " Supported STYLE values:\n" + " * 'unchanged' - leave unchanged (default);\n" + " * 'upper' - schema snake_case emits UpperCamel;\n" + " * 'lower' - schema snake_case emits lowerCamel.\n" " --cpp-std CPP_STD Generate a C++ code using features of selected C++ standard.\n" " Supported CPP_STD values:\n" " * 'c++0x' - generate code compatible with old compilers;\n"
1
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "flatbuffers/flatc.h" #include <list> namespace flatbuffers { const char *FLATC_VERSION() { return FLATBUFFERS_VERSION(); } void FlatCompiler::ParseFile( flatbuffers::Parser &parser, const std::string &filename, const std::string &contents, std::vector<const char *> &include_directories) const { auto local_include_directory = flatbuffers::StripFileName(filename); include_directories.push_back(local_include_directory.c_str()); include_directories.push_back(nullptr); if (!parser.Parse(contents.c_str(), &include_directories[0], filename.c_str())) { Error(parser.error_, false, false); } if (!parser.error_.empty()) { Warn(parser.error_, false); } include_directories.pop_back(); include_directories.pop_back(); } void FlatCompiler::LoadBinarySchema(flatbuffers::Parser &parser, const std::string &filename, const std::string &contents) { if (!parser.Deserialize(reinterpret_cast<const uint8_t *>(contents.c_str()), contents.size())) { Error("failed to load binary schema: " + filename, false, false); } } void FlatCompiler::Warn(const std::string &warn, bool show_exe_name) const { params_.warn_fn(this, warn, show_exe_name); } void FlatCompiler::Error(const std::string &err, bool usage, bool show_exe_name) const { params_.error_fn(this, err, usage, show_exe_name); } std::string FlatCompiler::GetUsageString(const char *program_name) const { std::stringstream ss; ss << "Usage: " << program_name << " [OPTION]... FILE... [-- FILE...]\n"; for (size_t i = 0; i < params_.num_generators; ++i) { const Generator &g = params_.generators[i]; std::stringstream full_name; full_name << std::setw(16) << std::left << g.generator_opt_long; const char *name = g.generator_opt_short ? g.generator_opt_short : " "; const char *help = g.generator_help; ss << " " << full_name.str() << " " << name << " " << help << ".\n"; } // clang-format off // Output width // 12345678901234567890123456789012345678901234567890123456789012345678901234567890 ss << " -o PATH Prefix PATH to all generated files.\n" " -I PATH Search for includes in the specified path.\n" " -M Print make rules for generated files.\n" " --version Print the version number of flatc and exit.\n" " --strict-json Strict JSON: field names must be / will be quoted,\n" " no trailing commas in tables/vectors.\n" " --allow-non-utf8 Pass non-UTF-8 input through parser and emit nonstandard\n" " \\x escapes in JSON. (Default is to raise parse error on\n" " non-UTF-8 input.)\n" " --natural-utf8 Output strings with UTF-8 as human-readable strings.\n" " By default, UTF-8 characters are printed as \\uXXXX escapes.\n" " --defaults-json Output fields whose value is the default when\n" " writing JSON\n" " --unknown-json Allow fields in JSON that are not defined in the\n" " schema. 
These fields will be discared when generating\n" " binaries.\n" " --no-prefix Don\'t prefix enum values with the enum type in C++.\n" " --scoped-enums Use C++11 style scoped and strongly typed enums.\n" " also implies --no-prefix.\n" " --gen-includes (deprecated), this is the default behavior.\n" " If the original behavior is required (no include\n" " statements) use --no-includes.\n" " --no-includes Don\'t generate include statements for included\n" " schemas the generated file depends on (C++ / Python).\n" " --gen-mutable Generate accessors that can mutate buffers in-place.\n" " --gen-onefile Generate single output file for C# and Go.\n" " --gen-name-strings Generate type name functions for C++ and Rust.\n" " --gen-object-api Generate an additional object-based API.\n" " --gen-compare Generate operator== for object-based API types.\n" " --gen-nullable Add Clang _Nullable for C++ pointer. or @Nullable for Java\n" " --java-checkerframe work Add @Pure for Java.\n" " --gen-generated Add @Generated annotation for Java\n" " --gen-jvmstatic Add @JvmStatic annotation for Kotlin methods\n" " in companion object for interop from Java to Kotlin.\n" " --gen-all Generate not just code for the current schema files,\n" " but for all files it includes as well.\n" " If the language uses a single file for output (by default\n" " the case for C++ and JS), all code will end up in this one\n" " file.\n" " --cpp-include Adds an #include in generated file.\n" " --cpp-ptr-type T Set object API pointer type (default std::unique_ptr).\n" " --cpp-str-type T Set object API string type (default std::string).\n" " T::c_str(), T::length() and T::empty() must be supported.\n" " The custom type also needs to be constructible from std::string\n" " (see the --cpp-str-flex-ctor option to change this behavior).\n" " --cpp-str-flex-ctor Don't construct custom string types by passing std::string\n" " from Flatbuffers, but (char* + length).\n" " --cpp-std CPP_STD Generate a C++ code using features of selected C++ standard.\n" " Supported CPP_STD values:\n" " * 'c++0x' - generate code compatible with old compilers;\n" " * 'c++11' - use C++11 code generator (default);\n" " * 'c++17' - use C++17 features in generated code (experimental).\n" " --cpp-static-reflection When using C++17, generate extra code to provide compile-time\n" " (static) reflection of Flatbuffers types. 
Requires --cpp-std\n" " to be \"c++17\" or higher.\n" " --object-prefix Customise class prefix for C++ object-based API.\n" " --object-suffix Customise class suffix for C++ object-based API.\n" " Default value is \"T\".\n" " --go-namespace Generate the overriding namespace in Golang.\n" " --go-import Generate the overriding import for flatbuffers in Golang\n" " (default is \"github.com/google/flatbuffers/go\").\n" " --raw-binary Allow binaries without file_identifier to be read.\n" " This may crash flatc given a mismatched schema.\n" " --size-prefixed Input binaries are size prefixed buffers.\n" " --proto Input is a .proto, translate to .fbs.\n" " --proto-namespace-suffix Add this namespace to any flatbuffers generated\n" " SUFFIX from protobufs.\n" " --oneof-union Translate .proto oneofs to flatbuffer unions.\n" " --grpc Generate GRPC interfaces for the specified languages.\n" " --schema Serialize schemas instead of JSON (use with -b).\n" " --bfbs-comments Add doc comments to the binary schema files.\n" " --bfbs-builtins Add builtin attributes to the binary schema files.\n" " --bfbs-gen-embed Generate code to embed the bfbs schema to the source.\n" " --conform FILE Specify a schema the following schemas should be\n" " an evolution of. Gives errors if not.\n" " --conform-includes Include path for the schema given with --conform PATH\n" " --filename-suffix The suffix appended to the generated file names.\n" " Default is '_generated'.\n" " --filename-ext The extension appended to the generated file names.\n" " Default is language-specific (e.g., '.h' for C++)\n" " --include-prefix Prefix this path to any generated include statements.\n" " PATH\n" " --keep-prefix Keep original prefix of schema include statement.\n" " --reflect-types Add minimal type reflection to code generation.\n" " --reflect-names Add minimal type/name reflection.\n" " --root-type T Select or override the default root_type\n" " --require-explicit-ids When parsing schemas, require explicit ids (id: x).\n" " --force-defaults Emit default values in binary output from JSON\n" " --force-empty When serializing from object API representation,\n" " force strings and vectors to empty rather than null.\n" " --force-empty-vectors When serializing from object API representation,\n" " force vectors to empty rather than null.\n" " --flexbuffers Used with \"binary\" and \"json\" options, it generates\n" " data using schema-less FlexBuffers.\n" " --no-warnings Inhibit all warning messages.\n" "FILEs may be schemas (must end in .fbs), binary schemas (must end in .bfbs),\n" "or JSON files (conforming to preceding schema). 
FILEs after the -- must be\n" "binary flatbuffer format files.\n" "Output files are named using the base file name of the input,\n" "and written to the current directory or the path given by -o.\n" "example: " << program_name << " -c -b schema1.fbs schema2.fbs data.json\n"; // 12345678901234567890123456789012345678901234567890123456789012345678901234567890 // clang-format on return ss.str(); } int FlatCompiler::Compile(int argc, const char **argv) { if (params_.generators == nullptr || params_.num_generators == 0) { return 0; } flatbuffers::IDLOptions opts; std::string output_path; bool any_generator = false; bool print_make_rules = false; bool raw_binary = false; bool schema_binary = false; bool grpc_enabled = false; std::vector<std::string> filenames; std::list<std::string> include_directories_storage; std::vector<const char *> include_directories; std::vector<const char *> conform_include_directories; std::vector<bool> generator_enabled(params_.num_generators, false); size_t binary_files_from = std::numeric_limits<size_t>::max(); std::string conform_to_schema; for (int argi = 0; argi < argc; argi++) { std::string arg = argv[argi]; if (arg[0] == '-') { if (filenames.size() && arg[1] != '-') Error("invalid option location: " + arg, true); if (arg == "-o") { if (++argi >= argc) Error("missing path following: " + arg, true); output_path = flatbuffers::ConCatPathFileName( flatbuffers::PosixPath(argv[argi]), ""); } else if (arg == "-I") { if (++argi >= argc) Error("missing path following: " + arg, true); include_directories_storage.push_back( flatbuffers::PosixPath(argv[argi])); include_directories.push_back( include_directories_storage.back().c_str()); } else if (arg == "--conform") { if (++argi >= argc) Error("missing path following: " + arg, true); conform_to_schema = flatbuffers::PosixPath(argv[argi]); } else if (arg == "--conform-includes") { if (++argi >= argc) Error("missing path following: " + arg, true); include_directories_storage.push_back( flatbuffers::PosixPath(argv[argi])); conform_include_directories.push_back( include_directories_storage.back().c_str()); } else if (arg == "--include-prefix") { if (++argi >= argc) Error("missing path following: " + arg, true); opts.include_prefix = flatbuffers::ConCatPathFileName( flatbuffers::PosixPath(argv[argi]), ""); } else if (arg == "--keep-prefix") { opts.keep_include_path = true; } else if (arg == "--strict-json") { opts.strict_json = true; } else if (arg == "--allow-non-utf8") { opts.allow_non_utf8 = true; } else if (arg == "--natural-utf8") { opts.natural_utf8 = true; } else if (arg == "--go-namespace") { if (++argi >= argc) Error("missing golang namespace" + arg, true); opts.go_namespace = argv[argi]; } else if (arg == "--go-import") { if (++argi >= argc) Error("missing golang import" + arg, true); opts.go_import = argv[argi]; } else if (arg == "--defaults-json") { opts.output_default_scalars_in_json = true; } else if (arg == "--unknown-json") { opts.skip_unexpected_fields_in_json = true; } else if (arg == "--no-prefix") { opts.prefixed_enums = false; } else if (arg == "--scoped-enums") { opts.prefixed_enums = false; opts.scoped_enums = true; } else if (arg == "--no-union-value-namespacing") { opts.union_value_namespacing = false; } else if (arg == "--gen-mutable") { opts.mutable_buffer = true; } else if (arg == "--gen-name-strings") { opts.generate_name_strings = true; } else if (arg == "--gen-object-api") { opts.generate_object_based_api = true; } else if (arg == "--gen-compare") { opts.gen_compare = true; } else if (arg == 
"--cpp-include") { if (++argi >= argc) Error("missing include following: " + arg, true); opts.cpp_includes.push_back(argv[argi]); } else if (arg == "--cpp-ptr-type") { if (++argi >= argc) Error("missing type following: " + arg, true); opts.cpp_object_api_pointer_type = argv[argi]; } else if (arg == "--cpp-str-type") { if (++argi >= argc) Error("missing type following: " + arg, true); opts.cpp_object_api_string_type = argv[argi]; } else if (arg == "--cpp-str-flex-ctor") { opts.cpp_object_api_string_flexible_constructor = true; } else if (arg == "--no-cpp-direct-copy") { opts.cpp_direct_copy = false; } else if (arg == "--gen-nullable") { opts.gen_nullable = true; } else if (arg == "--java-checkerframework") { opts.java_checkerframework = true; } else if (arg == "--gen-generated") { opts.gen_generated = true; } else if (arg == "--object-prefix") { if (++argi >= argc) Error("missing prefix following: " + arg, true); opts.object_prefix = argv[argi]; } else if (arg == "--object-suffix") { if (++argi >= argc) Error("missing suffix following: " + arg, true); opts.object_suffix = argv[argi]; } else if (arg == "--gen-all") { opts.generate_all = true; opts.include_dependence_headers = false; } else if (arg == "--gen-includes") { // Deprecated, remove this option some time in the future. Warn("warning: --gen-includes is deprecated (it is now default)\n"); } else if (arg == "--no-includes") { opts.include_dependence_headers = false; } else if (arg == "--gen-onefile") { opts.one_file = true; } else if (arg == "--raw-binary") { raw_binary = true; } else if (arg == "--size-prefixed") { opts.size_prefixed = true; } else if (arg == "--") { // Separator between text and binary inputs. binary_files_from = filenames.size(); } else if (arg == "--proto") { opts.proto_mode = true; } else if (arg == "--proto-namespace-suffix") { if (++argi >= argc) Error("missing namespace suffix" + arg, true); opts.proto_namespace_suffix = argv[argi]; } else if (arg == "--oneof-union") { opts.proto_oneof_union = true; } else if (arg == "--schema") { schema_binary = true; } else if (arg == "-M") { print_make_rules = true; } else if (arg == "--version") { printf("flatc version %s\n", FLATC_VERSION()); exit(0); } else if (arg == "--grpc") { grpc_enabled = true; } else if (arg == "--bfbs-comments") { opts.binary_schema_comments = true; } else if (arg == "--bfbs-builtins") { opts.binary_schema_builtins = true; } else if (arg == "--bfbs-gen-embed") { opts.binary_schema_gen_embed = true; } else if (arg == "--reflect-types") { opts.mini_reflect = IDLOptions::kTypes; } else if (arg == "--reflect-names") { opts.mini_reflect = IDLOptions::kTypesAndNames; } else if (arg == "--require-explicit-ids") { opts.require_explicit_ids = true; } else if (arg == "--root-type") { if (++argi >= argc) Error("missing type following: " + arg, true); opts.root_type = argv[argi]; } else if (arg == "--filename-suffix") { if (++argi >= argc) Error("missing filename suffix: " + arg, true); opts.filename_suffix = argv[argi]; } else if (arg == "--filename-ext") { if (++argi >= argc) Error("missing filename extension: " + arg, true); opts.filename_extension = argv[argi]; } else if (arg == "--force-defaults") { opts.force_defaults = true; } else if (arg == "--force-empty") { opts.set_empty_strings_to_null = false; opts.set_empty_vectors_to_null = false; } else if (arg == "--force-empty-vectors") { opts.set_empty_vectors_to_null = false; } else if (arg == "--java-primitive-has-method") { opts.java_primitive_has_method = true; } else if (arg == 
"--cs-gen-json-serializer") { opts.cs_gen_json_serializer = true; } else if (arg == "--flexbuffers") { opts.use_flexbuffers = true; } else if (arg == "--gen-jvmstatic") { opts.gen_jvmstatic = true; } else if (arg == "--no-warnings") { opts.no_warnings = true; } else if (arg == "--cpp-std") { if (++argi >= argc) Error("missing C++ standard specification" + arg, true); opts.cpp_std = argv[argi]; } else if (arg.rfind("--cpp-std=", 0) == 0) { opts.cpp_std = arg.substr(std::string("--cpp-std=").size()); } else if (arg == "--cpp-static-reflection") { opts.cpp_static_reflection = true; } else { for (size_t i = 0; i < params_.num_generators; ++i) { if (arg == params_.generators[i].generator_opt_long || (params_.generators[i].generator_opt_short && arg == params_.generators[i].generator_opt_short)) { generator_enabled[i] = true; any_generator = true; opts.lang_to_generate |= params_.generators[i].lang; goto found; } } Error("unknown commandline argument: " + arg, true); found:; } } else { filenames.push_back(flatbuffers::PosixPath(argv[argi])); } } if (!filenames.size()) Error("missing input files", false, true); if (opts.proto_mode) { if (any_generator) Error("cannot generate code directly from .proto files", true); } else if (!any_generator && conform_to_schema.empty()) { Error("no options: specify at least one generator.", true); } flatbuffers::Parser conform_parser; if (!conform_to_schema.empty()) { std::string contents; if (!flatbuffers::LoadFile(conform_to_schema.c_str(), true, &contents)) Error("unable to load schema: " + conform_to_schema); if (flatbuffers::GetExtension(conform_to_schema) == reflection::SchemaExtension()) { LoadBinarySchema(conform_parser, conform_to_schema, contents); } else { ParseFile(conform_parser, conform_to_schema, contents, conform_include_directories); } } std::unique_ptr<flatbuffers::Parser> parser(new flatbuffers::Parser(opts)); for (auto file_it = filenames.begin(); file_it != filenames.end(); ++file_it) { auto &filename = *file_it; std::string contents; if (!flatbuffers::LoadFile(filename.c_str(), true, &contents)) Error("unable to load file: " + filename); bool is_binary = static_cast<size_t>(file_it - filenames.begin()) >= binary_files_from; auto ext = flatbuffers::GetExtension(filename); auto is_schema = ext == "fbs" || ext == "proto"; auto is_binary_schema = ext == reflection::SchemaExtension(); if (is_binary) { parser->builder_.Clear(); parser->builder_.PushFlatBuffer( reinterpret_cast<const uint8_t *>(contents.c_str()), contents.length()); if (!raw_binary) { // Generally reading binaries that do not correspond to the schema // will crash, and sadly there's no way around that when the binary // does not contain a file identifier. // We'd expect that typically any binary used as a file would have // such an identifier, so by default we require them to match. if (!parser->file_identifier_.length()) { Error("current schema has no file_identifier: cannot test if \"" + filename + "\" matches the schema, use --raw-binary to read this file" " anyway."); } else if (!flatbuffers::BufferHasIdentifier( contents.c_str(), parser->file_identifier_.c_str(), opts.size_prefixed)) { Error("binary \"" + filename + "\" does not have expected file_identifier \"" + parser->file_identifier_ + "\", use --raw-binary to read this file anyway."); } } } else { // Check if file contains 0 bytes. 
if (!opts.use_flexbuffers && !is_binary_schema && contents.length() != strlen(contents.c_str())) { Error("input file appears to be binary: " + filename, true); } if (is_schema) { // If we're processing multiple schemas, make sure to start each // one from scratch. If it depends on previous schemas it must do // so explicitly using an include. parser.reset(new flatbuffers::Parser(opts)); } if (is_binary_schema) { LoadBinarySchema(*parser.get(), filename, contents); } if (opts.use_flexbuffers) { if (opts.lang_to_generate == IDLOptions::kJson) { parser->flex_root_ = flexbuffers::GetRoot( reinterpret_cast<const uint8_t *>(contents.c_str()), contents.size()); } else { parser->flex_builder_.Clear(); ParseFile(*parser.get(), filename, contents, include_directories); } } else { ParseFile(*parser.get(), filename, contents, include_directories); if (!is_schema && !parser->builder_.GetSize()) { // If a file doesn't end in .fbs, it must be json/binary. Ensure we // didn't just parse a schema with a different extension. Error("input file is neither json nor a .fbs (schema) file: " + filename, true); } } if ((is_schema || is_binary_schema) && !conform_to_schema.empty()) { auto err = parser->ConformTo(conform_parser); if (!err.empty()) Error("schemas don\'t conform: " + err); } if (schema_binary || opts.binary_schema_gen_embed) { parser->Serialize(); } if (schema_binary) { parser->file_extension_ = reflection::SchemaExtension(); } } std::string filebase = flatbuffers::StripPath(flatbuffers::StripExtension(filename)); for (size_t i = 0; i < params_.num_generators; ++i) { parser->opts.lang = params_.generators[i].lang; if (generator_enabled[i]) { if (!print_make_rules) { flatbuffers::EnsureDirExists(output_path); if ((!params_.generators[i].schema_only || (is_schema || is_binary_schema)) && !params_.generators[i].generate(*parser.get(), output_path, filebase)) { Error(std::string("Unable to generate ") + params_.generators[i].lang_name + " for " + filebase); } } else { if (params_.generators[i].make_rule == nullptr) { Error(std::string("Cannot generate make rule for ") + params_.generators[i].lang_name); } else { std::string make_rule = params_.generators[i].make_rule( *parser.get(), output_path, filename); if (!make_rule.empty()) printf("%s\n", flatbuffers::WordWrap(make_rule, 80, " ", " \\").c_str()); } } if (grpc_enabled) { if (params_.generators[i].generateGRPC != nullptr) { if (!params_.generators[i].generateGRPC(*parser.get(), output_path, filebase)) { Error(std::string("Unable to generate GRPC interface for") + params_.generators[i].lang_name); } } else { Warn(std::string("GRPC interface generator not implemented for ") + params_.generators[i].lang_name); } } } } if (!opts.root_type.empty()) { if (!parser->SetRootType(opts.root_type.c_str())) Error("unknown root type: " + opts.root_type); else if (parser->root_struct_def_->fixed) Error("root type must be a table"); } if (opts.proto_mode) GenerateFBS(*parser.get(), output_path, filebase); // We do not want to generate code for the definitions in this file // in any files coming up next. parser->MarkGenerated(); } return 0; } } // namespace flatbuffers
1
21,387
The `--cpp-field-case` option looks like it should be `--cpp-field-style` rather than `-case`, with these values: 'unchanged' - leave unchanged (default); 'upper_camel' - upper camel case; 'lower_camel' - lower camel case.
google-flatbuffers
java
@@ -95,9 +95,6 @@ ot_admin_builtin_upgrade (int argc, char **argv, GCancellable *cancellable, GErr "override-commit", NULL); } - /* Should we consider requiring --discard-hotfix here? */ - origin_changed |= g_key_file_remove_key (origin, "origin", "unlocked", NULL); - if (origin_changed) { /* XXX GCancellable parameter is not used. */
1
/* -*- mode: C; c-file-style: "gnu"; indent-tabs-mode: nil; -*- * * Copyright (C) 2012 Colin Walters <[email protected]> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. * * Author: Colin Walters <[email protected]> */ #include "config.h" #include "ot-main.h" #include "ot-admin-builtins.h" #include "ot-admin-functions.h" #include "ostree.h" #include "otutil.h" #include <unistd.h> #include <stdlib.h> #include <glib/gi18n.h> static gboolean opt_reboot; static gboolean opt_allow_downgrade; static char *opt_osname; static char *opt_override_commit; static GOptionEntry options[] = { { "os", 0, 0, G_OPTION_ARG_STRING, &opt_osname, "Use a different operating system root than the current one", "OSNAME" }, { "reboot", 'r', 0, G_OPTION_ARG_NONE, &opt_reboot, "Reboot after a successful upgrade", NULL }, { "allow-downgrade", 0, 0, G_OPTION_ARG_NONE, &opt_allow_downgrade, "Permit deployment of chronologically older trees", NULL }, { "override-commit", 0, 0, G_OPTION_ARG_STRING, &opt_override_commit, "Deploy CHECKSUM instead of the latest tree", "CHECKSUM" }, { NULL } }; gboolean ot_admin_builtin_upgrade (int argc, char **argv, GCancellable *cancellable, GError **error) { gboolean ret = FALSE; GOptionContext *context; glnx_unref_object OstreeSysroot *sysroot = NULL; glnx_unref_object OstreeSysrootUpgrader *upgrader = NULL; g_autoptr(GFile) deployment_path = NULL; g_autoptr(GFile) deployment_origin_path = NULL; g_autoptr(GKeyFile) origin = NULL; glnx_unref_object OstreeAsyncProgress *progress = NULL; gboolean changed; OstreeSysrootUpgraderPullFlags upgraderpullflags = 0; context = g_option_context_new ("Construct new tree from current origin and deploy it, if it changed"); if (!ostree_admin_option_context_parse (context, options, &argc, &argv, OSTREE_ADMIN_BUILTIN_FLAG_SUPERUSER, &sysroot, cancellable, error)) goto out; if (!ostree_sysroot_load (sysroot, cancellable, error)) goto out; upgrader = ostree_sysroot_upgrader_new_for_os (sysroot, opt_osname, cancellable, error); if (!upgrader) goto out; origin = ostree_sysroot_upgrader_dup_origin (upgrader); if (origin != NULL) { gboolean origin_changed = FALSE; if (opt_override_commit != NULL) { /* Override the commit to pull and deploy. */ g_key_file_set_string (origin, "origin", "override-commit", opt_override_commit); origin_changed = TRUE; } else { /* Strip any override-commit from the origin file so * we always upgrade to the latest available commit. */ origin_changed = g_key_file_remove_key (origin, "origin", "override-commit", NULL); } /* Should we consider requiring --discard-hotfix here? */ origin_changed |= g_key_file_remove_key (origin, "origin", "unlocked", NULL); if (origin_changed) { /* XXX GCancellable parameter is not used. 
*/ if (!ostree_sysroot_upgrader_set_origin (upgrader, origin, NULL, error)) goto out; } } { g_auto(GLnxConsoleRef) console = { 0, }; glnx_console_lock (&console); if (console.is_tty) progress = ostree_async_progress_new_and_connect (ostree_repo_pull_default_console_progress_changed, &console); if (opt_allow_downgrade) upgraderpullflags |= OSTREE_SYSROOT_UPGRADER_PULL_FLAGS_ALLOW_OLDER; if (!ostree_sysroot_upgrader_pull (upgrader, 0, upgraderpullflags, progress, &changed, cancellable, error)) goto out; if (progress) ostree_async_progress_finish (progress); } if (!changed) { g_print ("No update available.\n"); } else { if (!ostree_sysroot_upgrader_deploy (upgrader, cancellable, error)) goto out; if (opt_reboot) { if (!ot_admin_execve_reboot (sysroot, error)) goto out; } } ret = TRUE; out: if (context) g_option_context_free (context); return ret; }
1
7,836
I haven't thought about this a lot yet, but if we were to try this change, we'd still need to keep the code to delete it from the `.origin` file for backcompat.
ostreedev-ostree
c
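To make the backwards-compatibility point in the comment above concrete: even if new code stops writing the key, the upgrade path would still need to strip it from origin files written by older versions. This is the existing call from the file shown with an explanatory comment, not a verified patch.

```c
/* Sketch: keep stripping the legacy key so origin files written by older
 * versions are still cleaned up on upgrade, even if nothing writes it now. */
origin_changed |= g_key_file_remove_key (origin, "origin", "unlocked", NULL);
```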
@@ -18,6 +18,9 @@ package org.hyperledger.besu.evmtool; import org.hyperledger.besu.config.GenesisConfigOptions; import org.hyperledger.besu.consensus.clique.CliqueProtocolSchedule; import org.hyperledger.besu.consensus.ibft.IbftBlockHeaderFunctions; +import org.hyperledger.besu.crypto.KeyPairSecurityModule; +import org.hyperledger.besu.crypto.NodeKey; +import org.hyperledger.besu.crypto.SECP256K1; import org.hyperledger.besu.ethereum.core.BlockHeaderFunctions; import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
1
/* * Copyright 2018 ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 * */ package org.hyperledger.besu.evmtool; import org.hyperledger.besu.config.GenesisConfigOptions; import org.hyperledger.besu.consensus.clique.CliqueProtocolSchedule; import org.hyperledger.besu.consensus.ibft.IbftBlockHeaderFunctions; import org.hyperledger.besu.ethereum.core.BlockHeaderFunctions; import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule; import javax.inject.Named; class CliqueGenesisFileModule extends GenesisFileModule { CliqueGenesisFileModule(final String genesisConfig) { super(genesisConfig); } @Override ProtocolSchedule<?> provideProtocolSchedule( final GenesisConfigOptions configOptions, @Named("RevertReasonEnabled") final boolean revertReasonEnabled) { // dagger can handle this magic one day return CliqueProtocolSchedule.create(configOptions, null, revertReasonEnabled); } @Override BlockHeaderFunctions blockHashFunction() { return IbftBlockHeaderFunctions.forOnChainBlock(); } }
1
22,462
In my opinion, it is possible to add `private final` here.
hyperledger-besu
java
@@ -265,7 +265,7 @@ func (p *Builder) writeProgramHeader() { p.b.LoadMapFD(R1, uint32(p.stateMapFD)) // R1 = 0 (64-bit immediate) p.b.Call(HelperMapLookupElem) // Call helper // Check return value for NULL. - p.b.JumpEqImm64(R0, 0, "deny") + p.b.JumpEqImm64(R0, 0, "exit") // Save state pointer in R9. p.b.Mov64(R9, R0) p.b.LabelNextInsn("policy")
1
// Copyright (c) 2020-2021 Tigera, Inc. All rights reserved. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package polprog import ( "fmt" "math" "math/bits" "strings" "github.com/projectcalico/felix/bpf/ipsets" "github.com/projectcalico/felix/bpf" log "github.com/sirupsen/logrus" . "github.com/projectcalico/felix/bpf/asm" "github.com/projectcalico/felix/bpf/state" "github.com/projectcalico/felix/ip" "github.com/projectcalico/felix/proto" "github.com/projectcalico/felix/rules" ) type Builder struct { b *Block tierID int policyID int ruleID int rulePartID int ipSetIDProvider ipSetIDProvider ipSetMapFD bpf.MapFD stateMapFD bpf.MapFD jumpMapFD bpf.MapFD } type ipSetIDProvider interface { GetNoAlloc(ipSetID string) uint64 } func NewBuilder(ipSetIDProvider ipSetIDProvider, ipsetMapFD, stateMapFD, jumpMapFD bpf.MapFD) *Builder { b := &Builder{ ipSetIDProvider: ipSetIDProvider, ipSetMapFD: ipsetMapFD, stateMapFD: stateMapFD, jumpMapFD: jumpMapFD, } return b } var offset int = 0 func nextOffset(size int, align int) int16 { offset -= size remainder := offset % align if remainder != 0 { // For negative numbers, the remainder is negative (e.g. -9 % 8 == -1) offset = offset - remainder - align } return int16(offset) } const ( // In Enterprise, there's an extra offset. stateEventHdrSize int16 = 0 ) var ( // Stack offsets. These are defined locally. offStateKey = nextOffset(4, 4) offSrcIPSetKey = nextOffset(ipsets.IPSetEntrySize, 8) offDstIPSetKey = nextOffset(ipsets.IPSetEntrySize, 8) // Offsets within the cal_tc_state struct. // WARNING: must be kept in sync with the definitions in bpf/include/jump.h. stateOffIPSrc int16 = stateEventHdrSize + 0 stateOffIPDst int16 = stateEventHdrSize + 4 _ = stateOffIPDst stateOffPreNATIPDst int16 = stateEventHdrSize + 8 _ = stateOffPreNATIPDst stateOffPostNATIPDst int16 = stateEventHdrSize + 12 stateOffPolResult int16 = stateEventHdrSize + 20 stateOffSrcPort int16 = stateEventHdrSize + 24 stateOffDstPort int16 = stateEventHdrSize + 26 stateOffICMPType = stateOffDstPort stateOffPreNATDstPort int16 = stateEventHdrSize + 28 _ = stateOffPreNATDstPort stateOffPostNATDstPort int16 = stateEventHdrSize + 30 stateOffIPProto int16 = stateEventHdrSize + 32 stateOffFlags int16 = stateEventHdrSize + 33 // Compile-time check that IPSetEntrySize hasn't changed; if it changes, the code will need to change. _ = [1]struct{}{{}}[20-ipsets.IPSetEntrySize] // Offsets within struct ip4_set_key. // WARNING: must be kept in sync with the definitions in bpf/ipsets/map.go. // WARNING: must be kept in sync with the definitions in bpf/include/policy.h. ipsKeyPrefix int16 = 0 ipsKeyID int16 = 4 ipsKeyAddr int16 = 12 ipsKeyPort int16 = 16 ipsKeyProto int16 = 18 ipsKeyPad int16 = 19 // Bits in the state flags field. 
FlagDestIsHost uint8 = 1 << 2 FlagSrcIsHost uint8 = 1 << 3 ) type Rule struct { *proto.Rule } type Policy struct { Name string Rules []Rule } type Tier struct { Name string EndAction TierEndAction Policies []Policy } type Rules struct { // Both workload and host interfaces can enforce host endpoint policy (carried here in the // Host... fields); in the case of a workload interface, that can only come from the // wildcard host endpoint, aka "host-*". // // However, only a workload interface can have any workload policy (carried here in the // Tiers and Profiles fields), and workload interfaces also Deny by default when there is no // workload policy at all. ForHostInterface (with reversed polarity) is the boolean that // tells us whether or not to implement workload policy and that default Deny. ForHostInterface bool // Indicates to suppress normal host policy because it's trumped by the setting of // DefaultEndpointToHostAction. SuppressNormalHostPolicy bool // Workload policy. Tiers []Tier Profiles []Profile // Host endpoint policy. HostPreDnatTiers []Tier HostForwardTiers []Tier HostNormalTiers []Tier HostProfiles []Profile // True when building a policy program for XDP, as opposed to for TC. This also means that // we are implementing untracked policy (provided in the HostNormalTiers field) and that // traffic is allowed to continue if not explicitly allowed or denied. ForXDP bool } type Profile = Policy type TierEndAction string const ( TierEndUndef TierEndAction = "" TierEndDeny TierEndAction = "deny" TierEndPass TierEndAction = "pass" ) func (p *Builder) Instructions(rules Rules) (Insns, error) { p.b = NewBlock() p.writeProgramHeader() if rules.ForXDP { // For an XDP program HostNormalTiers continues the untracked policy to enforce; // other fields are unused. goto normalPolicy } // Pre-DNAT policy: on a host interface, or host-* policy on a workload interface. Traffic // is allowed to continue if there is no applicable pre-DNAT policy. p.writeTiers(rules.HostPreDnatTiers, legDestPreNAT, "allowed_by_host_policy") // If traffic is to or from the local host, skip over any apply-on-forward policy. Note // that this case can be: // - on a workload interface, workload <--> own host // - on a host interface, this host (not a workload) <--> anywhere outside this host // // When rules.SuppressNormalHostPolicy is true, we also skip normal host policy; this is // the case when we're building the policy program for workload -> host and // DefaultEndpointToHostAction is ACCEPT or DROP; or for host -> workload. if rules.SuppressNormalHostPolicy { p.writeJumpIfToOrFromHost("allowed_by_host_policy") } else { p.writeJumpIfToOrFromHost("to_or_from_host") } // At this point we know we have traffic that is being forwarded through the host's root // network namespace. Note that this case can be: // - workload interface, workload <--> another local workload // - workload interface, workload <--> anywhere outside this host // - host interface, workload <--> anywhere outside this host // - host interface, anywhere outside this host <--> anywhere outside this host // Apply-On-Forward policy: on a host interface, or host-* policy on a workload interface. // Traffic is allowed to continue if there is no applicable AoF policy. p.writeTiers(rules.HostForwardTiers, legDest, "allowed_by_host_policy") // Now skip over normal host policy and jump to where we apply possible workload policy. p.b.Jump("allowed_by_host_policy") normalPolicy: if !rules.SuppressNormalHostPolicy { // "Normal" host policy, i.e. 
for non-forwarded traffic. p.b.LabelNextInsn("to_or_from_host") p.writeTiers(rules.HostNormalTiers, legDest, "allowed_by_host_policy") if rules.ForXDP { p.writeTiers(rules.HostNormalTiers, legDestPreNAT, "allowed_by_host_policy") p.b.Jump("xdp_pass") } else { p.writeTiers(rules.HostNormalTiers, legDest, "allowed_by_host_policy") p.writeProfiles(rules.HostProfiles, "allowed_by_host_policy") } } // End of host policy. p.b.LabelNextInsn("allowed_by_host_policy") if rules.ForHostInterface { // On a host interface there is no workload policy, so we are now done. p.b.Jump("allow") } else { // Workload policy. p.writeTiers(rules.Tiers, legDest, "allow") p.writeProfiles(rules.Profiles, "allow") } p.writeProgramFooter(rules.ForXDP) return p.b.Assemble() } // writeProgramHeader emits instructions to load the state from the state map, leaving // R6 = program context // R9 = pointer to state map func (p *Builder) writeProgramHeader() { // Preamble to the policy program. p.b.LabelNextInsn("start") p.b.Mov64(R6, R1) // Save R1 (context) in R6. // Zero-out the map key p.b.MovImm64(R1, 0) // R1 = 0 p.b.StoreStack32(R1, offStateKey) // Get pointer to map key in R2. p.b.Mov64(R2, R10) // R2 = R10 p.b.AddImm64(R2, int32(offStateKey)) // Load map file descriptor into R1. // clang uses a 64-bit load so copy that for now. p.b.LoadMapFD(R1, uint32(p.stateMapFD)) // R1 = 0 (64-bit immediate) p.b.Call(HelperMapLookupElem) // Call helper // Check return value for NULL. p.b.JumpEqImm64(R0, 0, "deny") // Save state pointer in R9. p.b.Mov64(R9, R0) p.b.LabelNextInsn("policy") } const ( jumpIdxPolicy = iota jumpIdxAllowed jumpIdxICMP _ = jumpIdxPolicy _ = jumpIdxICMP ) func (p *Builder) writeJumpIfToOrFromHost(label string) { // Load state flags. p.b.Load8(R1, R9, stateOffFlags) // Mask against host bits. p.b.AndImm32(R1, int32(FlagDestIsHost|FlagSrcIsHost)) // If non-zero, jump to specified label. p.b.JumpNEImm64(R1, 0, label) } // writeProgramFooter emits the program exit jump targets. func (p *Builder) writeProgramFooter(forXDP bool) { // Fall through here if there's no match. Also used when we hit an error or if policy rejects packet. p.b.LabelNextInsn("deny") if forXDP { p.b.MovImm64(R0, 1 /* XDP_DROP */) } else { p.b.MovImm64(R0, 2 /* TC_ACT_SHOT */) } p.b.Exit() if forXDP { p.b.LabelNextInsn("xdp_pass") p.b.MovImm64(R0, 2 /* XDP_PASS */) p.b.Exit() } if p.b.TargetIsUsed("allow") { p.b.LabelNextInsn("allow") // Store the policy result in the state for the next program to see. p.b.MovImm32(R1, int32(state.PolicyAllow)) p.b.Store32(R9, R1, stateOffPolResult) // Execute the tail call. p.b.Mov64(R1, R6) // First arg is the context. p.b.LoadMapFD(R2, uint32(p.jumpMapFD)) // Second arg is the map. p.b.MovImm32(R3, jumpIdxAllowed) // Third arg is the index (rather than a pointer to the index). p.b.Call(HelperTailCall) // Fall through if tail call fails. p.b.MovImm32(R1, state.PolicyTailCallFailed) p.b.Store32(R9, R1, stateOffPolResult) p.b.MovImm64(R0, 2 /* TC_ACT_SHOT */) p.b.Exit() } } func (p *Builder) setUpIPSetKey(ipsetID uint64, keyOffset, ipOffset, portOffset int16) { // TODO track whether we've already done an initialisation and skip the parts that don't change. // Zero the padding. p.b.MovImm64(R1, 0) // R1 = 0 p.b.StoreStack8(R1, keyOffset+ipsKeyPad) p.b.MovImm64(R1, 128) // R1 = 128 p.b.StoreStack32(R1, keyOffset+ipsKeyPrefix) // Store the IP address, port and protocol. 
p.b.Load32(R1, R9, ipOffset) p.b.StoreStack32(R1, keyOffset+ipsKeyAddr) p.b.Load16(R1, R9, portOffset) p.b.StoreStack16(R1, keyOffset+ipsKeyPort) p.b.Load8(R1, R9, stateOffIPProto) p.b.StoreStack8(R1, keyOffset+ipsKeyProto) // Store the IP set ID. It is 64-bit but, since it's a packed struct, we have to write it in two // 32-bit chunks. beIPSetID := bits.ReverseBytes64(ipsetID) p.b.MovImm32(R1, int32(beIPSetID)) p.b.StoreStack32(R1, keyOffset+ipsKeyID) p.b.MovImm32(R1, int32(beIPSetID>>32)) p.b.StoreStack32(R1, keyOffset+ipsKeyID+4) } func (p *Builder) writeTiers(tiers []Tier, destLeg matchLeg, allowLabel string) { actionLabels := map[string]string{ "allow": allowLabel, "deny": "deny", } for _, tier := range tiers { endOfTierLabel := fmt.Sprint("end_of_tier_", p.tierID) actionLabels["pass"] = endOfTierLabel actionLabels["next-tier"] = endOfTierLabel log.Debugf("Start of tier %d %q", p.tierID, tier.Name) for _, pol := range tier.Policies { p.writePolicy(pol, actionLabels, destLeg) } // End of tier rule. action := tier.EndAction if action == TierEndUndef { action = TierEndDeny } log.Debugf("End of tier %d %q: %s", p.tierID, tier.Name, action) p.writeRule(Rule{ Rule: &proto.Rule{}, }, actionLabels[string(action)], destLeg) p.b.LabelNextInsn(endOfTierLabel) p.tierID++ } } func (p *Builder) writeProfiles(profiles []Policy, allowLabel string) { log.Debugf("Start of profiles") for idx, prof := range profiles { p.writeProfile(prof, idx, allowLabel) } log.Debugf("End of profiles drop") p.writeRule(Rule{ Rule: &proto.Rule{}, }, "deny", legDest) } func (p *Builder) writePolicyRules(policy Policy, actionLabels map[string]string, destLeg matchLeg) { for ruleIdx, rule := range policy.Rules { log.Debugf("Start of rule %d", ruleIdx) action := strings.ToLower(rule.Action) if action == "log" { log.Debug("Skipping log rule. 
Not supported in BPF mode.") continue } p.writeRule(rule, actionLabels[action], destLeg) log.Debugf("End of rule %d", ruleIdx) } } func (p *Builder) writePolicy(policy Policy, actionLabels map[string]string, destLeg matchLeg) { log.Debugf("Start of policy %q %d", policy.Name, p.policyID) p.writePolicyRules(policy, actionLabels, destLeg) log.Debugf("End of policy %q %d", policy.Name, p.policyID) p.policyID++ } func (p *Builder) writeProfile(profile Profile, idx int, allowLabel string) { actionLabels := map[string]string{ "allow": allowLabel, "deny": "deny", "pass": "deny", "next-tier": "deny", } log.Debugf("Start of profile %q %d", profile.Name, idx) p.writePolicyRules(profile, actionLabels, legDest) log.Debugf("End of profile %q %d", profile.Name, idx) p.policyID++ } type matchLeg string const ( legSource matchLeg = "source" legDest matchLeg = "dest" legDestPreNAT matchLeg = "destPreNAT" ) func (leg matchLeg) offsetToStateIPAddressField() (offset int16) { if leg == legSource { offset = stateOffIPSrc } else if leg == legDestPreNAT { offset = stateOffPreNATIPDst } else { offset = stateOffPostNATIPDst } return } func (leg matchLeg) offsetToStatePortField() (portOffset int16) { if leg == legSource { portOffset = stateOffSrcPort } else if leg == legDestPreNAT { portOffset = stateOffPreNATDstPort } else { portOffset = stateOffPostNATDstPort } return } func (leg matchLeg) stackOffsetToIPSetKey() (keyOffset int16) { if leg == legSource { keyOffset = offSrcIPSetKey } else { keyOffset = offDstIPSetKey } return } func (p *Builder) writeRule(r Rule, actionLabel string, destLeg matchLeg) { if actionLabel == "" { log.Panic("empty action label") } rule := rules.FilterRuleToIPVersion(4, r.Rule) if rule == nil { log.Debugf("Version mismatch, skipping rule") return } p.writeStartOfRule() if rule.Protocol != nil { log.WithField("proto", rule.Protocol).Debugf("Protocol match") p.writeProtoMatch(false, rule.Protocol) } if rule.NotProtocol != nil { log.WithField("proto", rule.NotProtocol).Debugf("NotProtocol match") p.writeProtoMatch(true, rule.NotProtocol) } if len(rule.SrcNet) != 0 { log.WithField("cidrs", rule.SrcNet).Debugf("SrcNet match") p.writeCIDRSMatch(false, legSource, rule.SrcNet) } if len(rule.NotSrcNet) != 0 { log.WithField("cidrs", rule.NotSrcNet).Debugf("NotSrcNet match") p.writeCIDRSMatch(true, legSource, rule.NotSrcNet) } if len(rule.DstNet) != 0 { log.WithField("cidrs", rule.DstNet).Debugf("DstNet match") p.writeCIDRSMatch(false, destLeg, rule.DstNet) } if len(rule.NotDstNet) != 0 { log.WithField("cidrs", rule.NotDstNet).Debugf("NotDstNet match") p.writeCIDRSMatch(true, destLeg, rule.NotDstNet) } if len(rule.SrcIpSetIds) > 0 { log.WithField("ipSetIDs", rule.SrcIpSetIds).Debugf("SrcIpSetIds match") p.writeIPSetMatch(false, legSource, rule.SrcIpSetIds) } if len(rule.NotSrcIpSetIds) > 0 { log.WithField("ipSetIDs", rule.NotSrcIpSetIds).Debugf("NotSrcIpSetIds match") p.writeIPSetMatch(true, legSource, rule.NotSrcIpSetIds) } if len(rule.DstIpSetIds) > 1 { // We should only ever have one set here because they get combined in the calc graph. Enterprise // depends on that so we assert here too. log.WithField("rule", rule).Panic("proto.Rule has more than one DstIpSetIds") } if len(rule.DstIpSetIds) > 0 { // writeIPSetOrMatch used here because Enterprise has >1 IP set that need to be ORed together. 
log.WithField("ipSetIDs", rule.DstIpSetIds).Debugf("DstIpSetIds match") p.writeIPSetOrMatch(destLeg, rule.DstIpSetIds) } if len(rule.NotDstIpSetIds) > 0 { log.WithField("ipSetIDs", rule.NotDstIpSetIds).Debugf("NotDstIpSetIds match") p.writeIPSetMatch(true, destLeg, rule.NotDstIpSetIds) } if len(rule.DstIpPortSetIds) > 0 { log.WithField("ipPortSetIDs", rule.DstIpPortSetIds).Debugf("DstIpPortSetIds match") p.writeIPSetMatch(false, destLeg, rule.DstIpPortSetIds) } if len(rule.SrcPorts) > 0 || len(rule.SrcNamedPortIpSetIds) > 0 { log.WithField("ports", rule.SrcPorts).Debugf("SrcPorts match") p.writePortsMatch(false, legSource, rule.SrcPorts, rule.SrcNamedPortIpSetIds) } if len(rule.NotSrcPorts) > 0 || len(rule.NotSrcNamedPortIpSetIds) > 0 { log.WithField("ports", rule.NotSrcPorts).Debugf("NotSrcPorts match") p.writePortsMatch(true, legSource, rule.NotSrcPorts, rule.NotSrcNamedPortIpSetIds) } if len(rule.DstPorts) > 0 || len(rule.DstNamedPortIpSetIds) > 0 { log.WithField("ports", rule.DstPorts).Debugf("DstPorts match") p.writePortsMatch(false, destLeg, rule.DstPorts, rule.DstNamedPortIpSetIds) } if len(rule.NotDstPorts) > 0 || len(rule.NotDstNamedPortIpSetIds) > 0 { log.WithField("ports", rule.NotDstPorts).Debugf("NotDstPorts match") p.writePortsMatch(true, destLeg, rule.NotDstPorts, rule.NotDstNamedPortIpSetIds) } if rule.Icmp != nil { log.WithField("icmpv4", rule.Icmp).Debugf("ICMP type/code match") switch icmp := rule.Icmp.(type) { case *proto.Rule_IcmpTypeCode: p.writeICMPTypeCodeMatch(false, uint8(icmp.IcmpTypeCode.Type), uint8(icmp.IcmpTypeCode.Code)) case *proto.Rule_IcmpType: p.writeICMPTypeMatch(false, uint8(icmp.IcmpType)) } } if rule.NotIcmp != nil { log.WithField("icmpv4", rule.Icmp).Debugf("Not ICMP type/code match") switch icmp := rule.NotIcmp.(type) { case *proto.Rule_NotIcmpTypeCode: p.writeICMPTypeCodeMatch(true, uint8(icmp.NotIcmpTypeCode.Type), uint8(icmp.NotIcmpTypeCode.Code)) case *proto.Rule_NotIcmpType: p.writeICMPTypeMatch(true, uint8(icmp.NotIcmpType)) } } p.writeEndOfRule(r, actionLabel) p.ruleID++ p.rulePartID = 0 } func (p *Builder) writeStartOfRule() { } func (p *Builder) writeEndOfRule(rule Rule, actionLabel string) { // If all the match criteria are met, we fall through to the end of the rule // so all that's left to do is to jump to the relevant action. // TODO log and log-and-xxx actions p.b.Jump(actionLabel) p.b.LabelNextInsn(p.endOfRuleLabel()) } func (p *Builder) writeProtoMatch(negate bool, protocol *proto.Protocol) { p.b.Load8(R1, R9, stateOffIPProto) protoNum := protocolToNumber(protocol) if negate { p.b.JumpEqImm64(R1, int32(protoNum), p.endOfRuleLabel()) } else { p.b.JumpNEImm64(R1, int32(protoNum), p.endOfRuleLabel()) } } func (p *Builder) writeICMPTypeMatch(negate bool, icmpType uint8) { p.b.Load8(R1, R9, stateOffICMPType) if negate { p.b.JumpEqImm64(R1, int32(icmpType), p.endOfRuleLabel()) } else { p.b.JumpNEImm64(R1, int32(icmpType), p.endOfRuleLabel()) } } func (p *Builder) writeICMPTypeCodeMatch(negate bool, icmpType, icmpCode uint8) { p.b.Load16(R1, R9, stateOffICMPType) if negate { p.b.JumpEqImm64(R1, (int32(icmpCode)<<8)|int32(icmpType), p.endOfRuleLabel()) } else { p.b.JumpNEImm64(R1, (int32(icmpCode)<<8)|int32(icmpType), p.endOfRuleLabel()) } } func (p *Builder) writeCIDRSMatch(negate bool, leg matchLeg, cidrs []string) { p.b.Load32(R1, R9, leg.offsetToStateIPAddressField()) var onMatchLabel string if negate { // Match negated, if we match any CIDR then we jump to the next rule. 
onMatchLabel = p.endOfRuleLabel() } else { // Match is non-negated, if we match, got to the next match criteria. onMatchLabel = p.freshPerRuleLabel() } for _, cidrStr := range cidrs { cidr := ip.MustParseCIDROrIP(cidrStr) addrU32 := bits.ReverseBytes32(cidr.Addr().(ip.V4Addr).AsUint32()) // TODO IPv6 maskU32 := bits.ReverseBytes32(math.MaxUint32 << (32 - cidr.Prefix()) & math.MaxUint32) p.b.MovImm32(R2, int32(maskU32)) p.b.And32(R2, R1) p.b.JumpEqImm32(R2, int32(addrU32), onMatchLabel) } if !negate { // If we fall through then none of the CIDRs matched so the rule doesn't match. p.b.Jump(p.endOfRuleLabel()) // Label the next match so we can skip to it on success. p.b.LabelNextInsn(onMatchLabel) } } func (p *Builder) writeIPSetMatch(negate bool, leg matchLeg, ipSets []string) { // IP sets are different to CIDRs, if we have multiple IP sets then they all have to match // so we treat them as independent match criteria. for _, ipSetID := range ipSets { id := p.ipSetIDProvider.GetNoAlloc(ipSetID) if id == 0 { log.WithField("setID", ipSetID).Panic("Failed to look up IP set ID.") } keyOffset := leg.stackOffsetToIPSetKey() p.setUpIPSetKey(id, keyOffset, leg.offsetToStateIPAddressField(), leg.offsetToStatePortField()) p.b.LoadMapFD(R1, uint32(p.ipSetMapFD)) p.b.Mov64(R2, R10) p.b.AddImm64(R2, int32(keyOffset)) p.b.Call(HelperMapLookupElem) if negate { // Negated; if we got a hit (non-0) then the rule doesn't match. // (Otherwise we fall through to the next match criteria.) p.b.JumpNEImm64(R0, 0, p.endOfRuleLabel()) } else { // Non-negated; if we got a miss (0) then the rule can't match. // (Otherwise we fall through to the next match criteria.) p.b.JumpEqImm64(R0, 0, p.endOfRuleLabel()) } } } // Match if packet matches ANY of the given IP sets. func (p *Builder) writeIPSetOrMatch(leg matchLeg, ipSets []string) { onMatchLabel := p.freshPerRuleLabel() for _, ipSetID := range ipSets { id := p.ipSetIDProvider.GetNoAlloc(ipSetID) if id == 0 { log.WithField("setID", ipSetID).Panic("Failed to look up IP set ID.") } keyOffset := leg.stackOffsetToIPSetKey() p.setUpIPSetKey(id, keyOffset, leg.offsetToStateIPAddressField(), leg.offsetToStatePortField()) p.b.LoadMapFD(R1, uint32(p.ipSetMapFD)) p.b.Mov64(R2, R10) p.b.AddImm64(R2, int32(keyOffset)) p.b.Call(HelperMapLookupElem) // If we got a hit (non-0) then packet matches one of the IP sets. // (Otherwise we fall through to try the next IP set.) p.b.JumpNEImm64(R0, 0, onMatchLabel) } // If packet reaches here, it hasn't matched any of the IP sets. p.b.Jump(p.endOfRuleLabel()) // Label the next match so we can skip to it on success. p.b.LabelNextInsn(onMatchLabel) } func (p *Builder) writePortsMatch(negate bool, leg matchLeg, ports []*proto.PortRange, namedPorts []string) { // For a ports match, numeric ports and named ports are ORed together. Check any // numeric ports first and then any named ports. var onMatchLabel string if negate { // Match negated, if we match any port then we jump to the next rule. onMatchLabel = p.endOfRuleLabel() } else { // Match is non-negated, if we match, go to the next match criteria. onMatchLabel = p.freshPerRuleLabel() } // R1 = port to test against. p.b.Load16(R1, R9, leg.offsetToStatePortField()) for _, portRange := range ports { if portRange.First == portRange.Last { // Optimisation, single port, just do a comparison. p.b.JumpEqImm64(R1, portRange.First, onMatchLabel) } else { // Port range, var skipToNextPortLabel string if portRange.First > 0 { // If port is too low, skip to next port. 
skipToNextPortLabel = p.freshPerRuleLabel() p.b.JumpLTImm64(R1, portRange.First, skipToNextPortLabel) } // If port is in range, got a match, otherwise fall through to next port. p.b.JumpLEImm64(R1, portRange.Last, onMatchLabel) if portRange.First > 0 { p.b.LabelNextInsn(skipToNextPortLabel) } } } for _, ipSetID := range namedPorts { id := p.ipSetIDProvider.GetNoAlloc(ipSetID) if id == 0 { log.WithField("setID", ipSetID).Panic("Failed to look up IP set ID.") } keyOffset := leg.stackOffsetToIPSetKey() p.setUpIPSetKey(id, keyOffset, leg.offsetToStateIPAddressField(), leg.offsetToStatePortField()) p.b.LoadMapFD(R1, uint32(p.ipSetMapFD)) p.b.Mov64(R2, R10) p.b.AddImm64(R2, int32(keyOffset)) p.b.Call(HelperMapLookupElem) p.b.JumpNEImm64(R0, 0, onMatchLabel) } if !negate { // If we fall through then none of the ports matched so the rule doesn't match. p.b.Jump(p.endOfRuleLabel()) // Label the next match so we can skip to it on success. p.b.LabelNextInsn(onMatchLabel) } } func (p *Builder) freshPerRuleLabel() string { part := p.rulePartID p.rulePartID++ return fmt.Sprintf("rule_%d_part_%d", p.ruleID, part) } func (p *Builder) endOfRuleLabel() string { return fmt.Sprintf("rule_%d_no_match", p.ruleID) } func protocolToNumber(protocol *proto.Protocol) uint8 { var pcol uint8 switch p := protocol.NumberOrName.(type) { case *proto.Protocol_Name: switch strings.ToLower(p.Name) { case "tcp": pcol = 6 case "udp": pcol = 17 case "icmp": pcol = 1 case "sctp": pcol = 132 } case *proto.Protocol_Number: pcol = uint8(p.Number) } return pcol }
1
19,373
Feel like "exit" doesn't convey that the packet will be dropped. "drop-and-exit" or "error-exit" maybe?
projectcalico-felix
go
@@ -1330,7 +1330,7 @@ static bool check_main_create(pass_opt_t* opt, ast_t* ast) if(ast_childcount(params) != 1) { ast_error(opt->check.errors, params, - "the create constructor of a Main actor must take a single Env " + "A Main actor must have a create constructor which takes a single Env " "parameter"); ok = false; }
1
#include "reference.h" #include "literal.h" #include "postfix.h" #include "call.h" #include "../pass/expr.h" #include "../pass/names.h" #include "../pass/flatten.h" #include "../type/subtype.h" #include "../type/assemble.h" #include "../type/alias.h" #include "../type/viewpoint.h" #include "../type/cap.h" #include "../type/reify.h" #include "../type/lookup.h" #include "../ast/astbuild.h" #include "../ast/id.h" #include "../../libponyrt/mem/pool.h" #include <string.h> #include <assert.h> /** * Make sure the definition of something occurs before its use. This is for * both fields and local variable. */ bool def_before_use(pass_opt_t* opt, ast_t* def, ast_t* use, const char* name) { if((ast_line(def) > ast_line(use)) || ((ast_line(def) == ast_line(use)) && (ast_pos(def) > ast_pos(use)))) { ast_error(opt->check.errors, use, "declaration of '%s' appears after use", name); ast_error_continue(opt->check.errors, def, "declaration of '%s' appears here", name); return false; } return true; } static bool is_assigned_to(ast_t* ast, bool check_result_needed) { while(true) { ast_t* parent = ast_parent(ast); switch(ast_id(parent)) { case TK_ASSIGN: { // Has to be the left hand side of an assignment. Left and right sides // are swapped, so we must be the second child. if(ast_childidx(parent, 1) != ast) return false; if(!check_result_needed) return true; // The result of that assignment can't be used. return !is_result_needed(parent); } case TK_SEQ: { // Might be in a tuple on the left hand side. if(ast_childcount(parent) > 1) return false; break; } case TK_TUPLE: break; default: return false; } ast = parent; } } static bool is_constructed_from(pass_opt_t* opt, ast_t* ast, ast_t* type) { ast_t* parent = ast_parent(ast); if(ast_id(parent) != TK_DOT) return false; AST_GET_CHILDREN(parent, left, right); ast_t* find = lookup_try(opt, parent, type, ast_name(right)); if(find == NULL) return false; bool ok = ast_id(find) == TK_NEW; ast_free_unattached(find); return ok; } static bool valid_reference(pass_opt_t* opt, ast_t* ast, ast_t* type, sym_status_t status) { if(is_constructed_from(opt, ast, type)) return true; switch(status) { case SYM_DEFINED: return true; case SYM_CONSUMED: if(is_assigned_to(ast, true)) return true; ast_error(opt->check.errors, ast, "can't use a consumed local in an expression"); return false; case SYM_UNDEFINED: if(is_assigned_to(ast, true)) return true; ast_error(opt->check.errors, ast, "can't use an undefined variable in an expression"); return false; default: {} } assert(0); return false; } static bool check_provides(pass_opt_t* opt, ast_t* type, ast_t* provides, errorframe_t* errorf) { bool ok = true; switch(ast_id(provides)) { case TK_NONE: return true; case TK_PROVIDES: case TK_ISECTTYPE: { for(ast_t* child = ast_child(provides); child != NULL; child = ast_sibling(child)) { ok = check_provides(opt, type, child, errorf) && ok; } return ok; } case TK_NOMINAL: return is_sub_provides(type, provides, errorf, opt); default: {} } assert(0); return false; } bool expr_provides(pass_opt_t* opt, ast_t* ast) { // Check that the type actually provides everything it declares. // Since the traits pass has completed, all method imports have already // happened. At this point, we need to check that the type is a structural // subtype of all traits and interfaces it declares as provided. 
AST_GET_CHILDREN(ast, id, typeparams, cap, provides); ast_t* type = type_for_this(opt, ast, TK_REF, TK_NONE, true); errorframe_t err = NULL; if(!check_provides(opt, type, provides, &err)) { errorframe_t err2 = NULL; ast_error_frame(&err2, ast, "type does not implement its provides list"); errorframe_append(&err2, &err); errorframe_report(&err2, opt->check.errors); return false; } return true; } bool expr_param(pass_opt_t* opt, ast_t* ast) { AST_GET_CHILDREN(ast, id, type, init); ast_settype(ast, type); bool ok = true; if(ast_id(init) != TK_NONE) { // Initialiser type must match declared type. if(!coerce_literals(&init, type, opt)) return false; ast_t* init_type = ast_type(init); if(is_typecheck_error(init_type)) return false; init_type = alias(init_type); errorframe_t err = NULL; if(!is_subtype(init_type, type, &err, opt)) { errorframe_t err2 = NULL; ast_error_frame(&err2, init, "default argument is not a subtype of the parameter type"); errorframe_append(&err2, &err); errorframe_report(&err2, opt->check.errors); ok = false; } ast_free_unattached(init_type); } return ok; } bool expr_field(pass_opt_t* opt, ast_t* ast) { AST_GET_CHILDREN(ast, id, type, init, delegates); bool ok = true; for(ast_t* del = ast_child(delegates); del != NULL; del = ast_sibling(del)) { errorframe_t err = NULL; if(!is_subtype(type, del, &err, opt)) { errorframe_t err2 = NULL; ast_error_frame(&err2, ast, "field not a subtype of delegate"); errorframe_append(&err2, &err); errorframe_report(&err2, opt->check.errors); ok = false; } } if(ok) ast_settype(ast, type); return ok; } bool expr_fieldref(pass_opt_t* opt, ast_t* ast, ast_t* find, token_id tid) { AST_GET_CHILDREN(ast, left, right); ast_t* l_type = ast_type(left); if(is_typecheck_error(l_type)) return false; AST_GET_CHILDREN(find, id, f_type, init); // Viewpoint adapted type of the field. ast_t* type = viewpoint_type(l_type, f_type); if(ast_id(type) == TK_ARROW) { ast_t* upper = viewpoint_upper(type); if(upper == NULL) { ast_error(opt->check.errors, ast, "can't read a field through %s", ast_print_type(l_type)); return false; } ast_free_unattached(upper); } // In a recover expression, we can access obj.field if field is sendable // and not being assigned to, even if obj isn't sendable. typecheck_t* t = &opt->check; if(t->frame->recover != NULL) { if(!sendable(type)) { if(!sendable(l_type)) { errorframe_t frame = NULL; ast_error_frame(&frame, ast, "can't access field of non-sendable " "object inside of a recover expression"); ast_error_frame(&frame, find, "this would be possible if the field was " "sendable"); errorframe_report(&frame, opt->check.errors); return false; } } else { ast_t* parent = ast_parent(ast); ast_t* current = ast; while(ast_id(parent) != TK_RECOVER && ast_id(parent) != TK_ASSIGN) { current = parent; parent = ast_parent(parent); } if(ast_id(parent) == TK_ASSIGN && ast_child(parent) != current) { errorframe_t frame = NULL; ast_error_frame(&frame, ast, "can't access field of non-sendable " "object inside of a recover expression"); ast_error_frame(&frame, parent, "this would be possible if the field " "wasn't assigned to"); errorframe_report(&frame, opt->check.errors); return false; } } } // Set the unadapted field type. ast_settype(right, f_type); // Set the type so that it isn't free'd as unattached. ast_setid(ast, tid); ast_settype(ast, type); if(ast_id(left) == TK_THIS) { // Handle symbol status if the left side is 'this'. 
const char* name = ast_name(id); sym_status_t status; ast_get(ast, name, &status); if(!valid_reference(opt, ast, type, status)) return false; } return true; } bool expr_typeref(pass_opt_t* opt, ast_t** astp) { ast_t* ast = *astp; assert(ast_id(ast) == TK_TYPEREF); ast_t* type = ast_type(ast); if(is_typecheck_error(type)) return false; switch(ast_id(ast_parent(ast))) { case TK_QUALIFY: // Doesn't have to be valid yet. break; case TK_DOT: // Has to be valid. if(!expr_nominal(opt, &type)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } break; case TK_CALL: { // Has to be valid. if(!expr_nominal(opt, &type)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } // Transform to a default constructor. ast_t* dot = ast_from(ast, TK_DOT); ast_add(dot, ast_from_string(ast, "create")); ast_swap(ast, dot); *astp = dot; ast_add(dot, ast); if(!expr_dot(opt, astp)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } ast_t* ast = *astp; // If the default constructor has no parameters, transform to an apply // call. if((ast_id(ast) == TK_NEWREF) || (ast_id(ast) == TK_NEWBEREF)) { type = ast_type(ast); if(is_typecheck_error(type)) return false; assert(ast_id(type) == TK_FUNTYPE); AST_GET_CHILDREN(type, cap, typeparams, params, result); if(ast_id(params) == TK_NONE) { // Add a call node. ast_t* call = ast_from(ast, TK_CALL); ast_add(call, ast_from(call, TK_NONE)); // Named ast_add(call, ast_from(call, TK_NONE)); // Positional ast_swap(ast, call); ast_append(call, ast); if(!expr_call(opt, &call)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } // Add a dot node. ast_t* apply = ast_from(call, TK_DOT); ast_add(apply, ast_from_string(call, "apply")); ast_swap(call, apply); ast_add(apply, call); if(!expr_dot(opt, &apply)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } } } return true; } default: { // Has to be valid. if(!expr_nominal(opt, &type)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } // Transform to a default constructor. ast_t* dot = ast_from(ast, TK_DOT); ast_add(dot, ast_from_string(ast, "create")); ast_swap(ast, dot); ast_add(dot, ast); // Call the default constructor with no arguments. ast_t* call = ast_from(ast, TK_CALL); ast_swap(dot, call); ast_add(call, dot); // Receiver comes last. ast_add(call, ast_from(ast, TK_NONE)); // Named args. ast_add(call, ast_from(ast, TK_NONE)); // Positional args. 
*astp = call; if(!expr_dot(opt, &dot)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } if(!expr_call(opt, astp)) { ast_settype(ast, ast_from(type, TK_ERRORTYPE)); ast_free_unattached(type); return false; } break; } } return true; } static const char* suggest_alt_name(ast_t* ast, const char* name) { assert(ast != NULL); assert(name != NULL); size_t name_len = strlen(name); if(is_name_private(name)) { // Try without leading underscore const char* try_name = stringtab(name + 1); if(ast_get(ast, try_name, NULL) != NULL) return try_name; } else { // Try with a leading underscore char* buf = (char*)ponyint_pool_alloc_size(name_len + 2); buf[0] = '_'; strncpy(buf + 1, name, name_len + 1); const char* try_name = stringtab_consume(buf, name_len + 2); if(ast_get(ast, try_name, NULL) != NULL) return try_name; } // Try with a different case (without crossing type/value boundary) ast_t* case_ast = ast_get_case(ast, name, NULL); if(case_ast != NULL) { ast_t* id = case_ast; if(ast_id(id) != TK_ID) id = ast_child(id); assert(ast_id(id) == TK_ID); const char* try_name = ast_name(id); if(ast_get(ast, try_name, NULL) != NULL) return try_name; } // Give up return NULL; } bool expr_reference(pass_opt_t* opt, ast_t** astp) { typecheck_t* t = &opt->check; ast_t* ast = *astp; // Everything we reference must be in scope. const char* name = ast_name(ast_child(ast)); sym_status_t status; ast_t* def = ast_get(ast, name, &status); if(def == NULL) { const char* alt_name = suggest_alt_name(ast, name); if(alt_name == NULL) ast_error(opt->check.errors, ast, "can't find declaration of '%s'", name); else ast_error(opt->check.errors, ast, "can't find declaration of '%s', did you mean '%s'?", name, alt_name); return false; } switch(ast_id(def)) { case TK_PACKAGE: { // Only allowed if in a TK_DOT with a type. if(ast_id(ast_parent(ast)) != TK_DOT) { ast_error(opt->check.errors, ast, "a package can only appear as a prefix to a type"); return false; } ast_setid(ast, TK_PACKAGEREF); return true; } case TK_INTERFACE: case TK_TRAIT: case TK_TYPE: case TK_TYPEPARAM: case TK_PRIMITIVE: case TK_STRUCT: case TK_CLASS: case TK_ACTOR: { // It's a type name. This may not be a valid type, since it may need // type arguments. ast_t* id = ast_child(def); const char* name = ast_name(id); ast_t* type = type_sugar(ast, NULL, name); ast_settype(ast, type); ast_setid(ast, TK_TYPEREF); return expr_typeref(opt, astp); } case TK_FVAR: case TK_FLET: case TK_EMBED: { // Transform to "this.f". if(!def_before_use(opt, def, ast, name)) return false; ast_t* dot = ast_from(ast, TK_DOT); ast_add(dot, ast_child(ast)); ast_t* self = ast_from(ast, TK_THIS); ast_add(dot, self); ast_replace(astp, dot); if(!expr_this(opt, self)) return false; return expr_dot(opt, astp); } case TK_PARAM: { if(t->frame->def_arg != NULL) { ast_error(opt->check.errors, ast, "can't reference a parameter in a default argument"); return false; } if(!def_before_use(opt, def, ast, name)) return false; ast_t* type = ast_type(def); if(is_typecheck_error(type)) return false; if(!valid_reference(opt, ast, type, status)) return false; if(!sendable(type) && (t->frame->recover != NULL)) { ast_error(opt->check.errors, ast, "can't access a non-sendable " "parameter from inside a recover expression"); return false; } // Get the type of the parameter and attach it to our reference. // Automatically consume a parameter if the function is done. 
ast_t* r_type = type; if(is_method_return(t, ast)) r_type = consume_type(type, TK_NONE); ast_settype(ast, r_type); ast_setid(ast, TK_PARAMREF); return true; } case TK_NEW: case TK_BE: case TK_FUN: { // Transform to "this.f". ast_t* dot = ast_from(ast, TK_DOT); ast_add(dot, ast_child(ast)); ast_t* self = ast_from(ast, TK_THIS); ast_add(dot, self); ast_replace(astp, dot); if(!expr_this(opt, self)) return false; return expr_dot(opt, astp); } case TK_ID: { if(!def_before_use(opt, def, ast, name)) return false; ast_t* type = ast_type(def); if(is_typecheck_error(type)) return false; if(type != NULL && ast_id(type) == TK_INFERTYPE) { ast_error(opt->check.errors, ast, "cannot infer type of %s\n", ast_nice_name(def)); ast_settype(def, ast_from(def, TK_ERRORTYPE)); ast_settype(ast, ast_from(ast, TK_ERRORTYPE)); return false; } if(!valid_reference(opt, ast, type, status)) return false; ast_t* var = ast_parent(def); switch(ast_id(var)) { case TK_VAR: ast_setid(ast, TK_VARREF); break; case TK_LET: case TK_MATCH_CAPTURE: ast_setid(ast, TK_LETREF); break; default: assert(0); return false; } if(!sendable(type)) { if(t->frame->recover != NULL) { ast_t* def_recover = ast_nearest(def, TK_RECOVER); if(t->frame->recover != def_recover) { ast_error(opt->check.errors, ast, "can't access a non-sendable " "local defined outside of a recover expression from within " "that recover expression"); return false; } } } // Get the type of the local and attach it to our reference. // Automatically consume a local if the function is done. ast_t* r_type = type; if(is_method_return(t, ast)) r_type = consume_type(type, TK_NONE); ast_settype(ast, r_type); return true; } default: {} } assert(0); return false; } bool expr_local(pass_opt_t* opt, ast_t* ast) { assert(ast != NULL); assert(ast_type(ast) != NULL); AST_GET_CHILDREN(ast, id, type); assert(type != NULL); if(ast_id(type) == TK_NONE) { // No type specified, infer it later if(!is_assigned_to(ast, false)) { ast_error(opt->check.errors, ast, "locals must specify a type or be assigned a value"); return false; } } else if(ast_id(ast) == TK_LET) { // Let, check we have a value assigned if(!is_assigned_to(ast, false)) { ast_error(opt->check.errors, ast, "can't declare a let local without assigning to it"); return false; } } return true; } bool expr_addressof(pass_opt_t* opt, ast_t* ast) { // Check if we're in an FFI call. ast_t* parent = ast_parent(ast); bool ok = false; if(ast_id(parent) == TK_SEQ) { parent = ast_parent(parent); if(ast_id(parent) == TK_POSITIONALARGS) { parent = ast_parent(parent); if(ast_id(parent) == TK_FFICALL) ok = true; } } if(!ok) { ast_error(opt->check.errors, ast, "the addressof operator can only be used for FFI arguments"); return false; } ast_t* expr = ast_child(ast); switch(ast_id(expr)) { case TK_FVARREF: case TK_VARREF: case TK_FUNREF: case TK_BEREF: break; case TK_FLETREF: ast_error(opt->check.errors, ast, "can't take the address of a let field"); return false; case TK_EMBEDREF: ast_error(opt->check.errors, ast, "can't take the address of an embed field"); return false; case TK_LETREF: ast_error(opt->check.errors, ast, "can't take the address of a let local"); return false; case TK_PARAMREF: ast_error(opt->check.errors, ast, "can't take the address of a function parameter"); return false; default: ast_error(opt->check.errors, ast, "can only take the address of a local, field or method"); return false; } // Set the type to Pointer[ast_type(expr)]. Set to Pointer[None] for function // pointers. 
ast_t* expr_type = ast_type(expr); if(is_typecheck_error(expr_type)) return false; switch(ast_id(expr)) { case TK_FUNREF: case TK_BEREF: expr_type = type_builtin(opt, ast, "None"); break; default: {} } ast_t* type = type_pointer_to(opt, expr_type); ast_settype(ast, type); return true; } bool expr_digestof(pass_opt_t* opt, ast_t* ast) { ast_t* expr = ast_child(ast); switch(ast_id(expr)) { case TK_FVARREF: case TK_FLETREF: case TK_EMBEDREF: case TK_VARREF: case TK_LETREF: case TK_PARAMREF: case TK_THIS: break; default: ast_error(opt->check.errors, ast, "can only get the digest of a field, local, parameter or this"); return false; } // Set the type to U64. ast_t* type = type_builtin(opt, expr, "U64"); ast_settype(ast, type); return true; } bool expr_dontcare(pass_opt_t* opt, ast_t* ast) { // We are a tuple element. That tuple must either be a pattern or the LHS // of an assignment. It can be embedded in other tuples, which may appear // in sequences. ast_t* tuple = ast_parent(ast); if(ast_id(tuple) == TK_TUPLE) { ast_t* parent = ast_parent(tuple); while((ast_id(parent) == TK_TUPLE) || (ast_id(parent) == TK_SEQ)) { tuple = parent; parent = ast_parent(tuple); } switch(ast_id(parent)) { case TK_ASSIGN: { AST_GET_CHILDREN(parent, right, left); if(tuple == left) { ast_settype(ast, ast); return true; } break; } case TK_CASE: { AST_GET_CHILDREN(parent, pattern, guard, body); if(tuple == pattern) { ast_settype(ast, ast); return true; } break; } default: {} } } ast_error(opt->check.errors, ast, "the don't care token can only appear " "in a tuple, either on the LHS of an assignment or in a pattern"); return false; } bool expr_this(pass_opt_t* opt, ast_t* ast) { typecheck_t* t = &opt->check; if(t->frame->def_arg != NULL) { ast_error(opt->check.errors, ast, "can't reference 'this' in a default argument"); return false; } sym_status_t status; ast_get(ast, stringtab("this"), &status); if(status == SYM_CONSUMED) { ast_error(opt->check.errors, ast, "can't use a consumed 'this' in an expression"); return false; } assert(status == SYM_NONE); token_id cap = cap_for_this(t); if(!cap_sendable(cap) && (t->frame->recover != NULL)) { ast_t* parent = ast_parent(ast); if(ast_id(parent) != TK_DOT) cap = TK_TAG; } bool make_arrow = false; if(cap == TK_BOX) { cap = TK_REF; make_arrow = true; } ast_t* type = type_for_this(opt, ast, cap, TK_NONE, false); if(make_arrow) { BUILD(arrow, ast, NODE(TK_ARROW, NODE(TK_THISTYPE) TREE(type))); type = arrow; } // Get the nominal type, which may be the right side of an arrow type. ast_t* nominal; bool arrow; if(ast_id(type) == TK_NOMINAL) { nominal = type; arrow = false; } else { nominal = ast_childidx(type, 1); arrow = true; } ast_t* typeargs = ast_childidx(nominal, 2); ast_t* typearg = ast_child(typeargs); while(typearg != NULL) { if(!expr_nominal(opt, &typearg)) { ast_error(opt->check.errors, ast, "couldn't create a type for 'this'"); ast_free(type); return false; } typearg = ast_sibling(typearg); } if(!expr_nominal(opt, &nominal)) { ast_error(opt->check.errors, ast, "couldn't create a type for 'this'"); ast_free(type); return false; } // Unless this is a field lookup, treat an incomplete `this` as a tag. 
ast_t* parent = ast_parent(ast); bool incomplete_ok = false; if((ast_id(parent) == TK_DOT) && (ast_child(parent) == ast)) { ast_t* right = ast_sibling(ast); assert(ast_id(right) == TK_ID); ast_t* find = lookup_try(opt, ast, nominal, ast_name(right)); if(find != NULL) { switch(ast_id(find)) { case TK_FVAR: case TK_FLET: case TK_EMBED: incomplete_ok = true; break; default: {} } } } if(!incomplete_ok && is_this_incomplete(t, ast)) { ast_t* tag_type = set_cap_and_ephemeral(nominal, TK_TAG, TK_NONE); ast_replace(&nominal, tag_type); } if(arrow) type = ast_parent(nominal); else type = nominal; ast_settype(ast, type); return true; } bool expr_tuple(pass_opt_t* opt, ast_t* ast) { ast_t* child = ast_child(ast); ast_t* type; if(ast_sibling(child) == NULL) { type = ast_type(child); } else { type = ast_from(ast, TK_TUPLETYPE); while(child != NULL) { ast_t* c_type = ast_type(child); if(c_type == NULL) return false; if(is_control_type(c_type)) { ast_error(opt->check.errors, child, "a tuple can't contain a control flow expression"); return false; } if(is_type_literal(c_type)) { // At least one tuple member is literal, so whole tuple is ast_free(type); make_literal_type(ast); ast_inheritflags(ast); return true; } ast_append(type, c_type); child = ast_sibling(child); } } ast_settype(ast, type); ast_inheritflags(ast); return true; } bool expr_nominal(pass_opt_t* opt, ast_t** astp) { // Resolve type aliases and typeparam references. if(!names_nominal(opt, *astp, astp, true)) return false; ast_t* ast = *astp; switch(ast_id(ast)) { case TK_TYPEPARAMREF: return flatten_typeparamref(opt, ast) == AST_OK; case TK_NOMINAL: break; default: return true; } // If still nominal, check constraints. ast_t* def = (ast_t*)ast_data(ast); // Special case: don't check the constraint of a Pointer or an Array. These // builtin types have no contraint on their type parameter, and it is safe // to bind a struct as a type argument (which is not safe on any user defined // type, as that type might then be used for pattern matching). if(is_pointer(ast) || is_literal(ast, "Array")) return true; ast_t* typeparams = ast_childidx(def, 1); ast_t* typeargs = ast_childidx(ast, 2); if(!reify_defaults(typeparams, typeargs, true, opt)) return false; if(is_maybe(ast)) { // MaybePointer[A] must be bound to a struct. 
assert(ast_childcount(typeargs) == 1); ast_t* typeparam = ast_child(typeparams); ast_t* typearg = ast_child(typeargs); bool ok = false; switch(ast_id(typearg)) { case TK_NOMINAL: { ast_t* def = (ast_t*)ast_data(typearg); ok = ast_id(def) == TK_STRUCT; break; } case TK_TYPEPARAMREF: { ast_t* def = (ast_t*)ast_data(typearg); ok = def == typeparam; break; } default: {} } if(!ok) { ast_error(opt->check.errors, ast, "%s is not allowed: " "the type argument to MaybePointer must be a struct", ast_print_type(ast)); return false; } return true; } return check_constraints(typeargs, typeparams, typeargs, true, opt); } static bool show_partiality(pass_opt_t* opt, ast_t* ast) { ast_t* child = ast_child(ast); bool found = false; while(child != NULL) { if(ast_canerror(child)) found |= show_partiality(opt, child); child = ast_sibling(child); } if(found) return true; if(ast_canerror(ast)) { ast_error(opt->check.errors, ast, "an error can be raised here"); return true; } return false; } static bool check_fields_defined(pass_opt_t* opt, ast_t* ast) { assert(ast_id(ast) == TK_NEW); ast_t* members = ast_parent(ast); ast_t* member = ast_child(members); bool result = true; while(member != NULL) { switch(ast_id(member)) { case TK_FVAR: case TK_FLET: case TK_EMBED: { sym_status_t status; ast_t* id = ast_child(member); ast_t* def = ast_get(ast, ast_name(id), &status); if((def != member) || (status != SYM_DEFINED)) { ast_error(opt->check.errors, def, "field left undefined in constructor"); result = false; } break; } default: {} } member = ast_sibling(member); } if(!result) ast_error(opt->check.errors, ast, "constructor with undefined fields is here"); return result; } static bool check_return_type(pass_opt_t* opt, ast_t* ast) { AST_GET_CHILDREN(ast, cap, id, typeparams, params, type, can_error, body); ast_t* body_type = ast_type(body); if(is_typecheck_error(body_type)) return false; // The last statement is an error, and we've already checked any return // expressions in the method. if(is_control_type(body_type)) return true; // If it's a compiler intrinsic, ignore it. if(ast_id(body_type) == TK_COMPILE_INTRINSIC) return true; // The body type must match the return type, without subsumption, or an alias // of the body type must be a subtype of the return type. 
ast_t* a_type = alias(type); ast_t* a_body_type = alias(body_type); bool ok = true; errorframe_t info = NULL; if(!is_subtype(body_type, type, &info, opt) || !is_subtype(a_body_type, a_type, &info, opt)) { errorframe_t frame = NULL; ast_t* last = ast_childlast(body); ast_error_frame(&frame, last, "function body isn't the result type"); ast_error_frame(&frame, type, "function return type: %s", ast_print_type(type)); ast_error_frame(&frame, body_type, "function body type: %s", ast_print_type(body_type)); errorframe_append(&frame, &info); errorframe_report(&frame, opt->check.errors); ok = false; } ast_free_unattached(a_type); ast_free_unattached(a_body_type); return ok; } static bool check_main_create(pass_opt_t* opt, ast_t* ast) { if(ast_id(opt->check.frame->type) != TK_ACTOR) return true; ast_t* type_id = ast_child(opt->check.frame->type); if(strcmp(ast_name(type_id), "Main")) return true; AST_GET_CHILDREN(ast, cap, id, typeparams, params, result, can_error); if(strcmp(ast_name(id), "create")) return true; bool ok = true; if(ast_id(ast) != TK_NEW) { ast_error(opt->check.errors, ast, "the create method of a Main actor must be a constructor"); ok = false; } if(ast_id(typeparams) != TK_NONE) { ast_error(opt->check.errors, typeparams, "the create constructor of a Main actor must not be polymorphic"); ok = false; } if(ast_childcount(params) != 1) { ast_error(opt->check.errors, params, "the create constructor of a Main actor must take a single Env " "parameter"); ok = false; } ast_t* param = ast_child(params); if(param != NULL) { ast_t* p_type = ast_childidx(param, 1); if(!is_env(p_type)) { ast_error(opt->check.errors, p_type, "must be of type Env"); ok = false; } } return ok; } static bool check_primitive_init(pass_opt_t* opt, ast_t* ast) { if(ast_id(opt->check.frame->type) != TK_PRIMITIVE) return true; AST_GET_CHILDREN(ast, cap, id, typeparams, params, result, can_error); if(strcmp(ast_name(id), "_init")) return true; bool ok = true; if(ast_id(ast_childidx(opt->check.frame->type, 1)) != TK_NONE) { ast_error(opt->check.errors, ast, "a primitive with type parameters cannot have an _init"); ok = false; } if(ast_id(ast) != TK_FUN) { ast_error(opt->check.errors, ast, "a primitive _init must be a function"); ok = false; } if(ast_id(cap) != TK_BOX) { ast_error(opt->check.errors, cap, "a primitive _init must be box"); ok = false; } if(ast_id(typeparams) != TK_NONE) { ast_error(opt->check.errors, typeparams, "a primitive _init must not be polymorphic"); ok = false; } if(ast_childcount(params) != 0) { ast_error(opt->check.errors, params, "a primitive _init must take no parameters"); ok = false; } if(!is_none(result)) { ast_error(opt->check.errors, result, "a primitive _init must return None"); ok = false; } if(ast_id(can_error) != TK_NONE) { ast_error(opt->check.errors, can_error, "a primitive _init cannot raise an error"); ok = false; } return ok; } static bool check_finaliser(pass_opt_t* opt, ast_t* ast) { AST_GET_CHILDREN(ast, cap, id, typeparams, params, result, can_error, body); if(strcmp(ast_name(id), "_final")) return true; bool ok = true; if((ast_id(opt->check.frame->type) == TK_PRIMITIVE) && (ast_id(ast_childidx(opt->check.frame->type, 1)) != TK_NONE)) { ast_error(opt->check.errors, ast, "a primitive with type parameters cannot have a _final"); ok = false; } if(ast_id(ast) != TK_FUN) { ast_error(opt->check.errors, ast, "_final must be a function"); ok = false; } if(ast_id(cap) != TK_BOX) { ast_error(opt->check.errors, cap, "_final must be box"); ok = false; } if(ast_id(typeparams) != TK_NONE) { 
ast_error(opt->check.errors, typeparams, "_final must not be polymorphic"); ok = false; } if(ast_childcount(params) != 0) { ast_error(opt->check.errors, params, "_final must not have parameters"); ok = false; } if(!is_none(result)) { ast_error(opt->check.errors, result, "_final must return None"); ok = false; } if(ast_id(can_error) != TK_NONE) { ast_error(opt->check.errors, can_error, "_final cannot raise an error"); ok = false; } return ok; } bool expr_fun(pass_opt_t* opt, ast_t* ast) { typecheck_t* t = &opt->check; AST_GET_CHILDREN(ast, cap, id, typeparams, params, type, can_error, body); if(ast_id(body) == TK_NONE) return true; if(!coerce_literals(&body, type, opt)) return false; bool is_trait = (ast_id(t->frame->type) == TK_TRAIT) || (ast_id(t->frame->type) == TK_INTERFACE) || (ast_id((ast_t*)ast_data(ast)) == TK_TRAIT) || (ast_id((ast_t*)ast_data(ast)) == TK_INTERFACE); // Check partial functions. if(ast_id(can_error) == TK_QUESTION) { // If a partial function, check that we might actually error. ast_t* body_type = ast_type(body); if(body_type == NULL) { // An error has already occurred. assert(errors_get_count(t->errors) > 0); return false; } if(!is_trait && !ast_canerror(body) && (ast_id(body_type) != TK_COMPILE_INTRINSIC)) { ast_error(opt->check.errors, can_error, "function body is not partial but the function is"); return false; } } else { // If not a partial function, check that we can't error. if(ast_canerror(body)) { ast_error(opt->check.errors, can_error, "function body is partial but the function is not"); show_partiality(opt, body); return false; } } if(!check_primitive_init(opt, ast) || !check_finaliser(opt, ast)) return false; if(!check_main_create(opt, ast)) return false; switch(ast_id(ast)) { case TK_NEW: { bool ok = true; if(is_machine_word(type)) { if(!check_return_type(opt, ast)) ok = false; } if(!check_fields_defined(opt, ast)) ok = false; return ok; } case TK_FUN: return check_return_type(opt, ast); default: {} } return true; } bool expr_compile_intrinsic(pass_opt_t* opt, ast_t* ast) { (void)opt; ast_settype(ast, ast_from(ast, TK_COMPILE_INTRINSIC)); return true; }
1
8,786
How do you feel about "The Main actor" instead of "A Main actor", since we're already changing the message here?
ponylang-ponyc
c
@@ -38,6 +38,7 @@ public interface PermissionNameProvider { COLL_READ_PERM("collection-admin-read", null), CORE_READ_PERM("core-admin-read", null), CORE_EDIT_PERM("core-admin-edit", null), + ZK_READ_PERM("zk-read", null), READ_PERM("read", "*"), UPDATE_PERM("update", "*"), CONFIG_EDIT_PERM("config-edit", unmodifiableSet(new HashSet<>(asList("*", null)))),
1
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.security; import java.util.HashSet; import java.util.Map; import java.util.Set; import static java.util.Arrays.asList; import static java.util.Collections.singleton; import static java.util.Collections.unmodifiableMap; import static java.util.Collections.unmodifiableSet; import static java.util.function.Function.identity; import static java.util.stream.Collectors.toMap; /** * A requestHandler should implement this interface to provide the well known permission * at request time */ public interface PermissionNameProvider { enum Name { COLL_EDIT_PERM("collection-admin-edit", null), COLL_READ_PERM("collection-admin-read", null), CORE_READ_PERM("core-admin-read", null), CORE_EDIT_PERM("core-admin-edit", null), READ_PERM("read", "*"), UPDATE_PERM("update", "*"), CONFIG_EDIT_PERM("config-edit", unmodifiableSet(new HashSet<>(asList("*", null)))), CONFIG_READ_PERM("config-read", "*"), SCHEMA_READ_PERM("schema-read", "*"), SCHEMA_EDIT_PERM("schema-edit", "*"), SECURITY_EDIT_PERM("security-edit", null), SECURITY_READ_PERM("security-read", null), METRICS_READ_PERM("metrics-read", null), AUTOSCALING_READ_PERM("autoscaling-read", null), AUTOSCALING_WRITE_PERM("autoscaling-write", null), AUTOSCALING_HISTORY_READ_PERM("autoscaling-history-read", null), METRICS_HISTORY_READ_PERM("metrics-history-read", null), FILESTORE_READ_PERM("filestore-read", null), FILESTORE_WRITE_PERM("filestore-write", null), PACKAGE_EDIT_PERM("package-edit", null), PACKAGE_READ_PERM("package-read", null), ALL("all", unmodifiableSet(new HashSet<>(asList("*", null)))) ; final String name; final Set<String> collName; Name(String s, Object collName) { name = s; this.collName = collName instanceof Set? (Set<String>)collName : singleton((String)collName); } public static Name get(String s) { return values.get(s); } public String getPermissionName() { return name; } } Set<String> NULL = singleton(null); Set<String> ANY = singleton("*"); Map<String, Name> values = unmodifiableMap(asList(Name.values()).stream().collect(toMap(Name::getPermissionName, identity()))); Name getPermissionName(AuthorizationContext request); }
1
33,045
I cannot see this new permission being used anywhere. And if the new ZooKeeper handler is covered by `zk-read`, shouldn't the existing `ZookeeperInfoHandler` also implement PermissionNameProvider and declare the same permission, for consistency?
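For context on the consistency point: a request handler opts into a permission simply by implementing PermissionNameProvider and returning the matching Name constant. The sketch below is hypothetical (the class name is illustrative and it omits the real handler base class); it only shows how the ZK_READ_PERM added in the patch above would be declared by a handler such as the existing ZookeeperInfoHandler.

import org.apache.solr.security.AuthorizationContext;
import org.apache.solr.security.PermissionNameProvider;

// Hypothetical sketch, not the actual Solr handler: declares that every request
// this handler serves is guarded by the new "zk-read" permission.
public class ZkReadGuardedHandlerSketch implements PermissionNameProvider {
  @Override
  public Name getPermissionName(AuthorizationContext request) {
    return Name.ZK_READ_PERM; // maps to the permission name "zk-read"
  }
}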
apache-lucene-solr
java
@@ -103,7 +103,7 @@ def as_spark_type(tpe) -> types.DataType: """ # TODO: Add "boolean" and "string" types. # ArrayType - if tpe in (np.ndarray,): + if tpe in (list, np.ndarray,): return types.ArrayType(types.StringType()) elif hasattr(tpe, "__origin__") and issubclass(tpe.__origin__, list): return types.ArrayType(as_spark_type(tpe.__args__[0]))
1
# # Copyright (C) 2019 Databricks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Utilities to deal with types. This is mostly focused on python3. """ import typing import datetime import decimal from inspect import getfullargspec, isclass import numpy as np import pandas as pd from pandas.api.types import is_datetime64_dtype, is_datetime64tz_dtype import pyarrow as pa import pyspark.sql.types as types try: from pyspark.sql.types import to_arrow_type, from_arrow_type except ImportError: from pyspark.sql.pandas.types import to_arrow_type, from_arrow_type from databricks import koalas as ks # For running doctests and reference resolution in PyCharm. from databricks.koalas.typedef.string_typehints import resolve_string_type_hint T = typing.TypeVar("T") Scalar = typing.Union[ int, float, bool, str, bytes, decimal.Decimal, datetime.date, datetime.datetime, None ] # A column of data, with the data type. class SeriesType(typing.Generic[T]): def __init__(self, tpe): self.tpe = tpe # type: types.DataType def __repr__(self): return "SeriesType[{}]".format(self.tpe) class DataFrameType(object): def __init__(self, tpe, names=None): if names is None: # Default names `c0, c1, ... cn`. self.tpe = types.StructType( [types.StructField("c%s" % i, tpe[i]) for i in range(len(tpe))] ) # type: types.StructType else: self.tpe = types.StructType( [types.StructField(n, t) for n, t in zip(names, tpe)] ) # type: types.StructType def __repr__(self): return "DataFrameType[{}]".format(self.tpe) # The type is a scalar type that is furthermore understood by Spark. class ScalarType(object): def __init__(self, tpe): self.tpe = tpe # type: types.DataType def __repr__(self): return "ScalarType[{}]".format(self.tpe) # The type is left unspecified or we do not know about this type. class UnknownType(object): def __init__(self, tpe): self.tpe = tpe def __repr__(self): return "UnknownType[{}]".format(self.tpe) class NameTypeHolder(object): name = None tpe = None def as_spark_type(tpe) -> types.DataType: """ Given a Python type, returns the equivalent spark type. Accepts: - the built-in types in Python - the built-in types in numpy - list of pairs of (field_name, type) - dictionaries of field_name -> type - Python3's typing system """ # TODO: Add "boolean" and "string" types. # ArrayType if tpe in (np.ndarray,): return types.ArrayType(types.StringType()) elif hasattr(tpe, "__origin__") and issubclass(tpe.__origin__, list): return types.ArrayType(as_spark_type(tpe.__args__[0])) # BinaryType elif tpe in (bytes, np.character, np.bytes_, np.string_): return types.BinaryType() # BooleanType elif tpe in (bool, np.bool, "bool", "?"): return types.BooleanType() # DateType elif tpe in (datetime.date,): return types.DateType() # NumericType elif tpe in (np.int8, np.byte, "int8", "byte", "b"): return types.ByteType() elif tpe in (decimal.Decimal,): # TODO: considering about the precision & scale for decimal type. 
return types.DecimalType(38, 18) elif tpe in (float, np.float, np.float64, "float", "float64", "double"): return types.DoubleType() elif tpe in (np.float32, "float32", "f"): return types.FloatType() elif tpe in (np.int32, "int32", "i"): return types.IntegerType() elif tpe in (int, np.int, np.int64, "int", "int64", "long", "bigint"): return types.LongType() elif tpe in (np.int16, "int16", "short"): return types.ShortType() # StringType elif tpe in (str, np.unicode_, "str", "U"): return types.StringType() # TimestampType elif tpe in (datetime.datetime, np.datetime64, "datetime64[ns]", "M"): return types.TimestampType() else: raise TypeError("Type %s was not understood." % tpe) def spark_type_to_pandas_dtype(spark_type): """ Return the given Spark DataType to pandas dtype. """ if isinstance(spark_type, (types.DateType, types.StructType, types.UserDefinedType)): return np.dtype("object") elif isinstance(spark_type, types.TimestampType): return np.dtype("datetime64[ns]") else: return np.dtype(to_arrow_type(spark_type).to_pandas_dtype()) def infer_pd_series_spark_type(s: pd.Series) -> types.DataType: """Infer Spark DataType from pandas Series dtype. :param s: :class:`pandas.Series` to be inferred :return: the inferred Spark data type """ dt = s.dtype if dt == np.dtype("object"): if len(s) == 0 or s.isnull().all(): raise ValueError("can not infer schema from empty or null dataset") elif hasattr(s[0], "__UDT__"): return s[0].__UDT__ else: return from_arrow_type(pa.Array.from_pandas(s).type) elif is_datetime64_dtype(dt) or is_datetime64tz_dtype(dt): return types.TimestampType() else: return from_arrow_type(pa.from_numpy_dtype(dt)) def infer_return_type(f) -> typing.Union[SeriesType, DataFrameType, ScalarType, UnknownType]: """ >>> def func() -> int: ... pass >>> infer_return_type(func).tpe LongType >>> def func() -> ks.Series[int]: ... pass >>> infer_return_type(func).tpe LongType >>> def func() -> ks.DataFrame[np.float, str]: ... pass >>> infer_return_type(func).tpe StructType(List(StructField(c0,DoubleType,true),StructField(c1,StringType,true))) >>> def func() -> ks.DataFrame[np.float]: ... pass >>> infer_return_type(func).tpe StructType(List(StructField(c0,DoubleType,true))) >>> def func() -> 'int': ... pass >>> infer_return_type(func).tpe LongType >>> def func() -> 'ks.Series[int]': ... pass >>> infer_return_type(func).tpe LongType >>> def func() -> 'ks.DataFrame[np.float, str]': ... pass >>> infer_return_type(func).tpe StructType(List(StructField(c0,DoubleType,true),StructField(c1,StringType,true))) >>> def func() -> 'ks.DataFrame[np.float]': ... pass >>> infer_return_type(func).tpe StructType(List(StructField(c0,DoubleType,true))) >>> def func() -> ks.DataFrame['a': np.float, 'b': int]: ... pass >>> infer_return_type(func).tpe StructType(List(StructField(a,DoubleType,true),StructField(b,LongType,true))) >>> def func() -> "ks.DataFrame['a': np.float, 'b': int]": ... pass >>> infer_return_type(func).tpe StructType(List(StructField(a,DoubleType,true),StructField(b,LongType,true))) >>> pdf = pd.DataFrame({"a": [1, 2, 3], "b": [3, 4, 5]}) >>> def func() -> ks.DataFrame[pdf.dtypes]: ... pass >>> infer_return_type(func).tpe StructType(List(StructField(c0,LongType,true),StructField(c1,LongType,true))) >>> pdf = pd.DataFrame({"a": [1, 2, 3], "b": [3, 4, 5]}) >>> def func() -> ks.DataFrame[zip(pdf.columns, pdf.dtypes)]: ... 
pass >>> infer_return_type(func).tpe StructType(List(StructField(a,LongType,true),StructField(b,LongType,true))) """ # We should re-import to make sure the class 'SeriesType' is not treated as a class # within this module locally. See Series.__class_getitem__ which imports this class # canonically. from databricks.koalas.typedef import SeriesType, NameTypeHolder spec = getfullargspec(f) tpe = spec.annotations.get("return", None) if isinstance(tpe, str): # This type hint can happen when given hints are string to avoid forward reference. tpe = resolve_string_type_hint(tpe) if hasattr(tpe, "__origin__") and ( issubclass(tpe.__origin__, SeriesType) or tpe.__origin__ == ks.Series ): # TODO: remove "tpe.__origin__ == ks.Series" when we drop Python 3.5 and 3.6. inner = as_spark_type(tpe.__args__[0]) return SeriesType(inner) if hasattr(tpe, "__origin__") and tpe.__origin__ == ks.DataFrame: # When Python version is lower then 3.7. Unwrap it to a Tuple type # hints. tpe = tpe.__args__[0] # Note that, DataFrame type hints will create a Tuple. # Python 3.6 has `__name__`. Python 3.7 and 3.8 have `_name`. # Check if the name is Tuple. name = getattr(tpe, "_name", getattr(tpe, "__name__", None)) if name == "Tuple": tuple_type = tpe if hasattr(tuple_type, "__tuple_params__"): # Python 3.5.0 to 3.5.2 has '__tuple_params__' instead. # See https://github.com/python/cpython/blob/v3.5.2/Lib/typing.py parameters = getattr(tuple_type, "__tuple_params__") else: parameters = getattr(tuple_type, "__args__") if len(parameters) > 0 and all( isclass(p) and issubclass(p, NameTypeHolder) for p in parameters ): names = [p.name for p in parameters if issubclass(p, NameTypeHolder)] types = [p.tpe for p in parameters if issubclass(p, NameTypeHolder)] return DataFrameType([as_spark_type(t) for t in types], names) return DataFrameType([as_spark_type(t) for t in parameters]) inner = as_spark_type(tpe) if inner is None: return UnknownType(tpe) else: return ScalarType(inner)
1
17,464
Is this reasonable?
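To make the question concrete, this is what the one-line change does at the type-mapping level. A minimal sketch, assuming as_spark_type is importable from this typedef module in the same way the file imports its other helpers:

import typing
import numpy as np
from databricks.koalas.typedef import as_spark_type

# With the patch, a bare `list` hint is handled like `np.ndarray`:
# an array whose element type defaults to string.
print(as_spark_type(list))              # ArrayType(StringType,true)
print(as_spark_type(np.ndarray))        # ArrayType(StringType,true)  (unchanged)
# Parameterised hints still carry their element type through.
print(as_spark_type(typing.List[int]))  # ArrayType(LongType,true)

Before the patch a bare `list` fell through to the final TypeError, so the question is effectively whether silently defaulting to string elements is preferable to rejecting the un-parameterised hint.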
databricks-koalas
py
@@ -21,6 +21,7 @@ class AutoAugment(object): augment images. Examples: + TODO: Implement 'Shear', 'Sharpness' and 'Rotate' transforms >>> replace = (104, 116, 124) >>> policies = [ >>> [
1
import copy import numpy as np from ..builder import PIPELINES from .compose import Compose @PIPELINES.register_module() class AutoAugment(object): """Auto augmentation. This data augmentation is proposed in `Learning Data Augmentation Strategies for Object Detection <https://arxiv.org/pdf/1906.11172>`_ # noqa: E501 Args: policies (list[list[dict]]): The policies of auto augmentation. Each policy in ``policies`` is a specific augmentation policy, and is composed by several augmentations (dict). When AutoAugment is called, a random policy in ``policies`` will be selected to augment images. Examples: >>> replace = (104, 116, 124) >>> policies = [ >>> [ >>> dict(type='Sharpness', prob=0.0, level=8), >>> dict( >>> type='Shear', >>> prob=0.4, >>> level=0, >>> replace=replace, >>> axis='x') >>> ], >>> [ >>> dict( >>> type='Rotate', >>> prob=0.6, >>> level=10, >>> replace=replace), >>> dict(type='Color', prob=1.0, level=6) >>> ] >>> ] >>> augmentation = AutoAugment(policies) >>> img = np.ones(100, 100, 3) >>> gt_bboxes = np.ones(10, 4) >>> results = dict(img=img, gt_bboxes=gt_bboxes) >>> results = augmentation(results) """ def __init__(self, policies): assert isinstance(policies, list) and len(policies) > 0, \ 'Policies must be a non-empty list.' for policy in policies: assert isinstance(policy, list) and len(policy) > 0, \ 'Each policy in policies must be a non-empty list.' for augment in policy: assert isinstance(augment, dict) and 'type' in augment, \ 'Each specific augmentation must be a dict with key' \ ' "type".' self.policies = copy.deepcopy(policies) self.transforms = [Compose(policy) for policy in self.policies] def __call__(self, results): transform = np.random.choice(self.transforms) return transform(results) def __repr__(self): return f'{self.__class__.__name__}(policies={self.policies}'
1
20,646
We may move this TODO to line 15.
open-mmlab-mmdetection
py
@@ -263,7 +263,12 @@ class OrderController extends BaseFrontController /* check cart count */ $this->checkCartNotEmpty(); - + + /* check stock not empty */ + if(true === ConfigQuery::checkAvailableStock()) { + return $this->checkStockNotEmpty(); + } + /* check delivery address and module */ $this->checkValidDelivery();
1
<?php /*************************************************************************************/ /* */ /* Thelia */ /* */ /* Copyright (c) OpenStudio */ /* email : [email protected] */ /* web : http://www.thelia.net */ /* */ /* This program is free software; you can redistribute it and/or modify */ /* it under the terms of the GNU General Public License as published by */ /* the Free Software Foundation; either version 3 of the License */ /* */ /* This program is distributed in the hope that it will be useful, */ /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ /* GNU General Public License for more details. */ /* */ /* You should have received a copy of the GNU General Public License */ /* along with this program. If not, see <http://www.gnu.org/licenses/>. */ /* */ /*************************************************************************************/ namespace Front\Controller; use Front\Front; use Propel\Runtime\ActiveQuery\Criteria; use Propel\Runtime\Exception\PropelException; use Symfony\Component\HttpFoundation\File\MimeType\MimeTypeGuesser; use Symfony\Component\HttpFoundation\Request; use Symfony\Component\HttpKernel\Exception\AccessDeniedHttpException; use Thelia\Controller\Front\BaseFrontController; use Thelia\Core\Event\Order\OrderEvent; use Thelia\Core\Event\TheliaEvents; use Thelia\Core\HttpFoundation\Response; use Thelia\Exception\TheliaProcessException; use Thelia\Form\Exception\FormValidationException; use Thelia\Form\OrderDelivery; use Thelia\Form\OrderPayment; use Thelia\Log\Tlog; use Thelia\Model\AddressQuery; use Thelia\Model\AreaDeliveryModuleQuery; use Thelia\Model\ConfigQuery; use Thelia\Model\ModuleQuery; use Thelia\Model\Order; use Thelia\Model\OrderProductQuery; use Thelia\Model\OrderQuery; /** * Class OrderController * @package Thelia\Controller\Front * @author Etienne Roudeix <[email protected]> */ class OrderController extends BaseFrontController { /** * Check if the cart contains only virtual products. 
*/ public function deliverView() { $this->checkAuth(); $this->checkCartNotEmpty(); // check if the cart contains only virtual products $cart = $this->getSession()->getSessionCart($this->getDispatcher()); if ($cart->isVirtual()) { // get the virtual product module $customer = $this->getSecurityContext()->getCustomerUser(); $deliveryAddress = AddressQuery::create() ->filterByCustomerId($customer->getId()) ->orderByIsDefault(Criteria::DESC) ->findOne(); if (null !== $deliveryAddress) { $deliveryModule = ModuleQuery::create() ->filterByCode('VirtualProductDelivery') ->filterByActivate(1) ->findOne() ; if (null !== $deliveryModule) { /* get postage amount */ $moduleInstance = $deliveryModule->getModuleInstance($this->container); $postage = $moduleInstance->getPostage($deliveryAddress->getCountry()); $orderEvent = $this->getOrderEvent(); $orderEvent->setDeliveryAddress($deliveryAddress->getId()); $orderEvent->setDeliveryModule($deliveryModule->getId()); $orderEvent->setPostage($postage); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_DELIVERY_ADDRESS, $orderEvent); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_DELIVERY_MODULE, $orderEvent); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_POSTAGE, $orderEvent); return $this->generateRedirectFromRoute("order.invoice"); } else { Tlog::getInstance()->error( $this->getTranslator()->trans( "To enabled the virtual product functionality, the module VirtualProductDelivery should be activated", [], Front::MESSAGE_DOMAIN ) ); } } } return $this->render('order-delivery'); } /** * set delivery address * set delivery module */ public function deliver() { $this->checkAuth(); $this->checkCartNotEmpty(); $message = false; $orderDelivery = new OrderDelivery($this->getRequest()); try { $form = $this->validateForm($orderDelivery, "post"); $deliveryAddressId = $form->get("delivery-address")->getData(); $deliveryModuleId = $form->get("delivery-module")->getData(); $deliveryAddress = AddressQuery::create()->findPk($deliveryAddressId); $deliveryModule = ModuleQuery::create()->findPk($deliveryModuleId); /* check that the delivery address belongs to the current customer */ if ($deliveryAddress->getCustomerId() !== $this->getSecurityContext()->getCustomerUser()->getId()) { throw new \Exception( $this->getTranslator()->trans( "Delivery address does not belong to the current customer", [], Front::MESSAGE_DOMAIN ) ); } /* check that the delivery module fetches the delivery address area */ if(AreaDeliveryModuleQuery::create() ->filterByAreaId($deliveryAddress->getCountry()->getAreaId()) ->filterByDeliveryModuleId($deliveryModuleId) ->count() == 0) { throw new \Exception( $this->getTranslator()->trans( "Delivery module cannot be use with selected delivery address", [], Front::MESSAGE_DOMAIN ) ); } /* get postage amount */ $moduleInstance = $deliveryModule->getModuleInstance($this->container); $postage = $moduleInstance->getPostage($deliveryAddress->getCountry()); $orderEvent = $this->getOrderEvent(); $orderEvent->setDeliveryAddress($deliveryAddressId); $orderEvent->setDeliveryModule($deliveryModuleId); $orderEvent->setPostage($postage); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_DELIVERY_ADDRESS, $orderEvent); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_DELIVERY_MODULE, $orderEvent); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_POSTAGE, $orderEvent); return $this->generateRedirectFromRoute("order.invoice"); } catch (FormValidationException $e) { $message = $this->getTranslator()->trans("Please check your 
input: %s", ['%s' => $e->getMessage()], Front::MESSAGE_DOMAIN); } catch (PropelException $e) { $this->getParserContext()->setGeneralError($e->getMessage()); } catch (\Exception $e) { $message = $this->getTranslator()->trans("Sorry, an error occured: %s", ['%s' => $e->getMessage()], Front::MESSAGE_DOMAIN); } if ($message !== false) { Tlog::getInstance()->error(sprintf("Error during order delivery process : %s. Exception was %s", $message, $e->getMessage())); $orderDelivery->setErrorMessage($message); $this->getParserContext() ->addForm($orderDelivery) ->setGeneralError($message) ; } } /** * set invoice address * set payment module */ public function invoice() { $this->checkAuth(); $this->checkCartNotEmpty(); $this->checkValidDelivery(); $message = false; $orderPayment = new OrderPayment($this->getRequest()); try { $form = $this->validateForm($orderPayment, "post"); $invoiceAddressId = $form->get("invoice-address")->getData(); $paymentModuleId = $form->get("payment-module")->getData(); /* check that the invoice address belongs to the current customer */ $invoiceAddress = AddressQuery::create()->findPk($invoiceAddressId); if ($invoiceAddress->getCustomerId() !== $this->getSecurityContext()->getCustomerUser()->getId()) { throw new \Exception( $this->getTranslator()->trans( "Invoice address does not belong to the current customer", [], Front::MESSAGE_DOMAIN ) ); } $orderEvent = $this->getOrderEvent(); $orderEvent->setInvoiceAddress($invoiceAddressId); $orderEvent->setPaymentModule($paymentModuleId); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_INVOICE_ADDRESS, $orderEvent); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_SET_PAYMENT_MODULE, $orderEvent); return $this->generateRedirectFromRoute("order.payment.process"); } catch (FormValidationException $e) { $message = $this->getTranslator()->trans("Please check your input: %s", ['%s' => $e->getMessage()], Front::MESSAGE_DOMAIN); } catch (PropelException $e) { $this->getParserContext()->setGeneralError($e->getMessage()); } catch (\Exception $e) { $message = $this->getTranslator()->trans("Sorry, an error occured: %s", ['%s' => $e->getMessage()], Front::MESSAGE_DOMAIN); } if ($message !== false) { Tlog::getInstance()->error(sprintf("Error during order payment process : %s. 
Exception was %s", $message, $e->getMessage())); $orderPayment->setErrorMessage($message); $this->getParserContext() ->addForm($orderPayment) ->setGeneralError($message) ; } } public function pay() { /* check customer */ $this->checkAuth(); /* check cart count */ $this->checkCartNotEmpty(); /* check delivery address and module */ $this->checkValidDelivery(); /* check invoice address and payment module */ $this->checkValidInvoice(); $orderEvent = $this->getOrderEvent(); $this->getDispatcher()->dispatch(TheliaEvents::ORDER_PAY, $orderEvent); $placedOrder = $orderEvent->getPlacedOrder(); if (null !== $placedOrder && null !== $placedOrder->getId()) { /* order has been placed */ if ($orderEvent->hasResponse()) { return $orderEvent->getResponse(); } else { return $this->generateRedirectFromRoute('order.placed', [], ['order_id' => $orderEvent->getPlacedOrder()->getId()]); } } else { /* order has not been placed */ return $this->generateRedirectFromRoute('cart.view'); } } public function orderPlaced($order_id) { /* check if the placed order matched the customer */ $placedOrder = OrderQuery::create()->findPk( $this->getRequest()->attributes->get('order_id') ); if (null === $placedOrder) { throw new TheliaProcessException( $this->getTranslator()->trans( "No placed order", [], Front::MESSAGE_DOMAIN ), TheliaProcessException::NO_PLACED_ORDER, $placedOrder ); } $customer = $this->getSecurityContext()->getCustomerUser(); if (null === $customer || $placedOrder->getCustomerId() !== $customer->getId()) { throw new TheliaProcessException( $this->getTranslator()->trans( "Received placed order id does not belong to the current customer", [], Front::MESSAGE_DOMAIN ), TheliaProcessException::PLACED_ORDER_ID_BAD_CURRENT_CUSTOMER, $placedOrder ); } $this->getDispatcher()->dispatch(TheliaEvents::ORDER_CART_CLEAR, $this->getOrderEvent()); $this->getParserContext()->set("placed_order_id", $placedOrder->getId()); } public function orderFailed($order_id, $message) { /* check if the placed order matched the customer */ $failedOrder = OrderQuery::create()->findPk( $this->getRequest()->attributes->get('order_id') ); if (null === $failedOrder) { throw new TheliaProcessException("No failed order", TheliaProcessException::NO_PLACED_ORDER, $failedOrder); } $customer = $this->getSecurityContext()->getCustomerUser(); if (null === $customer || $failedOrder->getCustomerId() !== $customer->getId()) { throw new TheliaProcessException( $this->getTranslator()->trans( "Received failed order id does not belong to the current customer", [], Front::MESSAGE_DOMAIN ) , TheliaProcessException::PLACED_ORDER_ID_BAD_CURRENT_CUSTOMER, $failedOrder ); } $this->getParserContext() ->set("failed_order_id", $failedOrder->getId()) ->set("failed_order_message", $message) ; } protected function getOrderEvent() { $order = $this->getOrder($this->getRequest()); return new OrderEvent($order); } public function getOrder(Request $request) { $session = $request->getSession(); if (null !== $order = $session->getOrder()) { return $order; } $order = new Order(); $session->setOrder($order); return $order; } public function generateInvoicePdf($order_id) { $this->checkOrderCustomer($order_id); return $this->generateOrderPdf($order_id, ConfigQuery::read('pdf_invoice_file', 'invoice')); } public function generateDeliveryPdf($order_id) { $this->checkOrderCustomer($order_id); return $this->generateOrderPdf($order_id, ConfigQuery::read('pdf_delivery_file', 'delivery')); } public function downloadVirtualProduct($order_product_id) { if (null !== $orderProduct = 
OrderProductQuery::create()->findPk($order_product_id)){ $order = $orderProduct->getOrder(); if ($order->isPaid()){ // check customer $this->checkOrderCustomer($order->getId()); if ($orderProduct->getVirtualDocument()) { // try to get the file $path = THELIA_ROOT . ConfigQuery::read('documents_library_path', 'local/media/documents') . DS . "product" . DS . $orderProduct->getVirtualDocument(); if (!is_file($path) || !is_readable($path)) { throw new \ErrorException( $this->getTranslator()->trans( "The file [%file] does not exist", [ "%file" => $order_product_id ] ) ); } $data = file_get_contents($path); $mime = MimeTypeGuesser::getInstance() ->guess($path) ; return new Response($data, 200, ["Content-Type" => $mime]); } } } throw new AccessDeniedHttpException(); } private function checkOrderCustomer($order_id) { $this->checkAuth(); $order = OrderQuery::create()->findPk($order_id); $valid = true; if ($order) { $customerOrder = $order->getCustomer(); $customer = $this->getSecurityContext()->getCustomerUser(); if ($customerOrder->getId() != $customer->getId()) { $valid = false; } } else { $valid = false; } if (false === $valid) { throw new AccessDeniedHttpException(); } } public function getDeliveryModuleListAjaxAction() { $country = $this->getRequest()->get( 'country_id', $this->container->get('thelia.taxEngine')->getDeliveryCountry()->getId() ); $this->checkXmlHttpRequest(); $args = array('country' => $country); return $this->render('ajax/order-delivery-module-list', $args); } }
1
10,681
You have to verify the return type. If it's a response, return it. Otherwise do nothing.
thelia-thelia
php
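The review comment on the record above asks the controller to inspect what a call returns and only short-circuit when it is an HTTP response. A minimal sketch of that pattern, assuming placeholder names (maybeRespond(), a local Response class) rather than Thelia's actual classes:

<?php
// Hypothetical sketch of the pattern the reviewer asks for: call something that
// may or may not produce an HTTP response, return it only when it really is a
// Response, and otherwise do nothing. The names below (maybeRespond(), this
// local Response class) are placeholders, not Thelia's actual classes.

class Response
{
    public $content;

    public function __construct($content = '')
    {
        $this->content = $content;
    }
}

function maybeRespond($shouldRespond)
{
    return $shouldRespond ? new Response('redirect') : null;
}

function handle($shouldRespond)
{
    $result = maybeRespond($shouldRespond);

    if ($result instanceof Response) {
        return $result; // a real response: propagate it to the caller
    }

    return null; // not a response: do nothing, let the normal flow continue
}

var_dump(handle(true));  // object(Response)
var_dump(handle(false)); // NULL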
@@ -44,15 +44,17 @@ #include <Kokkos_Core.hpp> #include <Kokkos_Timer.hpp> -#include <bench.hpp> #include <cstdlib> +template <class T> +void run_stride_unroll(int, int, int, int, int, int, int, int); + int main(int argc, char* argv[]) { Kokkos::initialize(); if (argc < 10) { printf("Arguments: N K R D U F T S\n"); - printf(" P: Precision (1==float, 2==double)\n"); + printf(" P: Precision (1==float, 2==double, 3==int32_t, 4==int64_t)\n"); printf(" N,K: dimensions of the 2D array to allocate\n"); printf(" R: how often to loop through the K dimension with each team\n"); printf(" D: distance between loaded elements (stride)\n");
1
/* //@HEADER // ************************************************************************ // // Kokkos v. 3.0 // Copyright (2020) National Technology & Engineering // Solutions of Sandia, LLC (NTESS). // // Under the terms of Contract DE-NA0003525 with NTESS, // the U.S. Government retains certain rights in this software. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // 3. Neither the name of the Corporation nor the names of the // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY NTESS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL NTESS OR THE // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Questions? Contact Christian R. 
Trott ([email protected]) // // ************************************************************************ //@HEADER */ #include <Kokkos_Core.hpp> #include <Kokkos_Timer.hpp> #include <bench.hpp> #include <cstdlib> int main(int argc, char* argv[]) { Kokkos::initialize(); if (argc < 10) { printf("Arguments: N K R D U F T S\n"); printf(" P: Precision (1==float, 2==double)\n"); printf(" N,K: dimensions of the 2D array to allocate\n"); printf(" R: how often to loop through the K dimension with each team\n"); printf(" D: distance between loaded elements (stride)\n"); printf(" U: how many independent flops to do per load\n"); printf( " F: how many times to repeat the U unrolled operations before " "reading next element\n"); printf(" T: team size\n"); printf( " S: shared memory per team (used to control occupancy on GPUs)\n"); printf("Example Input GPU:\n"); printf(" Bandwidth Bound : 2 100000 1024 1 1 1 1 256 6000\n"); printf(" Cache Bound : 2 100000 1024 64 1 1 1 512 20000\n"); printf(" Compute Bound : 2 100000 1024 1 1 8 64 256 6000\n"); printf(" Load Slots Used : 2 20000 256 32 16 1 1 256 6000\n"); printf(" Inefficient Load: 2 20000 256 32 2 1 1 256 20000\n"); Kokkos::finalize(); return 0; } int P = std::stoi(argv[1]); int N = std::stoi(argv[2]); int K = std::stoi(argv[3]); int R = std::stoi(argv[4]); int D = std::stoi(argv[5]); int U = std::stoi(argv[6]); int F = std::stoi(argv[7]); int T = std::stoi(argv[8]); int S = std::stoi(argv[9]); if (U > 8) { printf("U must be 1-8\n"); return 0; } if ((D != 1) && (D != 2) && (D != 4) && (D != 8) && (D != 16) && (D != 32)) { printf("D must be one of 1,2,4,8,16,32\n"); return 0; } if ((P != 1) && (P != 2)) { printf("P must be one of 1,2\n"); return 0; } if (P == 1) { run_stride_unroll<float>(N, K, R, D, U, F, T, S); } if (P == 2) { run_stride_unroll<double>(N, K, R, D, U, F, T, S); } Kokkos::finalize(); }
1
32,605
No. Keep the header include and do `extern template ...` to skip the instantiation from that compile unit. (I pushed a fix directly to your branch)
kokkos-kokkos
cpp
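The reviewer's suggestion above names a standard C++ technique. A minimal sketch of `extern template`, under the assumption of a toy run_kernel template standing in for the real run_stride_unroll from bench.hpp:

// Hedged sketch of the `extern template` idiom the reviewer suggests, using a
// toy template that stands in for run_stride_unroll from bench.hpp (the real
// names and files are not reproduced here). An `extern template` declaration
// keeps the header include, so the signature stays visible and checked, but
// tells this translation unit not to instantiate the template itself; a single
// designated .cpp file then provides the explicit instantiation. Both halves
// are shown in one file only so the sketch compiles on its own.
#include <cstdio>

template <class T>
void run_kernel(T value) {  // stand-in for the benchmark driver template
  std::printf("running with %g\n", static_cast<double>(value));
}

// "Do not instantiate these specializations here" -- in a real project this is
// what each client .cpp would contain right after including the header.
extern template void run_kernel<float>(float);
extern template void run_kernel<double>(double);

// The one explicit instantiation definition -- normally placed in a separate
// translation unit (e.g. run_kernel_float.cpp / run_kernel_double.cpp).
template void run_kernel<float>(float);
template void run_kernel<double>(double);

int main() {
  run_kernel<float>(1.5f);
  run_kernel<double>(2.5);
  return 0;
}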
@@ -203,6 +203,8 @@ class TestPython3Checker(testutils.CheckerTestCase): '[x for x in {}]', 'func({})', 'a, b = {}', + 'max({}())', + 'min({}())', ] non_iterating_code = [ 'x = __({}())',
1
# -*- coding: utf-8 -*- # Copyright (c) 2014-2017 Claudiu Popa <[email protected]> # Copyright (c) 2014-2015 Brett Cannon <[email protected]> # Copyright (c) 2015 Ionel Cristian Maries <[email protected]> # Copyright (c) 2015 Cosmin Poieana <[email protected]> # Copyright (c) 2015 Viorel Stirbu <[email protected]> # Copyright (c) 2016-2017 Roy Williams <[email protected]> # Copyright (c) 2016 Roy Williams <[email protected]> # Copyright (c) 2016 Derek Gustafson <[email protected]> # Copyright (c) 2017 Daniel Miller <[email protected]> # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/master/COPYING """Tests for the python3 checkers.""" from __future__ import absolute_import import sys import textwrap import pytest import astroid from pylint import testutils from pylint.checkers import python3 as checker from pylint.interfaces import INFERENCE_FAILURE, INFERENCE # Decorator for any tests that will fail under Python 3 python2_only = pytest.mark.skipif(sys.version_info[0] > 2, reason='Python 2 only') # TODO(cpopa): Port these to the functional test framework instead. class TestPython3Checker(testutils.CheckerTestCase): CHECKER_CLASS = checker.Python3Checker def check_bad_builtin(self, builtin_name): node = astroid.extract_node(builtin_name + ' #@') message = builtin_name.lower() + '-builtin' with self.assertAddsMessages(testutils.Message(message, node=node)): self.checker.visit_name(node) @python2_only def test_bad_builtins(self): builtins = [ 'apply', 'buffer', 'cmp', 'coerce', 'execfile', 'file', 'input', 'intern', 'long', 'raw_input', 'round', 'reduce', 'StandardError', 'unichr', 'unicode', 'xrange', 'reload', ] for builtin in builtins: self.check_bad_builtin(builtin) def as_iterable_in_for_loop_test(self, fxn): code = "for x in {}(): pass".format(fxn) module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def as_used_by_iterable_in_for_loop_test(self, fxn): checker = '{}-builtin-not-iterating'.format(fxn) node = astroid.extract_node(""" for x in (whatever( {}() #@ )): pass """.format(fxn)) message = testutils.Message(checker, node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def as_iterable_in_genexp_test(self, fxn): code = "x = (x for x in {}())".format(fxn) module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def as_iterable_in_listcomp_test(self, fxn): code = "x = [x for x in {}(None, [1])]".format(fxn) module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def as_used_in_variant_in_genexp_test(self, fxn): checker = '{}-builtin-not-iterating'.format(fxn) node = astroid.extract_node(""" list( __({}(x)) for x in [1] ) """.format(fxn)) message = testutils.Message(checker, node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def as_used_in_variant_in_listcomp_test(self, fxn): checker = '{}-builtin-not-iterating'.format(fxn) node = astroid.extract_node(""" [ __({}(None, x)) for x in [[1]]] """.format(fxn)) message = testutils.Message(checker, node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def as_argument_to_callable_constructor_test(self, fxn, callable_fn): module = astroid.parse("x = {}({}())".format(callable_fn, fxn)) with self.assertNoMessages(): self.walk(module) def as_argument_to_materialized_filter(self, callable_fn): module = astroid.parse("list(filter(None, {}()))".format(callable_fn)) with self.assertNoMessages(): self.walk(module) def 
as_argument_to_random_fxn_test(self, fxn): checker = '{}-builtin-not-iterating'.format(fxn) node = astroid.extract_node(""" y( {}() #@ ) """.format(fxn)) message = testutils.Message(checker, node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def as_argument_to_str_join_test(self, fxn): code = "x = ''.join({}())".format(fxn) module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def as_iterable_in_unpacking(self, fxn): node = astroid.extract_node(""" a, b = __({}()) """.format(fxn)) with self.assertNoMessages(): self.checker.visit_call(node) def as_assignment(self, fxn): checker = '{}-builtin-not-iterating'.format(fxn) node = astroid.extract_node(""" a = __({}()) """.format(fxn)) message = testutils.Message(checker, node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def iterating_context_tests(self, fxn): """Helper for verifying a function isn't used as an iterator.""" self.as_iterable_in_for_loop_test(fxn) self.as_used_by_iterable_in_for_loop_test(fxn) self.as_iterable_in_genexp_test(fxn) self.as_iterable_in_listcomp_test(fxn) self.as_used_in_variant_in_genexp_test(fxn) self.as_used_in_variant_in_listcomp_test(fxn) self.as_argument_to_random_fxn_test(fxn) self.as_argument_to_str_join_test(fxn) self.as_iterable_in_unpacking(fxn) self.as_assignment(fxn) self.as_argument_to_materialized_filter(fxn) for func in ('iter', 'list', 'tuple', 'sorted', 'set', 'sum', 'any', 'all', 'enumerate', 'dict'): self.as_argument_to_callable_constructor_test(fxn, func) def test_dict_subclasses_methods_in_iterating_context(self): iterating, not_iterating = astroid.extract_node(''' from __future__ import absolute_import from collections import defaultdict d = defaultdict(list) a, b = d.keys() #@ x = d.keys() #@ ''') with self.assertNoMessages(): self.checker.visit_call(iterating.value) message = testutils.Message('dict-keys-not-iterating', node=not_iterating.value) with self.assertAddsMessages(message): self.checker.visit_call(not_iterating.value) def test_dict_methods_in_iterating_context(self): iterating_code = [ 'for x in {}: pass', '(x for x in {})', '[x for x in {}]', 'func({})', 'a, b = {}', ] non_iterating_code = [ 'x = __({}())', '__({}())[0]', ] for method in ('keys', 'items', 'values'): dict_method = '{{}}.{}'.format(method) for code in iterating_code: with_value = code.format(dict_method) module = astroid.parse(with_value) with self.assertNoMessages(): self.walk(module) for code in non_iterating_code: with_value = code.format(dict_method) node = astroid.extract_node(with_value) checker = 'dict-{}-not-iterating'.format(method) message = testutils.Message(checker, node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_map_in_iterating_context(self): self.iterating_context_tests('map') def test_zip_in_iterating_context(self): self.iterating_context_tests('zip') def test_range_in_iterating_context(self): self.iterating_context_tests('range') def test_filter_in_iterating_context(self): self.iterating_context_tests('filter') def defined_method_test(self, method, warning): """Helper for verifying that a certain method is not defined.""" node = astroid.extract_node(""" class Foo(object): def __{}__(self, other): #@ pass""".format(method)) message = testutils.Message(warning, node=node) with self.assertAddsMessages(message): self.checker.visit_functiondef(node) def test_delslice_method(self): self.defined_method_test('delslice', 'delslice-method') def test_getslice_method(self): 
self.defined_method_test('getslice', 'getslice-method') def test_setslice_method(self): self.defined_method_test('setslice', 'setslice-method') def test_coerce_method(self): self.defined_method_test('coerce', 'coerce-method') def test_oct_method(self): self.defined_method_test('oct', 'oct-method') def test_hex_method(self): self.defined_method_test('hex', 'hex-method') def test_nonzero_method(self): self.defined_method_test('nonzero', 'nonzero-method') def test_cmp_method(self): self.defined_method_test('cmp', 'cmp-method') def test_div_method(self): self.defined_method_test('div', 'div-method') def test_idiv_method(self): self.defined_method_test('idiv', 'idiv-method') def test_rdiv_method(self): self.defined_method_test('rdiv', 'rdiv-method') def test_eq_and_hash_method(self): """Helper for verifying that a certain method is not defined.""" node = astroid.extract_node(""" class Foo(object): #@ def __eq__(self, other): pass def __hash__(self): pass""") with self.assertNoMessages(): self.checker.visit_classdef(node) def test_eq_and_hash_is_none(self): """Helper for verifying that a certain method is not defined.""" node = astroid.extract_node(""" class Foo(object): #@ def __eq__(self, other): pass __hash__ = None""") with self.assertNoMessages(): self.checker.visit_classdef(node) def test_eq_without_hash_method(self): """Helper for verifying that a certain method is not defined.""" node = astroid.extract_node(""" class Foo(object): #@ def __eq__(self, other): pass""") message = testutils.Message('eq-without-hash', node=node) with self.assertAddsMessages(message): self.checker.visit_classdef(node) @python2_only def test_print_statement(self): node = astroid.extract_node('print "Hello, World!" #@') message = testutils.Message('print-statement', node=node) with self.assertAddsMessages(message): self.checker.visit_print(node) @python2_only def test_backtick(self): node = astroid.extract_node('`test`') message = testutils.Message('backtick', node=node) with self.assertAddsMessages(message): self.checker.visit_repr(node) def test_relative_import(self): node = astroid.extract_node('import string #@') message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(message): self.checker.visit_import(node) with self.assertNoMessages(): # message should only be added once self.checker.visit_import(node) def test_relative_from_import(self): node = astroid.extract_node('from os import path #@') message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(message): self.checker.visit_importfrom(node) with self.assertNoMessages(): # message should only be added once self.checker.visit_importfrom(node) def test_absolute_import(self): module_import = astroid.parse( 'from __future__ import absolute_import; import os') module_from = astroid.parse( 'from __future__ import absolute_import; from os import path') with self.assertNoMessages(): for module in (module_import, module_from): self.walk(module) def test_import_star_module_level(self): node = astroid.extract_node(''' def test(): from lala import * #@ ''') absolute = testutils.Message('no-absolute-import', node=node) star = testutils.Message('import-star-module-level', node=node) with self.assertAddsMessages(absolute, star): self.checker.visit_importfrom(node) def test_division(self): node = astroid.extract_node('3 / 2 #@') message = testutils.Message('old-division', node=node) with self.assertAddsMessages(message): self.checker.visit_binop(node) def test_division_with_future_statement(self): module 
= astroid.parse('from __future__ import division; 3 / 2') with self.assertNoMessages(): self.walk(module) def test_floor_division(self): node = astroid.extract_node(' 3 // 2 #@') with self.assertNoMessages(): self.checker.visit_binop(node) def test_division_by_float(self): left_node = astroid.extract_node('3.0 / 2 #@') right_node = astroid.extract_node(' 3 / 2.0 #@') with self.assertNoMessages(): for node in (left_node, right_node): self.checker.visit_binop(node) def test_dict_iter_method(self): for meth in ('keys', 'values', 'items'): node = astroid.extract_node('x.iter%s() #@' % meth) message = testutils.Message('dict-iter-method', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_dict_iter_method_on_dict(self): nodes = astroid.extract_node(''' from collections import defaultdict {}.iterkeys() #@ defaultdict(list).iterkeys() #@ class Someclass(dict): pass Someclass().iterkeys() #@ ''') for node in nodes: message = testutils.Message('dict-iter-method', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_dict_not_iter_method(self): arg_node = astroid.extract_node('x.iterkeys(x) #@') stararg_node = astroid.extract_node('x.iterkeys(*x) #@') kwarg_node = astroid.extract_node('x.iterkeys(y=x) #@') non_dict_node = astroid.extract_node('x=[]\nx.iterkeys() #@') with self.assertNoMessages(): for node in (arg_node, stararg_node, kwarg_node, non_dict_node): self.checker.visit_call(node) def test_dict_view_method(self): for meth in ('keys', 'values', 'items'): node = astroid.extract_node('x.view%s() #@' % meth) message = testutils.Message('dict-view-method', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_dict_view_method_on_dict(self): nodes = astroid.extract_node(''' from collections import defaultdict {}.viewkeys() #@ defaultdict(list).viewkeys() #@ class Someclass(dict): pass Someclass().viewkeys() #@ ''') for node in nodes: message = testutils.Message('dict-view-method', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_dict_not_view_method(self): arg_node = astroid.extract_node('x.viewkeys(x) #@') stararg_node = astroid.extract_node('x.viewkeys(*x) #@') kwarg_node = astroid.extract_node('x.viewkeys(y=x) #@') non_dict_node = astroid.extract_node('x=[]\nx.viewkeys() #@') with self.assertNoMessages(): for node in (arg_node, stararg_node, kwarg_node, non_dict_node): self.checker.visit_call(node) def test_next_method(self): node = astroid.extract_node('x.next() #@') message = testutils.Message('next-method-called', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_not_next_method(self): arg_node = astroid.extract_node('x.next(x) #@') stararg_node = astroid.extract_node('x.next(*x) #@') kwarg_node = astroid.extract_node('x.next(y=x) #@') with self.assertNoMessages(): for node in (arg_node, stararg_node, kwarg_node): self.checker.visit_call(node) def test_metaclass_assignment(self): node = astroid.extract_node(""" class Foo(object): #@ __metaclass__ = type""") message = testutils.Message('metaclass-assignment', node=node) with self.assertAddsMessages(message): self.checker.visit_classdef(node) def test_metaclass_global_assignment(self): module = astroid.parse('__metaclass__ = type') with self.assertNoMessages(): self.walk(module) @python2_only def test_parameter_unpacking(self): node = astroid.extract_node('def func((a, b)):#@\n pass') arg = node.args.args[0] with 
self.assertAddsMessages(testutils.Message('parameter-unpacking', node=arg)): self.checker.visit_arguments(node.args) @python2_only def test_old_raise_syntax(self): node = astroid.extract_node('raise Exception, "test"') message = testutils.Message('old-raise-syntax', node=node) with self.assertAddsMessages(message): self.checker.visit_raise(node) def test_xreadlines_attribute(self): node = astroid.extract_node(""" f.xreadlines #@ """) message = testutils.Message('xreadlines-attribute', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_exception_message_attribute(self): node = astroid.extract_node(""" try: raise Exception("test") except Exception as e: e.message #@ """) message = testutils.Message('exception-message-attribute', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_normal_message_attribute(self): node = astroid.extract_node(""" e.message #@ """) with self.assertNoMessages(): self.checker.visit_attribute(node) def test_invalid_codec(self): node = astroid.extract_node('foobar.encode("hex") #@') message = testutils.Message('invalid-str-codec', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_valid_codec(self): node = astroid.extract_node('foobar.encode("ascii", "ignore") #@') with self.assertNoMessages(): self.checker.visit_call(node) def test_visit_call_with_kwarg(self): node = astroid.extract_node('foobar.raz(encoding="hex") #@') with self.assertNoMessages(): self.checker.visit_call(node) def test_invalid_open_codec(self): node = astroid.extract_node('open(foobar, encoding="hex") #@') message = testutils.Message('invalid-str-codec', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_valid_open_codec(self): node = astroid.extract_node('open(foobar, encoding="palmos") #@') with self.assertNoMessages(): self.checker.visit_call(node) @python2_only def test_raising_string(self): node = astroid.extract_node('raise "Test"') message = testutils.Message('raising-string', node=node) with self.assertAddsMessages(message): self.checker.visit_raise(node) @python2_only def test_checker_disabled_by_default(self): node = astroid.parse(textwrap.dedent(""" abc = 1l raise Exception, "test" raise "test" `abc` """)) with self.assertNoMessages(): self.walk(node) def test_using_cmp_argument(self): nodes = astroid.extract_node(""" [].sort(cmp=lambda x: x) #@ a = list(range(x)) a.sort(cmp=lambda x: x) #@ sorted([], cmp=lambda x: x) #@ """) for node in nodes: message = testutils.Message('using-cmp-argument', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_sys_maxint(self): node = astroid.extract_node(''' import sys sys.maxint #@ ''') message = testutils.Message('sys-max-int', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_itertools_izip(self): node = astroid.extract_node(''' from itertools import izip #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('deprecated-itertools-function', node=node) with self.assertAddsMessages(absolute_import_message, message): self.checker.visit_importfrom(node) def test_deprecated_types_fields(self): node = astroid.extract_node(''' from types import StringType #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('deprecated-types-field', node=node) with self.assertAddsMessages(absolute_import_message, 
message): self.checker.visit_importfrom(node) def test_sys_maxint_imort_from(self): node = astroid.extract_node(''' from sys import maxint #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('sys-max-int', node=node) with self.assertAddsMessages(absolute_import_message, message): self.checker.visit_importfrom(node) def test_object_maxint(self): node = astroid.extract_node(''' sys = object() sys.maxint #@ ''') with self.assertNoMessages(): self.checker.visit_attribute(node) def test_bad_import(self): node = astroid.extract_node(''' import urllib2, sys #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('bad-python3-import', node=node) with self.assertAddsMessages(absolute_import_message, message): self.checker.visit_import(node) @python2_only def test_bad_import_not_on_relative(self): samples = [ 'from .commands import titi', 'from . import commands', ] for code in samples: node = astroid.extract_node(code) absolute_import_message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(absolute_import_message): self.checker.visit_importfrom(node) self.checker._future_absolute_import = False def test_bad_import_conditional(self): node = astroid.extract_node(''' import six if six.PY2: import urllib2 #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(absolute_import_message): self.checker.visit_import(node) def test_bad_import_try_except_handler(self): node = astroid.extract_node(''' try: from hashlib import sha except: import sha #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(absolute_import_message): self.checker.visit_import(node) def test_bad_import_try(self): node = astroid.extract_node(''' try: import md5 #@ except: from hashlib import md5 finally: pass ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(absolute_import_message): self.checker.visit_import(node) def test_bad_import_try_finally(self): node = astroid.extract_node(''' try: import Queue #@ finally: import queue ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('bad-python3-import', node=node) with self.assertAddsMessages(absolute_import_message, message): self.checker.visit_import(node) def test_bad_import_from(self): node = astroid.extract_node(''' from cStringIO import StringIO #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('bad-python3-import', node=node) with self.assertAddsMessages(absolute_import_message, message): self.checker.visit_importfrom(node) def test_bad_string_attribute(self): node = astroid.extract_node(''' import string string.maketrans #@ ''') message = testutils.Message('deprecated-string-function', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_bad_operator_attribute(self): node = astroid.extract_node(''' import operator operator.div #@ ''') message = testutils.Message('deprecated-operator-function', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_comprehension_escape(self): list_comp, set_comp, dict_comp = astroid.extract_node(''' [i for i in range(10)] i #@ {c for c in range(10)} c #@ {j:j for j in range(10)} j #@ ''') message = 
testutils.Message('comprehension-escape', node=list_comp) with self.assertAddsMessages(message): self.checker.visit_name(list_comp) for node in (set_comp, dict_comp): with self.assertNoMessages(): self.checker.visit_name(node) def test_comprehension_escape_newly_introduced(self): node = astroid.extract_node(''' [i for i in range(3)] for i in range(3): i ''') with self.assertNoMessages(): self.walk(node) def test_exception_escape(self): bad, good = astroid.extract_node(''' try: 1/0 except ValueError as exc: pass exc #@ try: 2/0 except (ValueError, TypeError) as exc: exc = 2 exc #@ ''') message = testutils.Message('exception-escape', node=bad) with self.assertAddsMessages(message): self.checker.visit_name(bad) with self.assertNoMessages(): self.checker.visit_name(good) def test_bad_sys_attribute(self): node = astroid.extract_node(''' import sys sys.exc_clear #@ ''') message = testutils.Message('deprecated-sys-function', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_bad_urllib_attribute(self): nodes = astroid.extract_node(''' import urllib urllib.addbase #@ urllib.splithost #@ urllib.urlretrieve #@ urllib.urlopen #@ urllib.urlencode #@ ''') for node in nodes: message = testutils.Message('deprecated-urllib-function', node=node) with self.assertAddsMessages(message): self.checker.visit_attribute(node) def test_ok_string_attribute(self): node = astroid.extract_node(''' import string string.ascii_letters #@ ''') with self.assertNoMessages(): self.checker.visit_attribute(node) def test_bad_string_call(self): node = astroid.extract_node(''' import string string.upper("hello world") #@ ''') message = testutils.Message('deprecated-string-function', node=node) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_ok_shadowed_call(self): node = astroid.extract_node(''' import six.moves.configparser six.moves.configparser.ConfigParser() #@ ''') with self.assertNoMessages(): self.checker.visit_call(node) def test_ok_string_call(self): node = astroid.extract_node(''' import string string.Foramtter() #@ ''') with self.assertNoMessages(): self.checker.visit_call(node) def test_bad_string_import_from(self): node = astroid.extract_node(''' from string import atoi #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) message = testutils.Message('deprecated-string-function', node=node) with self.assertAddsMessages(absolute_import_message, message): self.checker.visit_importfrom(node) def test_ok_string_import_from(self): node = astroid.extract_node(''' from string import digits #@ ''') absolute_import_message = testutils.Message('no-absolute-import', node=node) with self.assertAddsMessages(absolute_import_message): self.checker.visit_importfrom(node) def test_bad_str_translate_call_string_literal(self): node = astroid.extract_node(''' foobar.translate(None, 'abc123') #@ ''') message = testutils.Message('deprecated-str-translate-call', node=node, confidence=INFERENCE_FAILURE) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_bad_str_translate_call_variable(self): node = astroid.extract_node(''' def raz(foobar): foobar.translate(None, 'hello') #@ ''') message = testutils.Message('deprecated-str-translate-call', node=node, confidence=INFERENCE_FAILURE) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_bad_str_translate_call_infer_str(self): node = astroid.extract_node(''' foobar = "hello world" foobar.translate(None, foobar) #@ ''') message = 
testutils.Message('deprecated-str-translate-call', node=node, confidence=INFERENCE) with self.assertAddsMessages(message): self.checker.visit_call(node) def test_ok_str_translate_call_integer(self): node = astroid.extract_node(''' foobar.translate(None, 33) #@ ''') with self.assertNoMessages(): self.checker.visit_call(node) def test_ok_str_translate_call_keyword(self): node = astroid.extract_node(''' foobar.translate(None, 'foobar', raz=33) #@ ''') with self.assertNoMessages(): self.checker.visit_call(node) def test_ok_str_translate_call_not_str(self): node = astroid.extract_node(''' foobar = {} foobar.translate(None, 'foobar') #@ ''') with self.assertNoMessages(): self.checker.visit_call(node) def test_non_py2_conditional(self): code = ''' from __future__ import absolute_import import sys x = {} if sys.maxsize: x.iterkeys() #@ ''' node = astroid.extract_node(code) module = node.parent.parent message = testutils.Message('dict-iter-method', node=node) with self.assertAddsMessages(message): self.walk(module) def test_six_conditional(self): code = ''' from __future__ import absolute_import import six x = {} if six.PY2: x.iterkeys() ''' module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def test_versioninfo_conditional(self): code = ''' from __future__ import absolute_import import sys x = {} if sys.version_info[0] == 2: x.iterkeys() ''' module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def test_versioninfo_tuple_conditional(self): code = ''' from __future__ import absolute_import import sys x = {} if sys.version_info == (2, 7): x.iterkeys() ''' module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def test_six_ifexp_conditional(self): code = ''' from __future__ import absolute_import import six import string string.translate if six.PY2 else None ''' module = astroid.parse(code) with self.assertNoMessages(): self.walk(module) def test_next_defined(self): node = astroid.extract_node(""" class Foo(object): def next(self): #@ pass""") message = testutils.Message('next-method-defined', node=node) with self.assertAddsMessages(message): self.checker.visit_functiondef(node) def test_next_defined_too_many_args(self): node = astroid.extract_node(""" class Foo(object): def next(self, foo=None): #@ pass""") with self.assertNoMessages(): self.checker.visit_functiondef(node) def test_next_defined_static_method_too_many_args(self): node = astroid.extract_node(""" class Foo(object): @staticmethod def next(self): #@ pass""") with self.assertNoMessages(): self.checker.visit_functiondef(node) def test_next_defined_static_method(self): node = astroid.extract_node(""" class Foo(object): @staticmethod def next(): #@ pass""") message = testutils.Message('next-method-defined', node=node) with self.assertAddsMessages(message): self.checker.visit_functiondef(node) def test_next_defined_class_method(self): node = astroid.extract_node(""" class Foo(object): @classmethod def next(cls): #@ pass""") message = testutils.Message('next-method-defined', node=node) with self.assertAddsMessages(message): self.checker.visit_functiondef(node) @python2_only class TestPython3TokenChecker(testutils.CheckerTestCase): CHECKER_CLASS = checker.Python3TokenChecker def _test_token_message(self, code, symbolic_message): tokens = testutils._tokenize_str(code) message = testutils.Message(symbolic_message, line=1) with self.assertAddsMessages(message): self.checker.process_tokens(tokens) def test_long_suffix(self): for code in ("1l", "1L"): 
self._test_token_message(code, 'long-suffix') def test_old_ne_operator(self): self._test_token_message("1 <> 2", "old-ne-operator") def test_old_octal_literal(self): for octal in ("045", "055", "075", "077", "076543"): self._test_token_message(octal, "old-octal-literal") # Make sure we are catching only octals. for non_octal in ("45", "00", "085", "08", "1"): tokens = testutils._tokenize_str(non_octal) with self.assertNoMessages(): self.checker.process_tokens(tokens) def test_non_ascii_bytes_literal(self): code = 'b"测试"' self._test_token_message(code, 'non-ascii-bytes-literal') for code in ("测试", "测试", 'abcdef', b'\x80'): tokens = testutils._tokenize_str(code) with self.assertNoMessages(): self.checker.process_tokens(tokens)
1
10,302
these tests are currently somewhat nonsensical. This code expands to `a, b = {}.keys` when it really should be expanding to `a, b = {}.keys()` -- though fixing this causes the test to fail so I suspect something worse is going on here that I don't quite understand?
PyCQA-pylint
py
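The comment on the record above concerns how the test templates expand. A small standalone reproduction, built from the same literal templates used in the test file, shows the difference between the old entries and the two added by the patch:

# Standalone reproduction of the expansion the reviewer describes: the test
# formats a dict-method template into each iterating_code entry, and entries
# without trailing "()" end up naming the bound method instead of calling it.
iterating_code = [
    'for x in {}: pass',
    'a, b = {}',      # no call parentheses in this template
    'max({}())',      # the newly added entries do include the call
]

method = 'keys'
dict_method = '{{}}.{}'.format(method)   # -> '{}.keys'

for code in iterating_code:
    print(code.format(dict_method))

# Printed snippets:
#   for x in {}.keys: pass
#   a, b = {}.keys        <- attribute access only, never calls .keys()
#   max({}.keys())        <- this one actually calls .keys()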
@@ -90,7 +90,7 @@ public class TestAvroNameMapping extends TestAvroReadProjection { projected = writeAndRead(writeSchema, readSchema, record, nameMapping); Record projectedL1 = ((Map<String, Record>) projected.get("location")).get("l1"); Assert.assertNotNull("Field missing from table mapping is renamed", projectedL1.getSchema().getField("long_r2")); - Assert.assertNull("location.value.long, should not be read", projectedL1.get("long_r2")); + Assert.assertNull("location.value.long, should not be read", projectedL1.get("long")); } @Test
1
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.avro; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.io.DatumWriter; import org.apache.iceberg.AssertHelpers; import org.apache.iceberg.Files; import org.apache.iceberg.Schema; import org.apache.iceberg.mapping.MappedField; import org.apache.iceberg.mapping.MappedFields; import org.apache.iceberg.mapping.MappingUtil; import org.apache.iceberg.mapping.NameMapping; import org.apache.iceberg.types.Comparators; import org.apache.iceberg.types.Types; import org.junit.Assert; import org.junit.Test; import static org.apache.avro.generic.GenericData.Record; public class TestAvroNameMapping extends TestAvroReadProjection { @Test public void testMapProjections() throws IOException { Schema writeSchema = new Schema( Types.NestedField.required(0, "id", Types.LongType.get()), Types.NestedField.optional(5, "location", Types.MapType.ofOptional(6, 7, Types.StringType.get(), Types.StructType.of( Types.NestedField.required(1, "lat", Types.FloatType.get()), Types.NestedField.optional(2, "long", Types.FloatType.get()) ) ))); Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table")); record.put("id", 34L); Record location = new Record(AvroSchemaUtil.fromOption( AvroSchemaUtil.fromOption(record.getSchema().getField("location").schema()) .getValueType())); location.put("lat", 52.995143f); location.put("long", -1.539054f); record.put("location", ImmutableMap.of("l1", location)); // Table mapping does not project `location` map NameMapping nameMapping = MappingUtil.create(new Schema( Types.NestedField.required(0, "id", Types.LongType.get()))); Schema readSchema = writeSchema; Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping); // field id 5 comes from read schema Assert.assertNotNull("Field missing from table mapping is renamed", projected.getSchema().getField("location_r5")); Assert.assertNull("location field should not be read", projected.get("location_r5")); Assert.assertEquals(34L, projected.get("id")); // Table mapping partially project `location` map value nameMapping = MappingUtil.create(new Schema( Types.NestedField.required(0, "id", Types.LongType.get()), Types.NestedField.optional(5, "location", Types.MapType.ofOptional(6, 7, Types.StringType.get(), Types.StructType.of( Types.NestedField.required(1, "lat", Types.FloatType.get())))))); projected = writeAndRead(writeSchema, readSchema, 
record, nameMapping); Record projectedL1 = ((Map<String, Record>) projected.get("location")).get("l1"); Assert.assertNotNull("Field missing from table mapping is renamed", projectedL1.getSchema().getField("long_r2")); Assert.assertNull("location.value.long, should not be read", projectedL1.get("long_r2")); } @Test public void testComplexMapKeys() throws IOException { Schema writeSchema = new Schema( Types.NestedField.required(5, "location", Types.MapType.ofRequired(6, 7, Types.StructType.of( Types.NestedField.required(3, "k1", Types.StringType.get()), Types.NestedField.required(4, "k2", Types.StringType.get()) ), Types.StructType.of( Types.NestedField.required(1, "lat", Types.FloatType.get()), Types.NestedField.optional(2, "long", Types.FloatType.get()) ) ))); Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table")); org.apache.avro.Schema locationSchema = record.getSchema().getField("location").schema(); Record locationElement = new Record(locationSchema.getElementType()); Record locationKey = new Record(locationElement.getSchema().getField("key").schema()); Record locationValue = new Record(locationElement.getSchema().getField("value").schema()); locationKey.put("k1", "k1"); locationKey.put("k2", "k2"); locationValue.put("lat", 52.995143f); locationValue.put("long", -1.539054f); locationElement.put("key", locationKey); locationElement.put("value", locationValue); record.put("location", ImmutableList.of(locationElement)); // project a subset of the map's value columns in NameMapping NameMapping nameMapping = MappingUtil.create(new Schema( Types.NestedField.required(5, "location", Types.MapType.ofOptional(6, 7, Types.StructType.of( Types.NestedField.required(3, "k1", Types.StringType.get()), Types.NestedField.optional(4, "k2", Types.StringType.get()) ), Types.StructType.of( Types.NestedField.required(1, "lat", Types.FloatType.get()) ) )))); Schema readSchema = new Schema( Types.NestedField.required(5, "location", Types.MapType.ofOptional(6, 7, Types.StructType.of( Types.NestedField.required(3, "k1", Types.StringType.get()), Types.NestedField.optional(4, "k2", Types.StringType.get()) ), Types.StructType.of( Types.NestedField.required(1, "lat", Types.FloatType.get()), Types.NestedField.optional(2, "long", Types.FloatType.get()) ) ))); Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping); // The data is read back as a map Map<Record, Record> projectedLocation = (Map<Record, Record>) projected.get("location"); Record projectedKey = projectedLocation.keySet().iterator().next(); Record projectedValue = projectedLocation.values().iterator().next(); Assert.assertEquals(0, Comparators.charSequences().compare("k1", (CharSequence) projectedKey.get("k1"))); Assert.assertEquals(0, Comparators.charSequences().compare("k2", (CharSequence) projectedKey.get("k2"))); Assert.assertEquals(52.995143f, projectedValue.get("lat")); Assert.assertNotNull(projectedValue.getSchema().getField("long_r2")); Assert.assertNull(projectedValue.get("long_r2")); } @Test public void testMissingRequiredFields() { Schema writeSchema = new Schema( Types.NestedField.required(19, "x", Types.IntegerType.get()), Types.NestedField.optional(18, "y", Types.IntegerType.get())); Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table")); record.put("x", 1); record.put("y", 2); // table mapping not projecting a required field 'x' NameMapping nameMapping = MappingUtil.create(new Schema( Types.NestedField.optional(18, "y", Types.IntegerType.get()))); Schema readSchema = writeSchema; 
AssertHelpers.assertThrows("Missing required field in nameMapping", IllegalArgumentException.class, "Missing required field: x", // In this case, pruneColumns result is an empty record () -> writeAndRead(writeSchema, readSchema, record, nameMapping)); } @Test public void testArrayProjections() throws Exception { Schema writeSchema = new Schema( Types.NestedField.required(0, "id", Types.LongType.get()), Types.NestedField.optional(22, "point", Types.ListType.ofOptional(21, Types.StructType.of( Types.NestedField.required(19, "x", Types.IntegerType.get()), Types.NestedField.optional(18, "y", Types.IntegerType.get()) )) ) ); Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table")); record.put("id", 34L); Record pointRecord = new Record(AvroSchemaUtil.fromOption( AvroSchemaUtil.fromOption(record.getSchema().getField("point").schema()).getElementType())); pointRecord.put("x", 1); pointRecord.put("y", 2); record.put("point", ImmutableList.of(pointRecord)); NameMapping nameMapping = MappingUtil.create(new Schema( // Optional array field missing. Types.NestedField.required(0, "id", Types.LongType.get()))); Schema readSchema = writeSchema; Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping); Assert.assertNotNull("Field missing from table mapping is renamed", projected.getSchema().getField("point_r22")); Assert.assertNull("point field is not projected", projected.get("point_r22")); Assert.assertEquals(34L, projected.get("id")); // point array is partially projected nameMapping = MappingUtil.create(new Schema( Types.NestedField.required(0, "id", Types.LongType.get()), Types.NestedField.optional(22, "point", Types.ListType.ofOptional(21, Types.StructType.of( Types.NestedField.required(19, "x", Types.IntegerType.get()))) ) )); projected = writeAndRead(writeSchema, readSchema, record, nameMapping); Record point = ((List<Record>) projected.get("point")).get(0); Assert.assertNotNull("Field missing from table mapping is renamed", point.getSchema().getField("y_r18")); Assert.assertEquals("point.x is projected", 1, point.get("x")); Assert.assertNull("point.y is not projected", point.get("y_r18")); Assert.assertEquals(34L, projected.get("id")); } @Test public void testAliases() throws IOException { Schema writeSchema = new Schema( Types.NestedField.optional(22, "points", Types.ListType.ofOptional(21, Types.StructType.of( Types.NestedField.required(19, "x", Types.IntegerType.get()))))); Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table")); Record pointRecord = new Record(AvroSchemaUtil.fromOption( AvroSchemaUtil.fromOption(record.getSchema().getField("points").schema()).getElementType())); pointRecord.put("x", 1); record.put("points", ImmutableList.of(pointRecord)); NameMapping nameMapping = NameMapping.of( MappedFields.of( MappedField.of(22, "points", MappedFields.of( MappedField.of(21, "element", MappedFields.of( MappedField.of(19, Lists.newArrayList("x")))))))); Schema readSchema = new Schema( Types.NestedField.optional(22, "points", Types.ListType.ofOptional(21, Types.StructType.of( // x renamed to y Types.NestedField.required(19, "y", Types.IntegerType.get()))))); Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping); Assert.assertEquals("x is read as y", 1, ((List<Record>) projected.get("points")).get(0).get("y")); readSchema = new Schema( Types.NestedField.optional(22, "points", Types.ListType.ofOptional(21, Types.StructType.of( // x renamed to z Types.NestedField.required(19, "z", Types.IntegerType.get()))))); projected 
= writeAndRead(writeSchema, readSchema, record, nameMapping); Assert.assertEquals("x is read as z", 1, ((List<Record>) projected.get("points")).get(0).get("z")); } @Test public void testInferredMapping() throws IOException { Schema writeSchema = new Schema( Types.NestedField.required(0, "id", Types.LongType.get()), Types.NestedField.optional(1, "data", Types.StringType.get())); Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table")); record.put("id", 34L); record.put("data", "data"); Schema readSchema = writeSchema; // Pass null for nameMapping so that it is automatically inferred from read schema Record projected = writeAndRead(writeSchema, readSchema, record, null); Assert.assertEquals(record, projected); } @Override protected Record writeAndRead(String desc, Schema writeSchema, Schema readSchema, Record inputRecord) throws IOException { // Use all existing TestAvroReadProjection tests to verify that // we get the same projected Avro record whether we use // NameMapping together with file schema without field-ids or we // use a file schema having field-ids Record record = super.writeAndRead(desc, writeSchema, readSchema, inputRecord); Record projectedWithNameMapping = writeAndRead( writeSchema, readSchema, inputRecord, MappingUtil.create(writeSchema)); Assert.assertEquals(record, projectedWithNameMapping); return record; } private Record writeAndRead(Schema writeSchema, Schema readSchema, Record record, NameMapping nameMapping) throws IOException { File file = temp.newFile(); // Write without file ids org.apache.avro.Schema writeAvroSchema = RemoveIds.removeIds(writeSchema); DatumWriter<Record> datumWriter = new GenericDatumWriter<>(writeAvroSchema); try (DataFileWriter<Record> dataFileWriter = new DataFileWriter<>(datumWriter)) { dataFileWriter.create(writeAvroSchema, file); dataFileWriter.append(record); } Iterable<GenericData.Record> records = Avro.read(Files.localInput(file)) .project(readSchema) .nameMapping(nameMapping) .build(); return Iterables.getOnlyElement(records); } }
1
14,497
Why did this need to change?
apache-iceberg
java
@@ -0,0 +1,14 @@ +// Copyright (c) 2019 IoTeX Foundation +// This is an alpha (internal) release and is not suitable for production. This source code is provided 'as is' and no +// warranties are given as to title or non-infringement, merchantability or fitness for purpose and, to the extent +// permitted by law, all liability for your use of the code is disclaimed. This source code is governed by Apache +// License 2.0 that can be found in the LICENSE file. + +package factory + +import "github.com/iotexproject/iotex-core/action/protocol" + +// BranchDB defines the interface of an branch state db, which could start a new transaction +type BranchDB interface { + NewTransaction() (protocol.Transaction, error) +}
1
1
19,034
/go/pkg/mod/golang.org/x/[email protected]/adaptor_go1_13.go:16:21: Frame not declared by package errors (from `typecheck`)
iotexproject-iotex-core
go
@@ -128,6 +128,7 @@ public class ZkStateReader implements SolrCloseable { public static final String CONFIGS_ZKNODE = "/configs"; public final static String CONFIGNAME_PROP = "configName"; + public final static String COLLECTION_CONFIG_PROP = "collection.configName"; public static final String SAMPLE_PERCENTAGE = "samplePercentage";
1
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.common.cloud; import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import java.util.function.UnaryOperator; import java.util.stream.Collectors; import org.apache.solr.common.AlreadyClosedException; import org.apache.solr.common.Callable; import org.apache.solr.common.SolrCloseable; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.common.util.Pair; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.Utils; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NoNodeException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.Watcher.Event.EventType; import org.apache.zookeeper.data.Stat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.util.Collections.EMPTY_MAP; import static java.util.Collections.emptySortedSet; import static org.apache.solr.common.cloud.UrlScheme.HTTP; import static org.apache.solr.common.util.Utils.fromJSON; public class ZkStateReader implements SolrCloseable { public static final int STATE_UPDATE_DELAY = Integer.getInteger("solr.OverseerStateUpdateDelay", 2000); // delay between cloud state updates private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public static final String BASE_URL_PROP = "base_url"; public static final String NODE_NAME_PROP = "node_name"; public static final String CORE_NODE_NAME_PROP = "core_node_name"; public static final String ROLES_PROP = "roles"; public static final String STATE_PROP = "state"; // if this flag equals to false and the replica does not 
exist in cluster state, set state op become no op (default is true) public static final String FORCE_SET_STATE_PROP = "force_set_state"; /** * SolrCore name. */ public static final String CORE_NAME_PROP = "core"; public static final String COLLECTION_PROP = "collection"; public static final String ELECTION_NODE_PROP = "election_node"; public static final String SHARD_ID_PROP = "shard"; public static final String REPLICA_PROP = "replica"; public static final String SHARD_RANGE_PROP = "shard_range"; public static final String SHARD_STATE_PROP = "shard_state"; public static final String SHARD_PARENT_PROP = "shard_parent"; public static final String NUM_SHARDS_PROP = "numShards"; public static final String LEADER_PROP = "leader"; public static final String SHARED_STORAGE_PROP = "shared_storage"; public static final String PROPERTY_PROP = "property"; public static final String PROPERTY_PROP_PREFIX = "property."; public static final String PROPERTY_VALUE_PROP = "property.value"; public static final String MAX_AT_ONCE_PROP = "maxAtOnce"; public static final String MAX_WAIT_SECONDS_PROP = "maxWaitSeconds"; public static final String STATE_TIMESTAMP_PROP = "stateTimestamp"; public static final String COLLECTIONS_ZKNODE = "/collections"; public static final String LIVE_NODES_ZKNODE = "/live_nodes"; public static final String ALIASES = "/aliases.json"; /** * This ZooKeeper file is no longer used starting with Solr 9 but keeping the name around to check if it * is still present and non empty (in case of upgrade from previous Solr version). It used to contain collection * state for all collections in the cluster. */ public static final String UNSUPPORTED_CLUSTER_STATE = "/clusterstate.json"; public static final String CLUSTER_PROPS = "/clusterprops.json"; public static final String COLLECTION_PROPS_ZKNODE = "collectionprops.json"; public static final String REJOIN_AT_HEAD_PROP = "rejoinAtHead"; public static final String SOLR_SECURITY_CONF_PATH = "/security.json"; public static final String SOLR_PKGS_PATH = "/packages.json"; public static final String DEFAULT_SHARD_PREFERENCES = "defaultShardPreferences"; public static final String REPLICATION_FACTOR = "replicationFactor"; public static final String MAX_CORES_PER_NODE = "maxCoresPerNode"; public static final String PULL_REPLICAS = "pullReplicas"; public static final String NRT_REPLICAS = "nrtReplicas"; public static final String TLOG_REPLICAS = "tlogReplicas"; public static final String READ_ONLY = "readOnly"; public static final String ROLES = "/roles.json"; public static final String CONFIGS_ZKNODE = "/configs"; public final static String CONFIGNAME_PROP = "configName"; public static final String SAMPLE_PERCENTAGE = "samplePercentage"; /** * @deprecated use {@link org.apache.solr.common.params.CollectionAdminParams#DEFAULTS} instead. */ @Deprecated public static final String COLLECTION_DEF = "collectionDefaults"; public static final String URL_SCHEME = "urlScheme"; private static final String SOLR_ENVIRONMENT = "environment"; public static final String REPLICA_TYPE = "type"; public static final String CONTAINER_PLUGINS = "plugin"; public static final String PLACEMENT_PLUGIN = "placement-plugin"; /** * A view of the current state of all collections. 
*/ protected volatile ClusterState clusterState; private static final int GET_LEADER_RETRY_INTERVAL_MS = 50; private static final int GET_LEADER_RETRY_DEFAULT_TIMEOUT = Integer.parseInt(System.getProperty("zkReaderGetLeaderRetryTimeoutMs", "4000")); ; public static final String LEADER_ELECT_ZKNODE = "leader_elect"; public static final String SHARD_LEADERS_ZKNODE = "leaders"; public static final String ELECTION_NODE = "election"; /** * "Interesting" and actively watched Collections. */ private final ConcurrentHashMap<String, DocCollection> watchedCollectionStates = new ConcurrentHashMap<>(); /** * "Interesting" but not actively watched Collections. */ private final ConcurrentHashMap<String, LazyCollectionRef> lazyCollectionStates = new ConcurrentHashMap<>(); /** * Collection properties being actively watched */ private final ConcurrentHashMap<String, VersionedCollectionProps> watchedCollectionProps = new ConcurrentHashMap<>(); /** * Watchers of Collection properties */ private final ConcurrentHashMap<String, PropsWatcher> collectionPropsWatchers = new ConcurrentHashMap<>(); private volatile SortedSet<String> liveNodes = emptySortedSet(); private volatile Map<String, Object> clusterProperties = Collections.emptyMap(); private final ZkConfigManager configManager; private ConfigData securityData; private final Runnable securityNodeListener; private ConcurrentHashMap<String, CollectionWatch<DocCollectionWatcher>> collectionWatches = new ConcurrentHashMap<>(); // named this observers so there's less confusion between CollectionPropsWatcher map and the PropsWatcher map. private ConcurrentHashMap<String, CollectionWatch<CollectionPropsWatcher>> collectionPropsObservers = new ConcurrentHashMap<>(); private Set<CloudCollectionsListener> cloudCollectionsListeners = ConcurrentHashMap.newKeySet(); private final ExecutorService notifications = ExecutorUtil.newMDCAwareCachedThreadPool("watches"); private Set<LiveNodesListener> liveNodesListeners = ConcurrentHashMap.newKeySet(); private Set<ClusterPropertiesListener> clusterPropertiesListeners = ConcurrentHashMap.newKeySet(); /** * Used to submit notifications to Collection Properties watchers in order **/ private final ExecutorService collectionPropsNotifications = ExecutorUtil.newMDCAwareSingleThreadExecutor(new SolrNamedThreadFactory("collectionPropsNotifications")); private static final long LAZY_CACHE_TIME = TimeUnit.NANOSECONDS.convert(STATE_UPDATE_DELAY, TimeUnit.MILLISECONDS); private Future<?> collectionPropsCacheCleaner; // only kept to identify if the cleaner has already been started. private static class CollectionWatch<T> { int coreRefCount = 0; Set<T> stateWatchers = ConcurrentHashMap.newKeySet(); public boolean canBeRemoved() { return coreRefCount + stateWatchers.size() == 0; } } public static final Set<String> KNOWN_CLUSTER_PROPS = Set.of( URL_SCHEME, CoreAdminParams.BACKUP_LOCATION, DEFAULT_SHARD_PREFERENCES, MAX_CORES_PER_NODE, SAMPLE_PERCENTAGE, SOLR_ENVIRONMENT, CollectionAdminParams.DEFAULTS, CONTAINER_PLUGINS, PLACEMENT_PLUGIN ); /** * Returns config set name for collection. * TODO move to DocCollection (state.json). 
* * @param collection to return config set name for */ public String readConfigName(String collection) throws KeeperException { String configName = null; String path = COLLECTIONS_ZKNODE + "/" + collection; log.debug("Loading collection config from: [{}]", path); try { byte[] data = zkClient.getData(path, null, null, true); if (data == null) { log.warn("No config data found at path {}.", path); throw new KeeperException.NoNodeException("No config data found at path: " + path); } ZkNodeProps props = ZkNodeProps.load(data); configName = props.getStr(CONFIGNAME_PROP); if (configName == null) { log.warn("No config data found at path{}. ", path); throw new KeeperException.NoNodeException("No config data found at path: " + path); } } catch (InterruptedException e) { SolrZkClient.checkInterrupted(e); log.warn("Thread interrupted when loading config name for collection {}", collection); throw new SolrException(ErrorCode.SERVER_ERROR, "Thread interrupted when loading config name for collection " + collection, e); } return configName; } private final SolrZkClient zkClient; private final boolean closeClient; private volatile boolean closed = false; private Set<CountDownLatch> waitLatches = ConcurrentHashMap.newKeySet(); public ZkStateReader(SolrZkClient zkClient) { this(zkClient, null); } public ZkStateReader(SolrZkClient zkClient, Runnable securityNodeListener) { this.zkClient = zkClient; this.configManager = new ZkConfigManager(zkClient); this.closeClient = false; this.securityNodeListener = securityNodeListener; assert ObjectReleaseTracker.track(this); } public ZkStateReader(String zkServerAddress, int zkClientTimeout, int zkClientConnectTimeout) { this.zkClient = new SolrZkClient(zkServerAddress, zkClientTimeout, zkClientConnectTimeout, // on reconnect, reload cloud info new OnReconnect() { @Override public void command() { try { ZkStateReader.this.createClusterStateWatchersAndUpdate(); } catch (KeeperException e) { log.error("A ZK error has occurred", e); throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); } catch (InterruptedException e) { // Restore the interrupted status Thread.currentThread().interrupt(); log.error("Interrupted", e); throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted", e); } } }); this.configManager = new ZkConfigManager(zkClient); this.closeClient = true; this.securityNodeListener = null; assert ObjectReleaseTracker.track(this); } public ZkConfigManager getConfigManager() { return configManager; } /** * Forcibly refresh cluster state from ZK. Do this only to avoid race conditions because it's expensive. * <p> * It is cheaper to call {@link #forceUpdateCollection(String)} on a single collection if you must. * * @lucene.internal */ public void forciblyRefreshAllClusterStateSlow() throws KeeperException, InterruptedException { synchronized (getUpdateLock()) { if (clusterState == null) { // Never initialized, just run normal initialization. createClusterStateWatchersAndUpdate(); return; } // No need to set watchers because we should already have watchers registered for everything. refreshCollectionList(null); refreshLiveNodes(null); // Need a copy so we don't delete from what we're iterating over. 
Collection<String> safeCopy = new ArrayList<>(watchedCollectionStates.keySet()); Set<String> updatedCollections = new HashSet<>(); for (String coll : safeCopy) { DocCollection newState = fetchCollectionState(coll, null); if (updateWatchedCollection(coll, newState)) { updatedCollections.add(coll); } } constructState(updatedCollections); } } /** * Forcibly refresh a collection's internal state from ZK. Try to avoid having to resort to this when * a better design is possible. */ //TODO shouldn't we call ZooKeeper.sync() at the right places to prevent reading a stale value? We do so for aliases. public void forceUpdateCollection(String collection) throws KeeperException, InterruptedException { synchronized (getUpdateLock()) { if (clusterState == null) { log.warn("ClusterState watchers have not been initialized"); return; } ClusterState.CollectionRef ref = clusterState.getCollectionRef(collection); if (ref == null) { // We either don't know anything about this collection (maybe it's new?). // see if it just got created. LazyCollectionRef tryLazyCollection = new LazyCollectionRef(collection); if (tryLazyCollection.get() != null) { // What do you know, it exists! log.debug("Adding lazily-loaded reference for collection {}", collection); lazyCollectionStates.putIfAbsent(collection, tryLazyCollection); constructState(Collections.singleton(collection)); } } else if (ref.isLazilyLoaded()) { log.debug("Refreshing lazily-loaded state for collection {}", collection); if (ref.get() != null) { return; } } else if (watchedCollectionStates.containsKey(collection)) { // Exists as a watched collection, force a refresh. log.debug("Forcing refresh of watched collection state for {}", collection); DocCollection newState = fetchCollectionState(collection, null); if (updateWatchedCollection(collection, newState)) { constructState(Collections.singleton(collection)); } } else { log.error("Collection {} is not lazy nor watched!", collection); } } } /** * Refresh the set of live nodes. */ public void updateLiveNodes() throws KeeperException, InterruptedException { refreshLiveNodes(null); } public Integer compareStateVersions(String coll, int version) { DocCollection collection = clusterState.getCollectionOrNull(coll); if (collection == null) return null; if (collection.getZNodeVersion() < version) { if (log.isDebugEnabled()) { log.debug("Server older than client {}<{}", collection.getZNodeVersion(), version); } DocCollection nu = getCollectionLive(this, coll); if (nu == null) return -1; if (nu.getZNodeVersion() > collection.getZNodeVersion()) { if (updateWatchedCollection(coll, nu)) { synchronized (getUpdateLock()) { constructState(Collections.singleton(coll)); } } collection = nu; } } if (collection.getZNodeVersion() == version) { return null; } if (log.isDebugEnabled()) { log.debug("Wrong version from client [{}]!=[{}]", version, collection.getZNodeVersion()); } return collection.getZNodeVersion(); } @SuppressWarnings({"unchecked"}) public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException, InterruptedException { // We need to fetch the current cluster state and the set of live nodes log.debug("Updating cluster state from ZooKeeper... "); try { // on reconnect of SolrZkClient force refresh and re-add watches. 
loadClusterProperties(); refreshLiveNodes(new LiveNodeWatcher()); refreshCollections(); refreshCollectionList(new CollectionsChildWatcher()); refreshAliases(aliasesManager); if (securityNodeListener != null) { addSecurityNodeWatcher(pair -> { ConfigData cd = new ConfigData(); cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false); cd.version = pair.second() == null ? -1 : pair.second().getVersion(); securityData = cd; securityNodeListener.run(); }); securityData = getSecurityProps(true); } collectionPropsObservers.forEach((k, v) -> { collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true); }); } catch (KeeperException.NoNodeException nne) { throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready"); } } private void addSecurityNodeWatcher(final Callable<Pair<byte[], Stat>> callback) throws KeeperException, InterruptedException { zkClient.exists(SOLR_SECURITY_CONF_PATH, new Watcher() { @Override public void process(WatchedEvent event) { // session events are not change events, and do not remove the watcher if (EventType.None.equals(event.getType())) { return; } try { synchronized (ZkStateReader.this.getUpdateLock()) { log.debug("Updating [{}] ... ", SOLR_SECURITY_CONF_PATH); // remake watch final Stat stat = new Stat(); byte[] data = "{}".getBytes(StandardCharsets.UTF_8); if (EventType.NodeDeleted.equals(event.getType())) { // Node deleted, just recreate watch without attempting a read - SOLR-9679 getZkClient().exists(SOLR_SECURITY_CONF_PATH, this, true); } else { data = getZkClient().getData(SOLR_SECURITY_CONF_PATH, this, stat, true); } try { callback.call(new Pair<>(data, stat)); } catch (Exception e) { log.error("Error running collections node listener", e); } } } catch (KeeperException.ConnectionLossException | KeeperException.SessionExpiredException e) { log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e); } catch (KeeperException e) { log.error("A ZK error has occurred", e); throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "", e); } catch (InterruptedException e) { // Restore the interrupted status Thread.currentThread().interrupt(); log.warn("Interrupted", e); } } }, true); } /** * Construct the total state view from all sources. * Must hold {@link #getUpdateLock()} before calling this. * * @param changedCollections collections that have changed since the last call, * and that should fire notifications */ private void constructState(Set<String> changedCollections) { Set<String> liveNodes = this.liveNodes; // volatile read Map<String, ClusterState.CollectionRef> result = new LinkedHashMap<>(); // Add collections for (Map.Entry<String, DocCollection> entry : watchedCollectionStates.entrySet()) { result.put(entry.getKey(), new ClusterState.CollectionRef(entry.getValue())); } // Finally, add any lazy collections that aren't already accounted for. 
for (Map.Entry<String, LazyCollectionRef> entry : lazyCollectionStates.entrySet()) { result.putIfAbsent(entry.getKey(), entry.getValue()); } this.clusterState = new ClusterState(result, liveNodes); if (log.isDebugEnabled()) { log.debug("clusterStateSet: interesting [{}] watched [{}] lazy [{}] total [{}]", collectionWatches.keySet().size(), watchedCollectionStates.keySet().size(), lazyCollectionStates.keySet().size(), clusterState.getCollectionStates().size()); } if (log.isTraceEnabled()) { log.trace("clusterStateSet: interesting [{}] watched [{}] lazy [{}] total [{}]", collectionWatches.keySet(), watchedCollectionStates.keySet(), lazyCollectionStates.keySet(), clusterState.getCollectionStates()); } notifyCloudCollectionsListeners(); for (String collection : changedCollections) { notifyStateWatchers(collection, clusterState.getCollectionOrNull(collection)); } } /** * Refresh collections. */ private void refreshCollections() { for (String coll : collectionWatches.keySet()) { new StateWatcher(coll).refreshAndWatch(); } } // We don't get a Stat or track versions on getChildren() calls, so force linearization. private final Object refreshCollectionListLock = new Object(); /** * Search for any lazy-loadable collections. */ private void refreshCollectionList(Watcher watcher) throws KeeperException, InterruptedException { synchronized (refreshCollectionListLock) { List<String> children = null; try { children = zkClient.getChildren(COLLECTIONS_ZKNODE, watcher, true); } catch (KeeperException.NoNodeException e) { log.warn("Error fetching collection names: ", e); // fall through } if (children == null || children.isEmpty()) { lazyCollectionStates.clear(); return; } // Don't lock getUpdateLock() here, we don't need it and it would cause deadlock. // Don't mess with watchedCollections, they should self-manage. // First, drop any children that disappeared. this.lazyCollectionStates.keySet().retainAll(children); for (String coll : children) { // We will create an eager collection for any interesting collections, so don't add to lazy. if (!collectionWatches.containsKey(coll)) { // Double check contains just to avoid allocating an object. LazyCollectionRef existing = lazyCollectionStates.get(coll); if (existing == null) { lazyCollectionStates.putIfAbsent(coll, new LazyCollectionRef(coll)); } } } } } // We don't get a Stat or track versions on getChildren() calls, so force linearization. private final Object refreshCollectionsSetLock = new Object(); // Ensures that only the latest getChildren fetch gets applied. private final AtomicReference<Set<String>> lastFetchedCollectionSet = new AtomicReference<>(); /** * Register a CloudCollectionsListener to be called when the set of collections within a cloud changes. */ public void registerCloudCollectionsListener(CloudCollectionsListener cloudCollectionsListener) { cloudCollectionsListeners.add(cloudCollectionsListener); notifyNewCloudCollectionsListener(cloudCollectionsListener); } /** * Remove a registered CloudCollectionsListener. 
*/ public void removeCloudCollectionsListener(CloudCollectionsListener cloudCollectionsListener) { cloudCollectionsListeners.remove(cloudCollectionsListener); } private void notifyNewCloudCollectionsListener(CloudCollectionsListener listener) { listener.onChange(Collections.emptySet(), lastFetchedCollectionSet.get()); } private void notifyCloudCollectionsListeners() { notifyCloudCollectionsListeners(false); } private void notifyCloudCollectionsListeners(boolean notifyIfSame) { synchronized (refreshCollectionsSetLock) { final Set<String> newCollections = getCurrentCollections(); final Set<String> oldCollections = lastFetchedCollectionSet.getAndSet(newCollections); if (!newCollections.equals(oldCollections) || notifyIfSame) { cloudCollectionsListeners.forEach(listener -> listener.onChange(oldCollections, newCollections)); } } } private Set<String> getCurrentCollections() { Set<String> collections = new HashSet<>(); collections.addAll(watchedCollectionStates.keySet()); collections.addAll(lazyCollectionStates.keySet()); return collections; } private class LazyCollectionRef extends ClusterState.CollectionRef { private final String collName; private volatile long lastUpdateTime; private DocCollection cachedDocCollection; public LazyCollectionRef(String collName) { super(null); this.collName = collName; this.lastUpdateTime = -1; } @Override public synchronized DocCollection get(boolean allowCached) { gets.incrementAndGet(); if (!allowCached || lastUpdateTime < 0 || System.nanoTime() - lastUpdateTime > LAZY_CACHE_TIME) { boolean shouldFetch = true; if (cachedDocCollection != null) { Stat freshStats = null; try { freshStats = zkClient.exists(getCollectionPath(collName), null, true); } catch (Exception e) { } if (freshStats != null && !cachedDocCollection.isModified(freshStats.getVersion(), freshStats.getCversion())) { shouldFetch = false; } } if (shouldFetch) { cachedDocCollection = getCollectionLive(ZkStateReader.this, collName); lastUpdateTime = System.nanoTime(); } } return cachedDocCollection; } @Override public boolean isLazilyLoaded() { return true; } @Override public String toString() { return "LazyCollectionRef(" + collName + ")"; } } // We don't get a Stat or track versions on getChildren() calls, so force linearization. private final Object refreshLiveNodesLock = new Object(); // Ensures that only the latest getChildren fetch gets applied. private final AtomicReference<SortedSet<String>> lastFetchedLiveNodes = new AtomicReference<>(); /** * Refresh live_nodes. */ private void refreshLiveNodes(Watcher watcher) throws KeeperException, InterruptedException { synchronized (refreshLiveNodesLock) { SortedSet<String> newLiveNodes; try { List<String> nodeList = zkClient.getChildren(LIVE_NODES_ZKNODE, watcher, true); newLiveNodes = new TreeSet<>(nodeList); } catch (KeeperException.NoNodeException e) { newLiveNodes = emptySortedSet(); } lastFetchedLiveNodes.set(newLiveNodes); } // Can't lock getUpdateLock() until we release the other, it would cause deadlock. SortedSet<String> oldLiveNodes, newLiveNodes; synchronized (getUpdateLock()) { newLiveNodes = lastFetchedLiveNodes.getAndSet(null); if (newLiveNodes == null) { // Someone else won the race to apply the last update, just exit. return; } oldLiveNodes = this.liveNodes; this.liveNodes = newLiveNodes; if (clusterState != null) { clusterState.setLiveNodes(newLiveNodes); } } if (oldLiveNodes.size() != newLiveNodes.size()) { if (log.isInfoEnabled()) { log.info("Updated live nodes from ZooKeeper... 
({}) -> ({})", oldLiveNodes.size(), newLiveNodes.size()); } } if (log.isDebugEnabled()) { log.debug("Updated live nodes from ZooKeeper... {} -> {}", oldLiveNodes, newLiveNodes); } if (!oldLiveNodes.equals(newLiveNodes)) { // fire listeners liveNodesListeners.forEach(listener -> { if (listener.onChange(new TreeSet<>(oldLiveNodes), new TreeSet<>(newLiveNodes))) { removeLiveNodesListener(listener); } }); } } public void registerClusterPropertiesListener(ClusterPropertiesListener listener) { // fire it once with current properties if (listener.onChange(getClusterProperties())) { removeClusterPropertiesListener(listener); } else { clusterPropertiesListeners.add(listener); } } public void removeClusterPropertiesListener(ClusterPropertiesListener listener) { clusterPropertiesListeners.remove(listener); } public void registerLiveNodesListener(LiveNodesListener listener) { // fire it once with current live nodes if (listener.onChange(new TreeSet<>(getClusterState().getLiveNodes()), new TreeSet<>(getClusterState().getLiveNodes()))) { removeLiveNodesListener(listener); } liveNodesListeners.add(listener); } public void removeLiveNodesListener(LiveNodesListener listener) { liveNodesListeners.remove(listener); } /** * @return information about the cluster from ZooKeeper */ public ClusterState getClusterState() { return clusterState; } public Object getUpdateLock() { return this; } public void close() { this.closed = true; notifications.shutdownNow(); waitLatches.parallelStream().forEach(c -> { c.countDown(); }); ExecutorUtil.shutdownAndAwaitTermination(notifications); ExecutorUtil.shutdownAndAwaitTermination(collectionPropsNotifications); if (closeClient) { zkClient.close(); } assert ObjectReleaseTracker.release(this); } @Override public boolean isClosed() { return closed; } public String getLeaderUrl(String collection, String shard, int timeout) throws InterruptedException { Replica replica = getLeaderRetry(collection, shard, timeout); if (replica == null || replica.getBaseUrl() == null) { return null; } ZkCoreNodeProps props = new ZkCoreNodeProps(replica); return props.getCoreUrl(); } public Replica getLeader(Set<String> liveNodes, DocCollection docCollection, String shard) { Replica replica = docCollection != null ? docCollection.getLeader(shard) : null; if (replica != null && liveNodes.contains(replica.getNodeName())) { return replica; } return null; } public Replica getLeader(String collection, String shard) { if (clusterState != null) { DocCollection docCollection = clusterState.getCollectionOrNull(collection); Replica replica = docCollection != null ? docCollection.getLeader(shard) : null; if (replica != null && getClusterState().liveNodesContain(replica.getNodeName())) { return replica; } } return null; } public boolean isNodeLive(String node) { return liveNodes.contains(node); } /** * Get shard leader properties, with retry if none exist. */ public Replica getLeaderRetry(String collection, String shard) throws InterruptedException { return getLeaderRetry(collection, shard, GET_LEADER_RETRY_DEFAULT_TIMEOUT); } /** * Get shard leader properties, with retry if none exist. 
*/ public Replica getLeaderRetry(String collection, String shard, int timeout) throws InterruptedException { AtomicReference<DocCollection> coll = new AtomicReference<>(); AtomicReference<Replica> leader = new AtomicReference<>(); try { waitForState(collection, timeout, TimeUnit.MILLISECONDS, (n, c) -> { if (c == null) return false; coll.set(c); Replica l = getLeader(n, c, shard); if (l != null) { log.debug("leader found for {}/{} to be {}", collection, shard, l); leader.set(l); return true; } return false; }); } catch (TimeoutException e) { throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "No registered leader was found after waiting for " + timeout + "ms " + ", collection: " + collection + " slice: " + shard + " saw state=" + clusterState.getCollectionOrNull(collection) + " with live_nodes=" + clusterState.getLiveNodes()); } return leader.get(); } /** * Get path where shard leader properties live in zookeeper. */ public static String getShardLeadersPath(String collection, String shardId) { return COLLECTIONS_ZKNODE + "/" + collection + "/" + SHARD_LEADERS_ZKNODE + (shardId != null ? ("/" + shardId) : "") + "/leader"; } /** * Get path where shard leader elections ephemeral nodes are. */ public static String getShardLeadersElectPath(String collection, String shardId) { return COLLECTIONS_ZKNODE + "/" + collection + "/" + LEADER_ELECT_ZKNODE + (shardId != null ? ("/" + shardId + "/" + ELECTION_NODE) : ""); } public List<ZkCoreNodeProps> getReplicaProps(String collection, String shardId, String thisCoreNodeName) { return getReplicaProps(collection, shardId, thisCoreNodeName, null); } public List<ZkCoreNodeProps> getReplicaProps(String collection, String shardId, String thisCoreNodeName, Replica.State mustMatchStateFilter) { return getReplicaProps(collection, shardId, thisCoreNodeName, mustMatchStateFilter, null); } public List<ZkCoreNodeProps> getReplicaProps(String collection, String shardId, String thisCoreNodeName, Replica.State mustMatchStateFilter, Replica.State mustNotMatchStateFilter) { //TODO: We don't need all these getReplicaProps method overloading. 
Also, it's odd that the default is to return replicas of type TLOG and NRT only return getReplicaProps(collection, shardId, thisCoreNodeName, mustMatchStateFilter, null, EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)); } public List<ZkCoreNodeProps> getReplicaProps(String collection, String shardId, String thisCoreNodeName, Replica.State mustMatchStateFilter, Replica.State mustNotMatchStateFilter, final EnumSet<Replica.Type> acceptReplicaType) { assert thisCoreNodeName != null; ClusterState clusterState = this.clusterState; if (clusterState == null) { return null; } final DocCollection docCollection = clusterState.getCollectionOrNull(collection); if (docCollection == null || docCollection.getSlicesMap() == null) { throw new ZooKeeperException(ErrorCode.BAD_REQUEST, "Could not find collection in zk: " + collection); } Map<String, Slice> slices = docCollection.getSlicesMap(); Slice replicas = slices.get(shardId); if (replicas == null) { throw new ZooKeeperException(ErrorCode.BAD_REQUEST, "Could not find shardId in zk: " + shardId); } Map<String, Replica> shardMap = replicas.getReplicasMap(); List<ZkCoreNodeProps> nodes = new ArrayList<>(shardMap.size()); for (Entry<String, Replica> entry : shardMap.entrySet().stream().filter((e) -> acceptReplicaType.contains(e.getValue().getType())).collect(Collectors.toList())) { ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(entry.getValue()); String coreNodeName = entry.getValue().getName(); if (clusterState.liveNodesContain(nodeProps.getNodeName()) && !coreNodeName.equals(thisCoreNodeName)) { if (mustMatchStateFilter == null || mustMatchStateFilter == Replica.State.getState(nodeProps.getState())) { if (mustNotMatchStateFilter == null || mustNotMatchStateFilter != Replica.State.getState(nodeProps.getState())) { nodes.add(nodeProps); } } } } if (nodes.size() == 0) { // no replicas return null; } return nodes; } public SolrZkClient getZkClient() { return zkClient; } /** * Get a cluster property * <p> * N.B. Cluster properties are updated via ZK watchers, and so may not necessarily * be completely up-to-date. If you need to get the latest version, then use a * {@link ClusterProperties} instance. * * @param key the property to read * @param defaultValue a default value to use if no such property exists * @param <T> the type of the property * @return the cluster property, or a default if the property is not set */ @SuppressWarnings("unchecked") public <T> T getClusterProperty(String key, T defaultValue) { T value = (T) Utils.getObjectByPath(clusterProperties, false, key); if (value == null) return defaultValue; return value; } /** * Same as the above but allows a full json path as a list of parts * * @param keyPath path to the property example ["collectionDefauls", "numShards"] * @param defaultValue a default value to use if no such property exists * @return the cluster property, or a default if the property is not set */ @SuppressWarnings({"unchecked"}) public <T> T getClusterProperty(List<String> keyPath, T defaultValue) { T value = (T) Utils.getObjectByPath(clusterProperties, false, keyPath); if (value == null) return defaultValue; return value; } /** * Get all cluster properties for this cluster * <p> * N.B. Cluster properties are updated via ZK watchers, and so may not necessarily * be completely up-to-date. If you need to get the latest version, then use a * {@link ClusterProperties} instance. 
* * @return a Map of cluster properties */ public Map<String, Object> getClusterProperties() { return Collections.unmodifiableMap(clusterProperties); } private final Watcher clusterPropertiesWatcher = event -> { // session events are not change events, and do not remove the watcher if (Watcher.Event.EventType.None.equals(event.getType())) { return; } loadClusterProperties(); }; @SuppressWarnings("unchecked") private void loadClusterProperties() { try { while (true) { try { byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, new Stat(), true); this.clusterProperties = ClusterProperties.convertCollectionDefaultsToNestedFormat((Map<String, Object>) Utils.fromJSON(data)); log.debug("Loaded cluster properties: {}", this.clusterProperties); // Make the urlScheme globally accessible UrlScheme.INSTANCE.setUrlScheme(getClusterProperty(ZkStateReader.URL_SCHEME, HTTP)); for (ClusterPropertiesListener listener : clusterPropertiesListeners) { listener.onChange(getClusterProperties()); } return; } catch (KeeperException.NoNodeException e) { this.clusterProperties = Collections.emptyMap(); log.debug("Loaded empty cluster properties"); // set an exists watch, and if the node has been created since the last call, // read the data again if (zkClient.exists(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, true) == null) return; } } } catch (KeeperException | InterruptedException e) { log.error("Error reading cluster properties from zookeeper", SolrZkClient.checkInterrupted(e)); } } /** * Get collection properties for a given collection. If the collection is watched, simply return it from the cache, * otherwise fetch it directly from zookeeper. This is a convenience for {@code getCollectionProperties(collection,0)} * * @param collection the collection for which properties are desired * @return a map representing the key/value properties for the collection. */ public Map<String, String> getCollectionProperties(final String collection) { return getCollectionProperties(collection, 0); } /** * Get and cache collection properties for a given collection. If the collection is watched, or still cached * simply return it from the cache, otherwise fetch it directly from zookeeper and retain the value for at * least cacheForMillis milliseconds. Cached properties are watched in zookeeper and updated automatically. * This version of {@code getCollectionProperties} should be used when properties need to be consulted * frequently in the absence of an active {@link CollectionPropsWatcher}. * * @param collection The collection for which properties are desired * @param cacheForMillis The minimum number of milliseconds to maintain a cache for the specified collection's * properties. Setting a {@code CollectionPropsWatcher} will override this value and retain * the cache for the life of the watcher. A lack of changes in zookeeper may allow the * caching to remain for a greater duration up to the cycle time of {@link CacheCleaner}. * Passing zero for this value will explicitly remove the cached copy if and only if it is * due to expire and no watch exists. Any positive value will extend the expiration time * if required. * @return a map representing the key/value properties for the collection. */ public Map<String, String> getCollectionProperties(final String collection, long cacheForMillis) { synchronized (watchedCollectionProps) { // making decisions based on the result of a get... 
Watcher watcher = null; if (cacheForMillis > 0) { watcher = collectionPropsWatchers.compute(collection, (c, w) -> w == null ? new PropsWatcher(c, cacheForMillis) : w.renew(cacheForMillis)); } VersionedCollectionProps vprops = watchedCollectionProps.get(collection); boolean haveUnexpiredProps = vprops != null && vprops.cacheUntilNs > System.nanoTime(); long untilNs = System.nanoTime() + TimeUnit.NANOSECONDS.convert(cacheForMillis, TimeUnit.MILLISECONDS); Map<String, String> properties; if (haveUnexpiredProps) { properties = vprops.props; vprops.cacheUntilNs = Math.max(vprops.cacheUntilNs, untilNs); } else { try { VersionedCollectionProps vcp = fetchCollectionProperties(collection, watcher); properties = vcp.props; if (cacheForMillis > 0) { vcp.cacheUntilNs = untilNs; watchedCollectionProps.put(collection, vcp); } else { // we're synchronized on watchedCollectionProps and we can only get here if we have found an expired // vprops above, so it is safe to remove the cached value and let the GC free up some mem a bit sooner. if (!collectionPropsObservers.containsKey(collection)) { watchedCollectionProps.remove(collection); } } } catch (Exception e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Error reading collection properties", SolrZkClient.checkInterrupted(e)); } } return properties; } } private class VersionedCollectionProps { int zkVersion; Map<String, String> props; long cacheUntilNs = 0; VersionedCollectionProps(int zkVersion, Map<String, String> props) { this.zkVersion = zkVersion; this.props = props; } } static String getCollectionPropsPath(final String collection) { return COLLECTIONS_ZKNODE + '/' + collection + '/' + COLLECTION_PROPS_ZKNODE; } @SuppressWarnings("unchecked") private VersionedCollectionProps fetchCollectionProperties(String collection, Watcher watcher) throws KeeperException, InterruptedException { final String znodePath = getCollectionPropsPath(collection); // lazy init cache cleaner once we know someone is using collection properties. if (collectionPropsCacheCleaner == null) { synchronized (this) { // There can be only one! :) if (collectionPropsCacheCleaner == null) { collectionPropsCacheCleaner = notifications.submit(new CacheCleaner()); } } } while (true) { try { Stat stat = new Stat(); byte[] data = zkClient.getData(znodePath, watcher, stat, true); return new VersionedCollectionProps(stat.getVersion(), (Map<String, String>) Utils.fromJSON(data)); } catch (ClassCastException e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to parse collection properties for collection " + collection, e); } catch (KeeperException.NoNodeException e) { if (watcher != null) { // Leave an exists watch in place in case a collectionprops.json is created later. Stat exists = zkClient.exists(znodePath, watcher, true); if (exists != null) { // Rare race condition, we tried to fetch the data and couldn't find it, then we found it exists. // Loop and try again. continue; } } return new VersionedCollectionProps(-1, EMPTY_MAP); } } } /** * Returns the content of /security.json from ZooKeeper as a Map * If the files doesn't exist, it returns null. 
*/ @SuppressWarnings({"unchecked"}) public ConfigData getSecurityProps(boolean getFresh) { if (!getFresh) { if (securityData == null) return new ConfigData(EMPTY_MAP, -1); return new ConfigData(securityData.data, securityData.version); } try { Stat stat = new Stat(); if (getZkClient().exists(SOLR_SECURITY_CONF_PATH, true)) { final byte[] data = getZkClient().getData(ZkStateReader.SOLR_SECURITY_CONF_PATH, null, stat, true); return data != null && data.length > 0 ? new ConfigData((Map<String, Object>) Utils.fromJSON(data), stat.getVersion()) : null; } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new SolrException(ErrorCode.SERVER_ERROR, "Error reading security properties", e); } catch (KeeperException e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Error reading security properties", e); } return null; } /** * Returns the baseURL corresponding to a given node's nodeName -- * NOTE: does not (currently) imply that the nodeName (or resulting * baseURL) exists in the cluster. * * @lucene.experimental */ public String getBaseUrlForNodeName(final String nodeName) { return Utils.getBaseUrlForNodeName(nodeName, getClusterProperty(URL_SCHEME, "http")); } /** * Watches a single collection's format2 state.json. */ class StateWatcher implements Watcher { private final String coll; private final String collectionPath; StateWatcher(String coll) { this.coll = coll; collectionPath = getCollectionPath(coll); } @Override public void process(WatchedEvent event) { // session events are not change events, and do not remove the watcher if (EventType.None.equals(event.getType())) { return; } if (!collectionWatches.containsKey(coll)) { // This collection is no longer interesting, stop watching. log.debug("Uninteresting collection {}", coll); return; } Set<String> liveNodes = ZkStateReader.this.liveNodes; if (log.isInfoEnabled()) { log.info("A cluster state change: [{}] for collection [{}] has occurred - updating... (live nodes size: [{}])", event, coll, liveNodes.size()); } refreshAndWatch(event.getType()); } public void refreshAndWatch() { refreshAndWatch(null); } /** * Refresh collection state from ZK and leave a watch for future changes. * As a side effect, updates {@link #clusterState} and {@link #watchedCollectionStates} * with the results of the refresh. */ public void refreshAndWatch(EventType eventType) { try { if (eventType == null || eventType == EventType.NodeChildrenChanged) { refreshAndWatchChildren(); if (eventType == EventType.NodeChildrenChanged) { //only per-replica states modified. 
return return; } } DocCollection newState = fetchCollectionState(coll, this); updateWatchedCollection(coll, newState); synchronized (getUpdateLock()) { constructState(Collections.singleton(coll)); } } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) { log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e); } catch (KeeperException e) { log.error("Unwatched collection: [{}]", coll, e); throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); log.error("Unwatched collection: [{}]", coll, e); } } private void refreshAndWatchChildren() throws KeeperException, InterruptedException { Stat stat = new Stat(); List<String> replicaStates = null; try { replicaStates = zkClient.getChildren(collectionPath, this, stat, true); PerReplicaStates newStates = new PerReplicaStates(collectionPath, stat.getCversion(), replicaStates); DocCollection oldState = watchedCollectionStates.get(coll); final DocCollection newState = oldState != null ? oldState.copyWith(newStates) : fetchCollectionState(coll, null); updateWatchedCollection(coll, newState); synchronized (getUpdateLock()) { constructState(Collections.singleton(coll)); } if (log.isDebugEnabled()) { log.debug("updated per-replica states changed for: {}, ver: {} , new vals: {}", coll, stat.getCversion(), replicaStates); } } catch (NoNodeException e) { log.info("{} is deleted, stop watching children", collectionPath); } } } /** * Watches collection properties */ class PropsWatcher implements Watcher { private final String coll; private long watchUntilNs; PropsWatcher(String coll) { this.coll = coll; watchUntilNs = 0; } PropsWatcher(String coll, long forMillis) { this.coll = coll; watchUntilNs = System.nanoTime() + TimeUnit.NANOSECONDS.convert(forMillis, TimeUnit.MILLISECONDS); } public PropsWatcher renew(long forMillis) { watchUntilNs = System.nanoTime() + TimeUnit.NANOSECONDS.convert(forMillis, TimeUnit.MILLISECONDS); return this; } @Override public void process(WatchedEvent event) { // session events are not change events, and do not remove the watcher if (EventType.None.equals(event.getType())) { return; } boolean expired = System.nanoTime() > watchUntilNs; if (!collectionPropsObservers.containsKey(coll) && expired) { // No one can be notified of the change, we can ignore it and "unset" the watch log.debug("Ignoring property change for collection {}", coll); return; } log.info("A collection property change: [{}] for collection [{}] has occurred - updating...", event, coll); refreshAndWatch(true); } /** * Refresh collection properties from ZK and leave a watch for future changes. Updates the properties in * watchedCollectionProps with the results of the refresh. Optionally notifies watchers */ void refreshAndWatch(boolean notifyWatchers) { try { synchronized (watchedCollectionProps) { // making decisions based on the result of a get... 
VersionedCollectionProps vcp = fetchCollectionProperties(coll, this); Map<String, String> properties = vcp.props; VersionedCollectionProps existingVcp = watchedCollectionProps.get(coll); if (existingVcp == null || // never called before, record what we found vcp.zkVersion > existingVcp.zkVersion || // newer info we should update vcp.zkVersion == -1) { // node was deleted start over watchedCollectionProps.put(coll, vcp); if (notifyWatchers) { notifyPropsWatchers(coll, properties); } if (vcp.zkVersion == -1 && existingVcp != null) { // Collection DELETE detected // We should not be caching a collection that has been deleted. watchedCollectionProps.remove(coll); // core ref counting not relevant here, don't need canRemove(), we just sent // a notification of an empty set of properties, no reason to watch what doesn't exist. collectionPropsObservers.remove(coll); // This is the one time we know it's safe to throw this out. We just failed to set the watch // due to an NoNodeException, so it isn't held by ZK and can't re-set itself due to an update. collectionPropsWatchers.remove(coll); } } } } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) { log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e); } catch (KeeperException e) { log.error("Lost collection property watcher for {} due to ZK error", coll, e); throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); log.error("Lost collection property watcher for {} due to the thread being interrupted", coll, e); } } } /** * Watches /collections children . */ class CollectionsChildWatcher implements Watcher { @Override public void process(WatchedEvent event) { if (ZkStateReader.this.closed) { return; } // session events are not change events, and do not remove the watcher if (EventType.None.equals(event.getType())) { return; } log.debug("A collections change: [{}], has occurred - updating...", event); refreshAndWatch(); synchronized (getUpdateLock()) { constructState(Collections.emptySet()); } } /** * Must hold {@link #getUpdateLock()} before calling this method. */ public void refreshAndWatch() { try { refreshCollectionList(this); } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) { log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e); } catch (KeeperException e) { log.error("A ZK error has occurred", e); throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); } catch (InterruptedException e) { // Restore the interrupted status Thread.currentThread().interrupt(); log.warn("Interrupted", e); } } } /** * Watches the live_nodes and syncs changes. */ class LiveNodeWatcher implements Watcher { @Override public void process(WatchedEvent event) { // session events are not change events, and do not remove the watcher if (EventType.None.equals(event.getType())) { return; } if (log.isDebugEnabled()) { log.debug("A live node change: [{}], has occurred - updating... 
(live nodes size: [{}])", event, liveNodes.size()); } refreshAndWatch(); } public void refreshAndWatch() { try { refreshLiveNodes(this); } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) { log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e); } catch (KeeperException e) { log.error("A ZK error has occurred", e); throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); } catch (InterruptedException e) { // Restore the interrupted status Thread.currentThread().interrupt(); log.warn("Interrupted", e); } } } public static DocCollection getCollectionLive(ZkStateReader zkStateReader, String coll) { try { return zkStateReader.fetchCollectionState(coll, null); } catch (KeeperException e) { throw new SolrException(ErrorCode.BAD_REQUEST, "Could not load collection from ZK: " + coll, e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new SolrException(ErrorCode.BAD_REQUEST, "Could not load collection from ZK: " + coll, e); } } private DocCollection fetchCollectionState(String coll, Watcher watcher) throws KeeperException, InterruptedException { String collectionPath = getCollectionPath(coll); while (true) { ClusterState.initReplicaStateProvider(() -> { try { PerReplicaStates replicaStates = PerReplicaStates.fetch(collectionPath, zkClient, null); log.info("per-replica-state ver: {} fetched for initializing {} ", replicaStates.cversion, collectionPath); return replicaStates; } catch (Exception e) { //TODO throw new RuntimeException(e); } }); try { Stat stat = new Stat(); byte[] data = zkClient.getData(collectionPath, watcher, stat, true); ClusterState state = ClusterState.createFromJson(stat.getVersion(), data, Collections.emptySet()); ClusterState.CollectionRef collectionRef = state.getCollectionStates().get(coll); return collectionRef == null ? null : collectionRef.get(); } catch (KeeperException.NoNodeException e) { if (watcher != null) { // Leave an exists watch in place in case a state.json is created later. Stat exists = zkClient.exists(collectionPath, watcher, true); if (exists != null) { // Rare race condition, we tried to fetch the data and couldn't find it, then we found it exists. // Loop and try again. continue; } } return null; } finally { ClusterState.clearReplicaStateProvider(); } } } public static String getCollectionPathRoot(String coll) { return COLLECTIONS_ZKNODE + "/" + coll; } public static String getCollectionPath(String coll) { return getCollectionPathRoot(coll) + "/state.json"; } /** * Notify this reader that a local Core is a member of a collection, and so that collection * state should be watched. * <p> * Not a public API. This method should only be called from ZkController. * <p> * The number of cores per-collection is tracked, and adding multiple cores from the same * collection does not increase the number of watches. * * @param collection the collection that the core is a member of * @see ZkStateReader#unregisterCore(String) */ public void registerCore(String collection) { AtomicBoolean reconstructState = new AtomicBoolean(false); collectionWatches.compute(collection, (k, v) -> { if (v == null) { reconstructState.set(true); v = new CollectionWatch<>(); } v.coreRefCount++; return v; }); if (reconstructState.get()) { new StateWatcher(collection).refreshAndWatch(); } } /** * Notify this reader that a local core that is a member of a collection has been closed. * <p> * Not a public API. This method should only be called from ZkController. 
* <p> * If no cores are registered for a collection, and there are no {@link CollectionStateWatcher}s * for that collection either, the collection watch will be removed. * * @param collection the collection that the core belongs to */ public void unregisterCore(String collection) { AtomicBoolean reconstructState = new AtomicBoolean(false); collectionWatches.compute(collection, (k, v) -> { if (v == null) return null; if (v.coreRefCount > 0) v.coreRefCount--; if (v.canBeRemoved()) { watchedCollectionStates.remove(collection); lazyCollectionStates.put(collection, new LazyCollectionRef(collection)); reconstructState.set(true); return null; } return v; }); if (reconstructState.get()) { synchronized (getUpdateLock()) { constructState(Collections.emptySet()); } } } /** * Register a CollectionStateWatcher to be called when the state of a collection changes * <em>or</em> the set of live nodes changes. * * <p> * The Watcher will automatically be removed when it's * <code>onStateChanged</code> returns <code>true</code> * </p> * * <p> * This is method is just syntactic sugar for registering both a {@link DocCollectionWatcher} and * a {@link LiveNodesListener}. Callers that only care about one or the other (but not both) are * encouraged to use the more specific methods register methods as it may reduce the number of * ZooKeeper watchers needed, and reduce the amount of network/cpu used. * </p> * * @see #registerDocCollectionWatcher * @see #registerLiveNodesListener */ public void registerCollectionStateWatcher(String collection, CollectionStateWatcher stateWatcher) { final DocCollectionAndLiveNodesWatcherWrapper wrapper = new DocCollectionAndLiveNodesWatcherWrapper(collection, stateWatcher); registerDocCollectionWatcher(collection, wrapper); registerLiveNodesListener(wrapper); DocCollection state = clusterState.getCollectionOrNull(collection); if (stateWatcher.onStateChanged(liveNodes, state) == true) { removeCollectionStateWatcher(collection, stateWatcher); } } /** * Register a DocCollectionWatcher to be called when the state of a collection changes * * <p> * The Watcher will automatically be removed when it's * <code>onStateChanged</code> returns <code>true</code> * </p> */ public void registerDocCollectionWatcher(String collection, DocCollectionWatcher stateWatcher) { AtomicBoolean watchSet = new AtomicBoolean(false); collectionWatches.compute(collection, (k, v) -> { if (v == null) { v = new CollectionWatch<>(); watchSet.set(true); } v.stateWatchers.add(stateWatcher); return v; }); if (watchSet.get()) { new StateWatcher(collection).refreshAndWatch(); } DocCollection state = clusterState.getCollectionOrNull(collection); state = updatePerReplicaState(state); if (stateWatcher.onStateChanged(state) == true) { removeDocCollectionWatcher(collection, stateWatcher); } } private DocCollection updatePerReplicaState(DocCollection c) { if (c == null || !c.isPerReplicaState()) return c; PerReplicaStates current = c.getPerReplicaStates(); PerReplicaStates newPrs = PerReplicaStates.fetch(c.getZNode(), zkClient, current); if (newPrs != current) { if(log.isDebugEnabled()) { log.debug("update for a fresh per-replica-state {}", c.getName()); } DocCollection modifiedColl = c.copyWith(newPrs); updateWatchedCollection(c.getName(), modifiedColl); return modifiedColl; } else { return c; } } /** * Block until a CollectionStatePredicate returns true, or the wait times out * * <p> * Note that the predicate may be called again even after it has returned true, so * implementors should avoid changing state within the predicate 
call itself. * </p> * * <p> * This implementation utilizes {@link CollectionStateWatcher} internally. * Callers that don't care about liveNodes are encouraged to use a {@link DocCollection} {@link Predicate} * instead * </p> * * @param collection the collection to watch * @param wait how long to wait * @param unit the units of the wait parameter * @param predicate the predicate to call on state changes * @throws InterruptedException on interrupt * @throws TimeoutException on timeout * @see #waitForState(String, long, TimeUnit, Predicate) * @see #registerCollectionStateWatcher */ public void waitForState(final String collection, long wait, TimeUnit unit, CollectionStatePredicate predicate) throws InterruptedException, TimeoutException { if (closed) { throw new AlreadyClosedException(); } final CountDownLatch latch = new CountDownLatch(1); waitLatches.add(latch); AtomicReference<DocCollection> docCollection = new AtomicReference<>(); CollectionStateWatcher watcher = (n, c) -> { docCollection.set(c); boolean matches = predicate.matches(n, c); if (matches) latch.countDown(); return matches; }; registerCollectionStateWatcher(collection, watcher); try { // wait for the watcher predicate to return true, or time out if (!latch.await(wait, unit)) throw new TimeoutException("Timeout waiting to see state for collection=" + collection + " :" + docCollection.get()); } finally { removeCollectionStateWatcher(collection, watcher); waitLatches.remove(latch); } } /** * Block until a Predicate returns true, or the wait times out * * <p> * Note that the predicate may be called again even after it has returned true, so * implementors should avoid changing state within the predicate call itself. * The predicate may also be called concurrently when multiple state changes are seen in rapid succession. * </p> * * @param collection the collection to watch * @param wait how long to wait * @param unit the units of the wait parameter * @param predicate the predicate to call on state changes * @return the state of the doc collection after the predicate succeeds * @throws InterruptedException on interrupt * @throws TimeoutException on timeout */ public DocCollection waitForState(final String collection, long wait, TimeUnit unit, Predicate<DocCollection> predicate) throws InterruptedException, TimeoutException { if (log.isDebugEnabled()) { log.debug("Waiting up to {}ms for state {}", unit.toMillis(wait), predicate); } if (closed) { throw new AlreadyClosedException(); } final CountDownLatch latch = new CountDownLatch(1); waitLatches.add(latch); AtomicReference<DocCollection> docCollection = new AtomicReference<>(); DocCollectionWatcher watcher = (c) -> { docCollection.set(c); boolean matches = predicate.test(c); if (matches) latch.countDown(); return matches; }; registerDocCollectionWatcher(collection, watcher); try { // wait for the watcher predicate to return true, or time out if (!latch.await(wait, unit)) throw new TimeoutException("Timeout waiting to see state for collection=" + collection + " :" + docCollection.get()); return docCollection.get(); } finally { removeDocCollectionWatcher(collection, watcher); waitLatches.remove(latch); if (log.isDebugEnabled()) { log.debug("Completed wait for {}", predicate); } } } /** * Block until a LiveNodesStatePredicate returns true, or the wait times out * <p> * Note that the predicate may be called again even after it has returned true, so * implementors should avoid changing state within the predicate call itself. 
* </p> * * @param wait how long to wait * @param unit the units of the wait parameter * @param predicate the predicate to call on state changes * @throws InterruptedException on interrupt * @throws TimeoutException on timeout */ public void waitForLiveNodes(long wait, TimeUnit unit, LiveNodesPredicate predicate) throws InterruptedException, TimeoutException { if (closed) { throw new AlreadyClosedException(); } final CountDownLatch latch = new CountDownLatch(1); waitLatches.add(latch); LiveNodesListener listener = (o, n) -> { boolean matches = predicate.matches(o, n); if (matches) latch.countDown(); return matches; }; registerLiveNodesListener(listener); try { // wait for the watcher predicate to return true, or time out if (!latch.await(wait, unit)) throw new TimeoutException("Timeout waiting for live nodes, currently they are: " + getClusterState().getLiveNodes()); } finally { removeLiveNodesListener(listener); waitLatches.remove(latch); } } /** * Remove a watcher from a collection's watch list. * <p> * This allows Zookeeper watches to be removed if there is no interest in the * collection. * </p> * * @param collection the collection * @param watcher the watcher * @see #registerCollectionStateWatcher */ public void removeCollectionStateWatcher(String collection, CollectionStateWatcher watcher) { final DocCollectionAndLiveNodesWatcherWrapper wrapper = new DocCollectionAndLiveNodesWatcherWrapper(collection, watcher); removeDocCollectionWatcher(collection, wrapper); removeLiveNodesListener(wrapper); } /** * Remove a watcher from a collection's watch list. * <p> * This allows Zookeeper watches to be removed if there is no interest in the * collection. * </p> * * @param collection the collection * @param watcher the watcher * @see #registerDocCollectionWatcher */ public void removeDocCollectionWatcher(String collection, DocCollectionWatcher watcher) { AtomicBoolean reconstructState = new AtomicBoolean(false); collectionWatches.compute(collection, (k, v) -> { if (v == null) return null; v.stateWatchers.remove(watcher); if (v.canBeRemoved()) { watchedCollectionStates.remove(collection); lazyCollectionStates.put(collection, new LazyCollectionRef(collection)); reconstructState.set(true); return null; } return v; }); if (reconstructState.get()) { synchronized (getUpdateLock()) { constructState(Collections.emptySet()); } } } /* package-private for testing */ Set<DocCollectionWatcher> getStateWatchers(String collection) { final Set<DocCollectionWatcher> watchers = new HashSet<>(); collectionWatches.compute(collection, (k, v) -> { if (v != null) { watchers.addAll(v.stateWatchers); } return v; }); return watchers; } // returns true if the state has changed private boolean updateWatchedCollection(String coll, DocCollection newState) { if (newState == null) { log.debug("Removing cached collection state for [{}]", coll); watchedCollectionStates.remove(coll); return true; } boolean updated = false; // CAS update loop while (true) { if (!collectionWatches.containsKey(coll)) { break; } DocCollection oldState = watchedCollectionStates.get(coll); if (oldState == null) { if (watchedCollectionStates.putIfAbsent(coll, newState) == null) { if (log.isDebugEnabled()) { log.debug("Add data for [{}] ver [{}]", coll, newState.getZNodeVersion()); } updated = true; break; } } else { int oldCVersion = oldState.getPerReplicaStates() == null ? -1 : oldState.getPerReplicaStates().cversion; int newCVersion = newState.getPerReplicaStates() == null ? 
-1 : newState.getPerReplicaStates().cversion; if (oldState.getZNodeVersion() >= newState.getZNodeVersion() && oldCVersion >= newCVersion) { // no change to state, but we might have been triggered by the addition of a // state watcher, so run notifications updated = true; break; } if (watchedCollectionStates.replace(coll, oldState, newState)) { if (log.isDebugEnabled()) { log.debug("Updating data for [{}] from [{}] to [{}]", coll, oldState.getZNodeVersion(), newState.getZNodeVersion()); } updated = true; break; } } } // Resolve race with unregisterCore. if (!collectionWatches.containsKey(coll)) { watchedCollectionStates.remove(coll); log.debug("Removing uninteresting collection [{}]", coll); } return updated; } public void registerCollectionPropsWatcher(final String collection, CollectionPropsWatcher propsWatcher) { AtomicBoolean watchSet = new AtomicBoolean(false); collectionPropsObservers.compute(collection, (k, v) -> { if (v == null) { v = new CollectionWatch<>(); watchSet.set(true); } v.stateWatchers.add(propsWatcher); return v; }); if (watchSet.get()) { collectionPropsWatchers.computeIfAbsent(collection, PropsWatcher::new).refreshAndWatch(false); } } public void removeCollectionPropsWatcher(String collection, CollectionPropsWatcher watcher) { collectionPropsObservers.compute(collection, (k, v) -> { if (v == null) return null; v.stateWatchers.remove(watcher); if (v.canBeRemoved()) { // don't want this to happen in middle of other blocks that might add it back. synchronized (watchedCollectionProps) { watchedCollectionProps.remove(collection); } return null; } return v; }); } public static class ConfigData { public Map<String, Object> data; public int version; public ConfigData() { } public ConfigData(Map<String, Object> data, int version) { this.data = data; this.version = version; } } private void notifyStateWatchers(String collection, DocCollection collectionState) { if (this.closed) { return; } try { notifications.submit(new Notification(collection, collectionState)); } catch (RejectedExecutionException e) { if (closed == false) { log.error("Couldn't run collection notifications for {}", collection, e); } } } private class Notification implements Runnable { final String collection; final DocCollection collectionState; private Notification(String collection, DocCollection collectionState) { this.collection = collection; this.collectionState = collectionState; } @Override public void run() { List<DocCollectionWatcher> watchers = new ArrayList<>(); collectionWatches.compute(collection, (k, v) -> { if (v == null) return null; watchers.addAll(v.stateWatchers); return v; }); for (DocCollectionWatcher watcher : watchers) { try { if (watcher.onStateChanged(collectionState)) { removeDocCollectionWatcher(collection, watcher); } } catch (Exception exception) { log.warn("Error on calling watcher", exception); } } } } // // Aliases related // /** * Access to the {@link Aliases}. */ public final AliasesManager aliasesManager = new AliasesManager(); /** * Get an immutable copy of the present state of the aliases. References to this object should not be retained * in any context where it will be important to know if aliases have changed. * * @return The current aliases, Aliases.EMPTY if not solr cloud, or no aliases have existed yet. Never returns null. 
*/ public Aliases getAliases() { return aliasesManager.getAliases(); } // called by createClusterStateWatchersAndUpdate() private void refreshAliases(AliasesManager watcher) throws KeeperException, InterruptedException { synchronized (getUpdateLock()) { constructState(Collections.emptySet()); zkClient.exists(ALIASES, watcher, true); } aliasesManager.update(); } /** * A class to manage the aliases instance, including watching for changes. * There should only ever be one instance of this class * per instance of ZkStateReader. Normally it will not be useful to create a new instance since * this watcher automatically re-registers itself every time it is updated. */ public class AliasesManager implements Watcher { // the holder is a Zk watcher // note: as of this writing, this class if very generic. Is it useful to use for other ZK managed things? private final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private volatile Aliases aliases = Aliases.EMPTY; public Aliases getAliases() { return aliases; // volatile read } /** * Writes an updated {@link Aliases} to zk. * It will retry if there are races with other modifications, giving up after 30 seconds with a SolrException. * The caller should understand it's possible the aliases has further changed if it examines it. */ public void applyModificationAndExportToZk(UnaryOperator<Aliases> op) { // The current aliases hasn't been update()'ed yet -- which is impossible? Any way just update it first. if (aliases.getZNodeVersion() == -1) { try { boolean updated = update(); assert updated; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new ZooKeeperException(ErrorCode.SERVER_ERROR, e.toString(), e); } catch (KeeperException e) { throw new ZooKeeperException(ErrorCode.SERVER_ERROR, e.toString(), e); } } final long deadlineNanos = System.nanoTime() + TimeUnit.SECONDS.toNanos(30); // note: triesLeft tuning is based on ConcurrentCreateRoutedAliasTest for (int triesLeft = 30; triesLeft > 0; triesLeft--) { // we could synchronize on "this" but there doesn't seem to be a point; we have a retry loop. Aliases curAliases = getAliases(); Aliases modAliases = op.apply(curAliases); final byte[] modAliasesJson = modAliases.toJSON(); if (curAliases == modAliases) { log.debug("Current aliases has the desired modification; no further ZK interaction needed."); return; } try { try { final Stat stat = getZkClient().setData(ALIASES, modAliasesJson, curAliases.getZNodeVersion(), true); setIfNewer(Aliases.fromJSON(modAliasesJson, stat.getVersion())); return; } catch (KeeperException.BadVersionException e) { log.debug("{}", e, e); log.warn("Couldn't save aliases due to race with another modification; will update and retry until timeout"); // considered a backoff here, but we really do want to compete strongly since the normal case is // that we will do one update and succeed. This is left as a hot loop for limited tries intentionally. // More failures than that here probably indicate a bug or a very strange high write frequency usage for // aliases.json, timeouts mean zk is being very slow to respond, or this node is being crushed // by other processing and just can't find any cpu cycles at all. update(); if (deadlineNanos < System.nanoTime()) { throw new SolrException(ErrorCode.SERVER_ERROR, "Timed out trying to update aliases! 
" + "Either zookeeper or this node may be overloaded."); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new ZooKeeperException(ErrorCode.SERVER_ERROR, e.toString(), e); } catch (KeeperException e) { throw new ZooKeeperException(ErrorCode.SERVER_ERROR, e.toString(), e); } } throw new SolrException(ErrorCode.SERVER_ERROR, "Too many successive version failures trying to update aliases"); } /** * Ensures the internal aliases is up to date. If there is a change, return true. * * @return true if an update was performed */ public boolean update() throws KeeperException, InterruptedException { log.debug("Checking ZK for most up to date Aliases {}", ALIASES); // Call sync() first to ensure the subsequent read (getData) is up to date. zkClient.getSolrZooKeeper().sync(ALIASES, null, null); Stat stat = new Stat(); final byte[] data = zkClient.getData(ALIASES, null, stat, true); return setIfNewer(Aliases.fromJSON(data, stat.getVersion())); } // ZK Watcher interface @Override public void process(WatchedEvent event) { // session events are not change events, and do not remove the watcher if (EventType.None.equals(event.getType())) { return; } try { log.debug("Aliases: updating"); // re-register the watch Stat stat = new Stat(); final byte[] data = zkClient.getData(ALIASES, this, stat, true); // note: it'd be nice to avoid possibly needlessly parsing if we don't update aliases but not a big deal setIfNewer(Aliases.fromJSON(data, stat.getVersion())); } catch (NoNodeException e) { // /aliases.json will not always exist } catch (KeeperException.ConnectionLossException | KeeperException.SessionExpiredException e) { // note: aliases.json is required to be present log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e); } catch (KeeperException e) { log.error("A ZK error has occurred", e); throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e); } catch (InterruptedException e) { // Restore the interrupted status Thread.currentThread().interrupt(); log.warn("Interrupted", e); } } /** * Update the internal aliases reference with a new one, provided that its ZK version has increased. 
* * @param newAliases the potentially newer version of Aliases * @return true if aliases have been updated to a new version, false otherwise */ private boolean setIfNewer(Aliases newAliases) { assert newAliases.getZNodeVersion() >= 0; synchronized (this) { int cmp = Integer.compare(aliases.getZNodeVersion(), newAliases.getZNodeVersion()); if (cmp < 0) { log.debug("Aliases: cmp={}, new definition is: {}", cmp, newAliases); aliases = newAliases; this.notifyAll(); return true; } else { log.debug("Aliases: cmp={}, not overwriting ZK version.", cmp); assert cmp != 0 || Arrays.equals(aliases.toJSON(), newAliases.toJSON()) : aliases + " != " + newAliases; return false; } } } } private void notifyPropsWatchers(String collection, Map<String, String> properties) { try { collectionPropsNotifications.submit(new PropsNotification(collection, properties)); } catch (RejectedExecutionException e) { if (!closed) { log.error("Couldn't run collection properties notifications for {}", collection, e); } } } private class PropsNotification implements Runnable { private final String collection; private final Map<String, String> collectionProperties; private final List<CollectionPropsWatcher> watchers = new ArrayList<>(); private PropsNotification(String collection, Map<String, String> collectionProperties) { this.collection = collection; this.collectionProperties = collectionProperties; // guarantee delivery of notification regardless of what happens to collectionPropsObservers // while we wait our turn in the executor by capturing the list on creation. collectionPropsObservers.compute(collection, (k, v) -> { if (v == null) return null; watchers.addAll(v.stateWatchers); return v; }); } @Override public void run() { for (CollectionPropsWatcher watcher : watchers) { if (watcher.onStateChanged(collectionProperties)) { removeCollectionPropsWatcher(collection, watcher); } } } } private class CacheCleaner implements Runnable { public void run() { while (!Thread.interrupted()) { try { Thread.sleep(60000); } catch (InterruptedException e) { // Executor shutdown will send us an interrupt break; } watchedCollectionProps.entrySet().removeIf(entry -> entry.getValue().cacheUntilNs < System.nanoTime() && !collectionPropsObservers.containsKey(entry.getKey())); } } } /** * Helper class that acts as both a {@link DocCollectionWatcher} and a {@link LiveNodesListener} * while wraping and delegating to a {@link CollectionStateWatcher} */ private final class DocCollectionAndLiveNodesWatcherWrapper implements DocCollectionWatcher, LiveNodesListener { private final String collectionName; private final CollectionStateWatcher delegate; public int hashCode() { return collectionName.hashCode() * delegate.hashCode(); } public boolean equals(Object other) { if (other instanceof DocCollectionAndLiveNodesWatcherWrapper) { DocCollectionAndLiveNodesWatcherWrapper that = (DocCollectionAndLiveNodesWatcherWrapper) other; return this.collectionName.equals(that.collectionName) && this.delegate.equals(that.delegate); } return false; } public DocCollectionAndLiveNodesWatcherWrapper(final String collectionName, final CollectionStateWatcher delegate) { this.collectionName = collectionName; this.delegate = delegate; } @Override public boolean onStateChanged(DocCollection collectionState) { final boolean result = delegate.onStateChanged(ZkStateReader.this.liveNodes, collectionState); if (result) { // it might be a while before live nodes changes, so proactively remove ourselves removeLiveNodesListener(this); } return result; } @Override public boolean 
onChange(SortedSet<String> oldLiveNodes, SortedSet<String> newLiveNodes) { final DocCollection collection = ZkStateReader.this.clusterState.getCollectionOrNull(collectionName); final boolean result = delegate.onStateChanged(newLiveNodes, collection); if (result) { // it might be a while before collection changes, so proactively remove ourselves removeDocCollectionWatcher(collectionName, this); } return result; } } public DocCollection getCollection(String collection) { return clusterState == null ? null : clusterState.getCollectionOrNull(collection); } }
idx: 1
id: 40,397
msg: Probably doesn't go here because I think it's specific to the HTTP API layer. This class is too internal to declare such a name.
proj: apache-lucene-solr
lang: java
@@ -60,8 +60,11 @@ public class ITZipkinMetricsHealth { } @Test public void healthIsOK() throws Exception { - assertThat(get("/health").isSuccessful()) - .isTrue(); + Response check = get("/health"); + assertThat(check.isSuccessful()).isTrue(); + assertThat(check.body().string()).isEqualTo( + "{\"status\":\"UP\",\"zipkin\":{\"status\":\"UP\",\"details\":{\"InMemoryStorage{}\":{\"status\":\"UP\"}}}}" + ); // ensure we don't track health in prometheus assertThat(scrape())
y: 1
/* * Copyright 2015-2019 The OpenZipkin Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package zipkin2.server.internal; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.jayway.jsonpath.JsonPath; import com.linecorp.armeria.server.Server; import io.micrometer.prometheus.PrometheusMeterRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.List; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; import zipkin.server.ZipkinServer; import zipkin2.Span; import zipkin2.codec.SpanBytesEncoder; import zipkin2.storage.InMemoryStorage; import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThat; import static zipkin2.TestObjects.LOTS_OF_SPANS; import static zipkin2.server.internal.ITZipkinServer.url; @SpringBootTest( classes = ZipkinServer.class, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, properties = "spring.config.name=zipkin-server" ) @RunWith(SpringRunner.class) public class ITZipkinMetricsHealth { @Autowired InMemoryStorage storage; @Autowired PrometheusMeterRegistry registry; @Autowired Server server; OkHttpClient client = new OkHttpClient.Builder().followRedirects(true).build(); @Before public void init() { storage.clear(); } @Test public void healthIsOK() throws Exception { assertThat(get("/health").isSuccessful()) .isTrue(); // ensure we don't track health in prometheus assertThat(scrape()) .doesNotContain("health"); } @Test public void metricsIsOK() throws Exception { assertThat(get("/metrics").isSuccessful()) .isTrue(); // ensure we don't track metrics in prometheus assertThat(scrape()) .doesNotContain("metrics"); } @Test public void actuatorIsOK() throws Exception { assertThat(get("/actuator").isSuccessful()) .isTrue(); // ensure we don't track actuator in prometheus assertThat(scrape()) .doesNotContain("actuator"); } @Test public void prometheusIsOK() throws Exception { assertThat(get("/prometheus").isSuccessful()) .isTrue(); // ensure we don't track prometheus, UI requests in prometheus assertThat(scrape()) .doesNotContain("prometheus") .doesNotContain("uri=\"/zipkin") .doesNotContain("uri=\"/\""); } @Test public void notFound_prometheus() throws Exception { assertThat(get("/doo-wop").isSuccessful()) .isFalse(); assertThat(scrape()) .contains("uri=\"NOT_FOUND\"") .doesNotContain("uri=\"/doo-wop"); } @Test public void redirected_prometheus() throws Exception { assertThat(get("/").isSuccessful()) .isTrue(); // follows redirects assertThat(scrape()) .contains("uri=\"REDIRECTION\"") .contains("uri=\"/zipkin/index.html\"") .doesNotContain("uri=\"/\""); } @Test public void apiTemplate_prometheus() throws Exception { List<Span> spans = 
asList(LOTS_OF_SPANS[0]); byte[] body = SpanBytesEncoder.JSON_V2.encodeList(spans); assertThat(post("/api/v2/spans", body).isSuccessful()) .isTrue(); assertThat(get("/api/v2/trace/" + LOTS_OF_SPANS[0].traceId()).isSuccessful()) .isTrue(); assertThat(scrape()) .contains("uri=\"/api/v2/trace/{traceId}\"") .doesNotContain(LOTS_OF_SPANS[0].traceId()); } @Test public void forwardedRoute_prometheus() throws Exception { assertThat(get("/zipkin/api/v2/services").isSuccessful()) .isTrue(); assertThat(scrape()) .contains("uri=\"/api/v2/services\"") .doesNotContain("uri=\"/zipkin/api/v2/services\""); } String scrape() throws InterruptedException { Thread.sleep(100); return registry.scrape(); } /** Makes sure the prometheus filter doesn't count twice */ @Test public void writeSpans_updatesPrometheusMetrics() throws Exception { List<Span> spans = asList(LOTS_OF_SPANS[0], LOTS_OF_SPANS[1], LOTS_OF_SPANS[2]); byte[] body = SpanBytesEncoder.JSON_V2.encodeList(spans); post("/api/v2/spans", body); post("/api/v2/spans", body); Thread.sleep(100); // sometimes travis flakes getting the "http.server.requests" timer double messagesCount = registry.counter("zipkin_collector.spans", "transport", "http").count(); // Get the http count from the registry and it should match the summation previous count // and count of calls below long httpCount = registry .find("http.server.requests") .tag("uri", "/api/v2/spans") .timer() .count(); // ensure unscoped counter does not exist assertThat(scrape()) .doesNotContain("zipkin_collector_spans_total " + messagesCount) .contains("zipkin_collector_spans_total{transport=\"http\",} " + messagesCount) .contains( "http_server_requests_seconds_count{method=\"POST\",status=\"202\",uri=\"/api/v2/spans\",} " + httpCount); } @Test public void readsHealth() throws Exception { String json = getAsString("/health"); assertThat(readString(json, "$.status")) .isIn("UP", "DOWN", "UNKNOWN"); assertThat(readString(json, "$.zipkin.status")) .isIn("UP", "DOWN", "UNKNOWN"); } @Test public void writesSpans_readMetricsFormat() throws Exception { byte[] span = {'z', 'i', 'p', 'k', 'i', 'n'}; List<Span> spans = asList(LOTS_OF_SPANS[0], LOTS_OF_SPANS[1], LOTS_OF_SPANS[2]); byte[] body = SpanBytesEncoder.JSON_V2.encodeList(spans); post("/api/v2/spans", body); post("/api/v2/spans", body); post("/api/v2/spans", span); Thread.sleep(1500); String metrics = getAsString("/metrics"); assertThat(readJson(metrics)) .containsExactlyInAnyOrder( "gauge.zipkin_collector.message_spans.http" , "gauge.zipkin_collector.message_bytes.http" , "counter.zipkin_collector.messages.http" , "counter.zipkin_collector.bytes.http" , "counter.zipkin_collector.spans.http" , "counter.zipkin_collector.messages_dropped.http" , "counter.zipkin_collector.spans_dropped.http" ); } private String getAsString(String path) throws IOException { Response response = get(path); assertThat(response.isSuccessful()) .withFailMessage(response.toString()) .isTrue(); return response.body().string(); } private Response get(String path) throws IOException { return client.newCall(new Request.Builder().url(url(server, path)).build()).execute(); } private Response post(String path, byte[] body) throws IOException { return client.newCall(new Request.Builder() .url(url(server, path)) .post(RequestBody.create(null, body)) .build()).execute(); } static String readString(String json, String jsonPath) { return JsonPath.compile(jsonPath).read(json); } static List readJson(String json) throws Exception { ObjectMapper mapper = new ObjectMapper(); JsonNode jsonNode = 
mapper.readTree(json); List<String> fieldsList = new ArrayList<>(); jsonNode.fieldNames().forEachRemaining(fieldsList::add); return fieldsList; } }
idx: 1
id: 15,845
msg: in preparation of directly writing the /health endpoint one day.. we should have content checks.
proj: openzipkin-zipkin
lang: java
@@ -10,7 +10,7 @@ import ( type MultipleIDMatchesError struct{ prefix string } func (e *MultipleIDMatchesError) Error() string { - return fmt.Sprintf("multiple IDs with prefix %s found", e.prefix) + return fmt.Sprintf("multiple IDs with prefix %q found", e.prefix) } // A NoIDByPrefixError is returned by Find() when no ID for a given prefix
y: 1
package restic import ( "context" "fmt" ) // A MultipleIDMatchesError is returned by Find() when multiple IDs with a // given prefix are found. type MultipleIDMatchesError struct{ prefix string } func (e *MultipleIDMatchesError) Error() string { return fmt.Sprintf("multiple IDs with prefix %s found", e.prefix) } // A NoIDByPrefixError is returned by Find() when no ID for a given prefix // could be found. type NoIDByPrefixError struct{ prefix string } func (e *NoIDByPrefixError) Error() string { return fmt.Sprintf("no matching ID found for prefix %q", e.prefix) } // Find loads the list of all files of type t and searches for names which // start with prefix. If none is found, nil and ErrNoIDPrefixFound is returned. // If more than one is found, nil and ErrMultipleIDMatches is returned. func Find(ctx context.Context, be Lister, t FileType, prefix string) (string, error) { match := "" ctx, cancel := context.WithCancel(ctx) defer cancel() err := be.List(ctx, t, func(fi FileInfo) error { if len(fi.Name) >= len(prefix) && prefix == fi.Name[:len(prefix)] { if match == "" { match = fi.Name } else { return &MultipleIDMatchesError{prefix} } } return nil }) if err != nil { return "", err } if match != "" { return match, nil } return "", &NoIDByPrefixError{prefix} } const minPrefixLength = 8 // PrefixLength returns the number of bytes required so that all prefixes of // all names of type t are unique. func PrefixLength(ctx context.Context, be Lister, t FileType) (int, error) { // load all IDs of the given type list := make([]string, 0, 100) ctx, cancel := context.WithCancel(ctx) defer cancel() err := be.List(ctx, t, func(fi FileInfo) error { list = append(list, fi.Name) return nil }) if err != nil { return 0, err } // select prefixes of length l, test if the last one is the same as the current one var id ID outer: for l := minPrefixLength; l < len(id); l++ { var last string for _, name := range list { if last == name[:l] { continue outer } last = name[:l] } return l, nil } return len(id), nil }
idx: 1
id: 15,273
msg: question (non-blocking): Is there a benefit from using `%q` instead of `%s`?
proj: restic-restic
lang: go
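The non-blocking question in the msg above asks what `%q` buys over `%s`. As a rough, hypothetical illustration (not part of the dataset record; the prefix value below is invented), Go's `%q` verb renders a string as a quoted, escaped Go literal, so empty prefixes and embedded whitespace stay visible in the error text, whereas `%s` prints the raw bytes:

```go
// Hypothetical illustration of %s vs %q (not taken from the restic record above).
package main

import "fmt"

func main() {
	prefix := "ab cd\t" // invented example value with embedded whitespace

	// %s prints the raw string; trailing whitespace or an empty prefix is easy to miss.
	fmt.Printf("multiple IDs with prefix %s found\n", prefix)

	// %q prints a double-quoted, escaped Go string literal, e.g. "ab cd\t",
	// which keeps the exact prefix visible in the error message.
	fmt.Printf("multiple IDs with prefix %q found\n", prefix)
}
```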
@@ -26,7 +26,6 @@ std::string MetaUtils::spaceKey(GraphSpaceID spaceId) { return key; } - std::string MetaUtils::spaceVal(int32_t partsNum, int32_t replicaFactor, const std::string& name) { std::string val; val.reserve(256);
y: 1
/* Copyright (c) 2018 - present, VE Software Inc. All rights reserved * * This source code is licensed under Apache 2.0 License * (found in the LICENSE.Apache file in the root directory) */ #include "meta/MetaUtils.h" #include <thrift/lib/cpp2/protocol/Serializer.h> #include <thrift/lib/cpp2/protocol/CompactProtocol.h> namespace nebula { namespace meta { const std::string kSpacesTable = "__spaces__"; // NOLINT const std::string kPartsTable = "__parts__"; // NOLINT const std::string kHostsTable = "__hosts__"; // NOLINT const std::string kTagsTable = "__tags__"; // NOLINT const std::string kEdgesTable = "__edges__"; // NOLINT const std::string kIndexTable = "__index__"; // NOLINT std::string MetaUtils::spaceKey(GraphSpaceID spaceId) { std::string key; key.reserve(256); key.append(kSpacesTable.data(), kSpacesTable.size()); key.append(reinterpret_cast<const char*>(&spaceId), sizeof(spaceId)); return key; } std::string MetaUtils::spaceVal(int32_t partsNum, int32_t replicaFactor, const std::string& name) { std::string val; val.reserve(256); val.append(reinterpret_cast<const char*>(&partsNum), sizeof(partsNum)); val.append(reinterpret_cast<const char*>(&replicaFactor), sizeof(replicaFactor)); val.append(name); return val; } const std::string& MetaUtils::spacePrefix() { return kSpacesTable; } GraphSpaceID MetaUtils::spaceId(folly::StringPiece rawKey) { return *reinterpret_cast<const GraphSpaceID*>(rawKey.data() + kSpacesTable.size()); } folly::StringPiece MetaUtils::spaceName(folly::StringPiece rawVal) { return rawVal.subpiece(sizeof(int32_t)*2); } std::string MetaUtils::partKey(GraphSpaceID spaceId, PartitionID partId) { std::string key; key.reserve(128); key.append(kPartsTable.data(), kPartsTable.size()); key.append(reinterpret_cast<const char*>(&spaceId), sizeof(GraphSpaceID)); key.append(reinterpret_cast<const char*>(&partId), sizeof(PartitionID)); return key; } std::string MetaUtils::partVal(const std::vector<nebula::cpp2::HostAddr>& hosts) { std::string val; val.reserve(128); for (auto& h : hosts) { val.append(reinterpret_cast<const char*>(&h.ip), sizeof(h.ip)); val.append(reinterpret_cast<const char*>(&h.port), sizeof(h.port)); } return val; } std::string MetaUtils::partPrefix(GraphSpaceID spaceId) { std::string prefix; prefix.reserve(128); prefix.append(kPartsTable.data(), kPartsTable.size()); prefix.append(reinterpret_cast<const char*>(&spaceId), sizeof(GraphSpaceID)); return prefix; } std::vector<nebula::cpp2::HostAddr> MetaUtils::parsePartVal(folly::StringPiece val) { std::vector<nebula::cpp2::HostAddr> hosts; static const size_t unitSize = sizeof(int32_t) * 2; auto hostsNum = val.size() / unitSize; hosts.reserve(hostsNum); VLOG(3) << "Total size:" << val.size() << ", host size:" << unitSize << ", host num:" << hostsNum; for (decltype(hostsNum) i = 0; i < hostsNum; i++) { nebula::cpp2::HostAddr h; h.set_ip(*reinterpret_cast<const int32_t*>(val.data() + i * unitSize)); h.set_port(*reinterpret_cast<const int32_t*>(val.data() + i * unitSize + sizeof(int32_t))); hosts.emplace_back(std::move(h)); } return hosts; } std::string MetaUtils::hostKey(IPv4 ip, Port port) { std::string key; key.reserve(128); key.append(kHostsTable.data(), kHostsTable.size()); key.append(reinterpret_cast<const char*>(&ip), sizeof(ip)); key.append(reinterpret_cast<const char*>(&port), sizeof(port)); return key; } std::string MetaUtils::hostVal() { return ""; } const std::string& MetaUtils::hostPrefix() { return kHostsTable; } nebula::cpp2::HostAddr MetaUtils::parseHostKey(folly::StringPiece key) { 
nebula::cpp2::HostAddr host; memcpy(&host, key.data() + kHostsTable.size(), sizeof(host)); return host; } std::string MetaUtils::schemaEdgeKey(GraphSpaceID spaceId, EdgeType edgeType, int64_t version) { std::string key; key.reserve(128); key.append(kEdgesTable.data(), kEdgesTable.size()); key.append(reinterpret_cast<const char*>(&spaceId), sizeof(spaceId)); key.append(reinterpret_cast<const char*>(&edgeType), sizeof(edgeType)); key.append(reinterpret_cast<const char*>(&version), sizeof(version)); return key; } std::string MetaUtils::schemaEdgeVal(nebula::cpp2::Schema schema) { std::string val; apache::thrift::CompactSerializer::serialize(schema, &val); return val; } std::string MetaUtils::schemaTagKey(GraphSpaceID spaceId, TagID tagId, int64_t version) { std::string key; key.reserve(128); key.append(kTagsTable.data(), kTagsTable.size()); key.append(reinterpret_cast<const char*>(&spaceId), sizeof(spaceId)); key.append(reinterpret_cast<const char*>(&tagId), sizeof(tagId)); key.append(reinterpret_cast<const char*>(&version), sizeof(version)); return key; } std::string MetaUtils::schemaTagVal(nebula::cpp2::Schema schema) { std::string val; apache::thrift::CompactSerializer::serialize(schema, &val); return val; } nebula::cpp2::Schema MetaUtils::parseSchema(folly::StringPiece rawData) { nebula::cpp2::Schema schema; apache::thrift::CompactSerializer::deserialize(rawData, schema); return schema; } std::string MetaUtils::indexKey(EntryType type, const std::string& name) { std::string key; key.reserve(128); key.append(kIndexTable.data(), kIndexTable.size()); key.append(reinterpret_cast<const char*>(&type), sizeof(type)); key.append(name); return key; } } // namespace meta } // namespace nebula
idx: 1
id: 15,890
msg: I would suggest to keep two empty lines between functions in cpp files
proj: vesoft-inc-nebula
lang: cpp
@@ -178,7 +178,7 @@ void runTests(int64_t numElements) { { test(0x01, C_d, C_h, numElements, syncNone, true /*expectMismatch*/); test(0x02, C_d, C_h, numElements, syncNullStream, false /*expectMismatch*/); - test(0x04, C_d, C_h, numElements, syncOtherStream, true /*expectMismatch*/); + test(0x04, C_d, C_h, numElements, syncOtherStream, false /*expectMismatch*/); test(0x08, C_d, C_h, numElements, syncDevice, false /*expectMismatch*/); // Sending a marker to to null stream may synchronize the otherStream
y: 1
/* Copyright (c) 2015-2016 Advanced Micro Devices, Inc. All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* HIT_START * BUILD: %t %s ../../test_common.cpp * RUN: %t * HIT_END */ #include "test_common.h" enum SyncMode { syncNone, syncNullStream, syncOtherStream, syncMarkerThenOtherStream, syncMarkerThenOtherNonBlockingStream, syncDevice }; const char* syncModeString(int syncMode) { switch (syncMode) { case syncNone: return "syncNone"; case syncNullStream: return "syncNullStream"; case syncOtherStream: return "syncOtherStream"; case syncMarkerThenOtherStream: return "syncMarkerThenOtherStream"; case syncMarkerThenOtherNonBlockingStream: return "syncMarkerThenOtherNonBlockingStream"; case syncDevice: return "syncDevice"; default: return "unknown"; }; }; void test(unsigned testMask, int* C_d, int* C_h, int64_t numElements, SyncMode syncMode, bool expectMismatch) { // This test sends a long-running kernel to the null stream, then tests to see if the // specified synchronization technique is effective. // // Some syncMode are not expected to correctly sync (for example "syncNone"). in these // cases the test sets expectMismatch and the check logic below will attempt to ensure that // the undesired synchronization did not occur - ie ensure the kernel is still running and did // not yet update the stop event. This can be tricky since if the kernel runs fast enough it // may complete before the check. To prevent this, the addCountReverse has a count parameter // which causes it to loop repeatedly, and the results are checked in reverse order. // // Tests with expectMismatch=true should ensure the kernel finishes correctly. This results // are checked and we test to make sure stop event has completed. if (!(testMask & p_tests)) { return; } printf("\ntest 0x%02x: syncMode=%s expectMismatch=%d\n", testMask, syncModeString(syncMode), expectMismatch); size_t sizeBytes = numElements * sizeof(int); int count = 100; int init0 = 0; HIPCHECK(hipMemset(C_d, init0, sizeBytes)); for (int i = 0; i < numElements; i++) { C_h[i] = -1; // initialize } hipStream_t otherStream = 0; unsigned flags = (syncMode == syncMarkerThenOtherNonBlockingStream) ? hipStreamNonBlocking : hipStreamDefault; HIPCHECK(hipStreamCreateWithFlags(&otherStream, flags)); hipEvent_t stop, otherStreamEvent; HIPCHECK(hipEventCreate(&stop)); HIPCHECK(hipEventCreate(&otherStreamEvent)); unsigned blocks = HipTest::setNumBlocks(blocksPerCU, threadsPerBlock, numElements); // Launch kernel into null stream, should result in C_h == count. 
hipLaunchKernelGGL(HipTest::addCountReverse, dim3(blocks), dim3(threadsPerBlock), 0, 0 /*stream*/, static_cast<const int*>(C_d), C_h, numElements, count); HIPCHECK(hipEventRecord(stop, 0 /*default*/)); switch (syncMode) { case syncNone: break; case syncNullStream: HIPCHECK(hipStreamSynchronize(0)); // wait on host for null stream: break; case syncOtherStream: // Does this synchronize with the null stream? HIPCHECK(hipStreamSynchronize(otherStream)); break; case syncMarkerThenOtherStream: case syncMarkerThenOtherNonBlockingStream: // this may wait for NULL stream depending hipStreamNonBlocking flag above HIPCHECK(hipEventRecord(otherStreamEvent, otherStream)); HIPCHECK(hipStreamSynchronize(otherStream)); break; case syncDevice: HIPCHECK(hipDeviceSynchronize()); break; default: assert(0); }; hipError_t done = hipEventQuery(stop); if (expectMismatch) { assert(done == hipErrorNotReady); } else { assert(done == hipSuccess); } int mismatches = 0; int expected = init0 + count; for (int i = 0; i < numElements; i++) { bool compareEqual = (C_h[i] == expected); if (!compareEqual) { mismatches++; if (!expectMismatch) { printf("C_h[%d] (%d) != %d\n", i, C_h[i], expected); assert(C_h[i] == expected); } } } if (expectMismatch) { assert(mismatches > 0); } HIPCHECK(hipStreamDestroy(otherStream)); HIPCHECK(hipEventDestroy(stop)); HIPCHECK(hipEventDestroy(otherStreamEvent)); HIPCHECK(hipDeviceSynchronize()); printf("test: OK - %d mismatches (%6.2f%%)\n", mismatches, ((double)(mismatches)*100.0) / numElements); } void runTests(int64_t numElements) { size_t sizeBytes = numElements * sizeof(int); printf("\n\ntest: starting sequence with sizeBytes=%zu bytes, %6.2f MB\n", sizeBytes, sizeBytes / 1024.0 / 1024.0); int *C_h, *C_d; HIPCHECK(hipMalloc(&C_d, sizeBytes)); HIPCHECK(hipHostMalloc(&C_h, sizeBytes)); { test(0x01, C_d, C_h, numElements, syncNone, true /*expectMismatch*/); test(0x02, C_d, C_h, numElements, syncNullStream, false /*expectMismatch*/); test(0x04, C_d, C_h, numElements, syncOtherStream, true /*expectMismatch*/); test(0x08, C_d, C_h, numElements, syncDevice, false /*expectMismatch*/); // Sending a marker to to null stream may synchronize the otherStream // - other created with hipStreamNonBlocking=0 : synchronization, should match // - other created with hipStreamNonBlocking=1 : no synchronization, may mismatch test(0x10, C_d, C_h, numElements, syncMarkerThenOtherStream, false /*expectMismatch*/); // TODO - review why this test seems flaky // test (0x20, C_d, C_h, numElements, syncMarkerThenOtherNonBlockingStream, true // /*expectMismatch*/); } HIPCHECK(hipFree(C_d)); HIPCHECK(hipHostFree(C_h)); } int main(int argc, char* argv[]) { // Can' destroy the default stream:// TODO - move to another test HIPCHECK_API(hipStreamDestroy(0), hipErrorInvalidResourceHandle); HipTest::parseStandardArguments(argc, argv, true /*failOnUndefinedArg*/); runTests(40000000); passed(); }
idx: 1
id: 7,061
msg: This change causes the test to fail on both rocm-1.9.x and rocm-head.
proj: ROCm-Developer-Tools-HIP
lang: cpp
@@ -0,0 +1,17 @@ +/** + * BSD-style license; for more info see http://pmd.sourceforge.net/license.html + */ + +package net.sourceforge.pmd; + +/** + * This interface allows to determine which rule violations are fixable, and with which class the fixes will be made. + */ +public interface AutoFixableRuleViolation extends RuleViolation { + + /** + * Obtain the class which will attempt to fix the AST. + * @return + */ + Class<? extends RuleViolationFix> getRuleViolationFixer(); +}
y: 1
idx: 1
id: 13,423
msg: I'd consider moving all fix related stuff to a distinct package to avoid contaminating the base package
proj: pmd-pmd
lang: java
@@ -266,7 +266,18 @@ def parse_compile_commands_json(logfile, parseLogOptions): results = option_parser.parse_options(command) action.original_command = command - action.analyzer_options = results.compile_opts + + # If the original include directory could not be found + # in the filesystem, it is possible that it was provided + # relative to the working directory in the compile json. + compile_opts = results.compile_opts + for i, opt in enumerate(compile_opts): + if opt.startswith('-I'): + inc_dir = opt[2:].strip() + if not os.path.isdir(inc_dir): + compile_opts[i] = '-I' + \ + os.path.join(entry['directory'], inc_dir) + action.analyzer_options = compile_opts action.lang = results.lang action.target = results.arch
y: 1
# ------------------------------------------------------------------------- # The CodeChecker Infrastructure # This file is distributed under the University of Illinois Open Source # License. See LICENSE.TXT for details. # ------------------------------------------------------------------------- import json import os import re import shlex import subprocess import sys import traceback # TODO: This is a cross-subpackage import! from libcodechecker.log import build_action from libcodechecker.log import option_parser from libcodechecker.logger import get_logger LOG = get_logger('buildlogger') # If these options are present in the original build command, they must # be forwarded to get_compiler_includes and get_compiler_defines so the # resulting includes point to the target that was used in the build. COMPILE_OPTS_FWD_TO_DEFAULTS_GETTER = frozenset( ['^-m(32|64)', '^-std=.*']) compiler_includes_dump_file = "compiler_includes.json" compiler_target_dump_file = "compiler_target.json" def get_compiler_err(cmd): """ Returns the stderr of a compiler invocation as string. """ try: proc = subprocess.Popen(shlex.split(cmd), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, err = proc.communicate("") return err except OSError as oerr: LOG.error("Error during process execution: " + cmd + '\n' + oerr.strerror + "\n") def parse_compiler_includes(lines): ''' Parse the compiler include paths from a string ''' start_mark = "#include <...> search starts here:" end_mark = "End of search list." include_paths = [] do_append = False for line in lines.splitlines(True): line = line.strip() if line.startswith(end_mark): break if do_append: # On OSX there are framework includes, # where we need to strip the "(framework directory)" string. # For instance: # /System/Library/Frameworks (framework directory) fpos = line.find("(framework directory)") if fpos == -1: include_paths.append(line) else: include_paths.append(line[0:fpos-1]) if line.startswith(start_mark): do_append = True return include_paths def filter_compiler_includes(include_dirs): """ Filter the list of compiler includes. We want to elide GCC's include-fixed and instrinsic directory. See docs/gcc_incompatibilities.md """ def contains_intrinsic_headers(include_dir): """ Returns True if the given directory contains at least one intrinsic header. """ if not os.path.exists(include_dir): return False for f in os.listdir(include_dir): if f.endswith("intrin.h"): return True return False result = [] for include_dir in include_dirs: # Skip GCC's fixinclude dir if os.path.basename(os.path.normpath(include_dir)) == "include-fixed": continue if contains_intrinsic_headers(include_dir): continue result.append(include_dir) return result def prepend_isystem_and_normalize(include_dirs): return ["-isystem " + os.path.normpath(idir) for idir in include_dirs] def parse_compiler_target(lines): """ Parse the compiler target from a string. 
""" target_label = "Target:" target = "" for line in lines.splitlines(True): line = line.strip().split() if len(line) > 1 and line[0] == target_label: target = line[1] return target def dump_compiler_info(output_path, filename, data): filename = os.path.join(output_path, filename) all_data = dict() if os.path.exists(filename): with open(filename, 'r') as f: all_data = json.load(f) all_data.update(data) with open(filename, 'w') as f: f.write(json.dumps(all_data)) def load_compiler_info(filename, compiler): with open(filename, 'r') as f: data = json.load(f) value = data.get(compiler) if value is None: LOG.error("Could not find compiler %s in file %s" % (compiler, filename)) return value def get_compiler_includes(parseLogOptions, compiler, lang, compile_opts, extra_opts=None): """ Returns a list of default includes of the given compiler. """ if extra_opts is None: extra_opts = [] # The first sysroot flag found among the compilation options is added # to the command below to give a more precise default include list. # Absence of any sysroot flags results in an empty string. sysroot = next( (item for item in compile_opts if item.startswith("--sysroot=")), "") cmd = compiler + " " + ' '.join(extra_opts) + " -E -x " + lang + \ " " + sysroot + " - -v " err = "" if parseLogOptions.compiler_includes_file is None: LOG.debug("Retrieving default includes via '" + cmd + "'") err = get_compiler_err(cmd) else: err = load_compiler_info(parseLogOptions.compiler_includes_file, compiler) if parseLogOptions.output_path is not None: LOG.debug("Dumping default includes " + compiler) dump_compiler_info(parseLogOptions.output_path, compiler_includes_dump_file, {compiler: err}) return prepend_isystem_and_normalize( filter_compiler_includes(parse_compiler_includes(err))) def get_compiler_target(parseLogOptions, compiler): """ Returns the target triple of the given compiler as a string. """ err = "" if parseLogOptions.compiler_target_file is None: cmd = compiler + ' -v' LOG.debug("Retrieving target platform information via '" + cmd + "'") err = get_compiler_err(cmd) else: err = load_compiler_info(parseLogOptions.compiler_target_file, compiler) if parseLogOptions.output_path is not None: dump_compiler_info(parseLogOptions.output_path, compiler_target_dump_file, {compiler: err}) return parse_compiler_target(err) def remove_file_if_exists(filename): if os.path.isfile(filename): os.remove(filename) def parse_compile_commands_json(logfile, parseLogOptions): # The add-compiler-defaults is a deprecated argument # and we always perform target and include auto-detection. add_compiler_defaults = True LOG.debug('parse_compile_commands_json: ' + str(add_compiler_defaults)) output_path = parseLogOptions.output_path if output_path is not None: remove_file_if_exists(os.path.join(output_path, compiler_includes_dump_file)) remove_file_if_exists(os.path.join(output_path, compiler_target_dump_file)) actions = [] filtered_build_actions = {} data = json.load(logfile) compiler_includes = {} compiler_target = {} counter = 0 for entry in data: sourcefile = entry['file'] if not os.path.isabs(sourcefile): # Newest versions of intercept-build can create the 'file' in the # JSON Compilation Database as a relative path. 
sourcefile = os.path.join(os.path.abspath(entry['directory']), sourcefile) lang = option_parser.get_language(sourcefile[sourcefile.rfind('.'):]) if not lang: continue action = build_action.BuildAction(counter) if 'command' in entry: command = entry['command'] # Old versions of intercept-build (confirmed to those shipping # with upstream clang-5.0) do escapes in another way: # -DVARIABLE="a b" becomes -DVARIABLE=\"a b\" in the output. # This would be messed up later on by options_parser, so need a # fix here. (Should be removed once we are sure noone uses this # intercept-build anymore!) if r'\"' in command: command = command.replace(r'\"', '"') elif 'arguments' in entry: # Newest versions of intercept-build create an argument vector # instead of a command string. command = ' '.join(entry['arguments']) else: raise KeyError("No valid 'command' or 'arguments' entry found!") results = option_parser.parse_options(command) action.original_command = command action.analyzer_options = results.compile_opts action.lang = results.lang action.target = results.arch action.output = results.output # Store the compiler built in include paths and defines. if add_compiler_defaults and results.compiler: if not (results.compiler in compiler_includes): # Fetch defaults from the compiler, # make sure we use the correct architecture. extra_opts = [] for regex in COMPILE_OPTS_FWD_TO_DEFAULTS_GETTER: pattern = re.compile(regex) for comp_opt in action.analyzer_options: if re.match(pattern, comp_opt): extra_opts.append(comp_opt) compiler_includes[results.compiler] = \ get_compiler_includes(parseLogOptions, results.compiler, results.lang, results.compile_opts, extra_opts) if not (results.compiler in compiler_target): compiler_target[results.compiler] = \ get_compiler_target(parseLogOptions, results.compiler) action.compiler_includes = compiler_includes[results.compiler] action.target = compiler_target[results.compiler] if results.action != option_parser.ActionType.COMPILE: action.skip = True # TODO: Check arch. action.directory = entry['directory'] action.sources = sourcefile # Filter out duplicate compilation commands. unique_key = action.cmp_key if filtered_build_actions.get(unique_key) is None: filtered_build_actions[unique_key] = action del action counter += 1 for _, ba in filtered_build_actions.items(): actions.append(ba) return actions def parse_log(logfilepath, parseLogOptions): ''' @param output_path: The report directory. Files with the compiler includes and targets will be written into this dir if add_compiler_defaults is set. ''' LOG.debug('Parsing log file: ' + logfilepath) with open(logfilepath) as logfile: try: actions = parse_compile_commands_json(logfile, parseLogOptions) except (ValueError, KeyError, TypeError) as ex: if os.stat(logfilepath).st_size == 0: LOG.error('The compile database is empty.') else: LOG.error('The compile database is not valid.') LOG.debug(traceback.format_exc()) LOG.debug(ex) sys.exit(1) LOG.debug('Parsing log file done.') return [build_action for build_action in actions if not build_action.skip]
idx: 1
id: 9,546
msg: Use `os.path.join` for path concatenation.
proj: Ericsson-codechecker
lang: c
@@ -18,11 +18,13 @@ from .retina_head import RetinaHead from .retina_sepbn_head import RetinaSepBNHead from .rpn_head import RPNHead from .ssd_head import SSDHead +from .yolact_head import YolactHead, YolactProtonet, YolactSegmHead __all__ = [ 'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption', 'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead', 'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead', 'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead', - 'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead' + 'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead', 'YolactHead', + 'YolactSegmHead', 'YolactProtonet' ]
y: 1
from .anchor_free_head import AnchorFreeHead from .anchor_head import AnchorHead from .atss_head import ATSSHead from .corner_head import CornerHead from .fcos_head import FCOSHead from .fovea_head import FoveaHead from .free_anchor_retina_head import FreeAnchorRetinaHead from .fsaf_head import FSAFHead from .ga_retina_head import GARetinaHead from .ga_rpn_head import GARPNHead from .gfl_head import GFLHead from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead from .nasfcos_head import NASFCOSHead from .pisa_retinanet_head import PISARetinaHead from .pisa_ssd_head import PISASSDHead from .reppoints_head import RepPointsHead from .retina_head import RetinaHead from .retina_sepbn_head import RetinaSepBNHead from .rpn_head import RPNHead from .ssd_head import SSDHead __all__ = [ 'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption', 'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead', 'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead', 'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead', 'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead' ]
idx: 1
id: 20,907
msg: Use upper case: YOLACTHead, YOLACTProtonet, YOLACTSegmHead
proj: open-mmlab-mmdetection
lang: py
@@ -43,7 +43,7 @@ type harness struct { } func (h *harness) MakeDriver(ctx context.Context) (driver.Crypter, error) { - return &Crypter{ + return &crypter{ keyID: &KeyID{ ProjectID: projectID, Location: location,
y: 1
// Copyright 2018 The Go Cloud Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package gcpkms import ( "context" "testing" cloudkms "cloud.google.com/go/kms/apiv1" "gocloud.dev/internal/secrets/driver" "gocloud.dev/internal/secrets/drivertest" "gocloud.dev/internal/testing/setup" "google.golang.org/api/option" ) // These constants capture values that were used during the last --record. // If you want to use --record mode, // 1. Update projectID to your GCP project name (not number!) // 2. Enable the Cloud KMS API. // 3. Create a key ring and a key, change their name below accordingly. const ( projectID = "pledged-solved-practically" location = "global" keyRing = "test" keyID = "password" ) type harness struct { client *cloudkms.KeyManagementClient close func() } func (h *harness) MakeDriver(ctx context.Context) (driver.Crypter, error) { return &Crypter{ keyID: &KeyID{ ProjectID: projectID, Location: location, KeyRing: keyRing, Key: keyID, }, client: h.client, }, nil } func (h *harness) Close() { h.close() } func newHarness(ctx context.Context, t *testing.T) (drivertest.Harness, error) { conn, done := setup.NewGCPgRPCConn(ctx, t, endPoint, "secrets") client, err := cloudkms.NewKeyManagementClient(ctx, option.WithGRPCConn(conn)) if err != nil { return nil, err } return &harness{ client: client, close: func() { client.Close() done() }, }, nil } func TestConformance(t *testing.T) { drivertest.RunConformanceTests(t, newHarness) }
idx: 1
id: 13,537
msg: The driver tests should be updated to use the concrete type instead of the driver directly; right now your test coverage of the concrete type is 0%.
proj: google-go-cloud
lang: go
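The msg in the record above argues that conformance tests should go through the exported concrete type rather than instantiating the unexported driver directly, so that the concrete type actually gets coverage. A minimal, generic sketch of that layering, with entirely invented names and a toy transformation standing in for real crypto (this is not the gocloud API):

```go
// Hypothetical package layout: tests call NewCrypter, the exported
// constructor, instead of building the unexported driver by hand.
package keeper

// crypter is the low-level, unexported driver implementation.
type crypter struct{ key byte }

func (c *crypter) Encrypt(b []byte) []byte {
	out := make([]byte, len(b))
	for i, v := range b {
		out[i] = v ^ c.key // toy transformation, not real encryption
	}
	return out
}

// Crypter is the exported concrete type that users (and tests) exercise.
type Crypter struct{ d *crypter }

// NewCrypter wires up the driver; routing tests through it gives the
// exported type real coverage.
func NewCrypter(key byte) *Crypter { return &Crypter{d: &crypter{key: key}} }

func (c *Crypter) Encrypt(b []byte) []byte { return c.d.Encrypt(b) }
```

A test would then call keeper.NewCrypter(...) and assert on Crypter's behaviour, leaving the driver type as an internal detail.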
@@ -1038,7 +1038,14 @@ class CommentAnalyzer if ($method_tree_child->children) { - $param_type = Type::getTypeFromTree($method_tree_child->children[0], $codebase); + try { + $param_type = Type::getTypeFromTree($method_tree_child->children[0], $codebase); + } catch (TypeParseTreeException $e) { + $msg = 'Badly-formatted @method parameters for ' . $method_entry; + $msg .= '; have you checked if a variable passed by reference has a space between'; + $msg .= ' the ampersand and the start of the variable?'; + throw new DocblockParseException($msg); + } $docblock_lines[] = '@param \\' . $param_type . ' ' . ($method_tree_child->variadic ? '...' : '') . $method_tree_child->name;
y: 1
<?php namespace Psalm\Internal\Analyzer; use PhpParser; use Psalm\Aliases; use Psalm\DocComment; use Psalm\Exception\DocblockParseException; use Psalm\Exception\IncorrectDocblockException; use Psalm\Exception\TypeParseTreeException; use Psalm\FileSource; use Psalm\Internal\Scanner\ClassLikeDocblockComment; use Psalm\Internal\Scanner\FunctionDocblockComment; use Psalm\Internal\Scanner\VarDocblockComment; use Psalm\Internal\Type\ParseTree; use Psalm\Type; use function trim; use function substr_count; use function strlen; use function preg_replace; use function str_replace; use function preg_match; use function count; use function reset; use function preg_split; use const PREG_SPLIT_DELIM_CAPTURE; use const PREG_SPLIT_NO_EMPTY; use function array_shift; use function implode; use function substr; use function strpos; use function strtolower; use function in_array; use function explode; use function array_merge; use const PREG_OFFSET_CAPTURE; use function rtrim; /** * @internal */ class CommentAnalyzer { const TYPE_REGEX = '(\??\\\?[\(\)A-Za-z0-9_&\<\.=,\>\[\]\-\{\}:|?\\\\]*|\$[a-zA-Z_0-9_]+)'; /** * @param array<string, array<string, array{Type\Union}>>|null $template_type_map * @param array<string, array<int, array{0: string, 1: int}>> $type_aliases * * @throws DocblockParseException if there was a problem parsing the docblock * * @return VarDocblockComment[] */ public static function getTypeFromComment( PhpParser\Comment\Doc $comment, FileSource $source, Aliases $aliases, array $template_type_map = null, ?array $type_aliases = null ) { $parsed_docblock = DocComment::parsePreservingLength($comment); return self::arrayToDocblocks( $comment, $parsed_docblock, $source, $aliases, $template_type_map, $type_aliases ); } /** * @param array<string, array<string, array{Type\Union}>>|null $template_type_map * @param array<string, array<int, array{0: string, 1: int}>> $type_aliases * @param array{description:string, specials:array<string, array<int, string>>} $parsed_docblock * * @return VarDocblockComment[] * * @throws DocblockParseException if there was a problem parsing the docblock */ public static function arrayToDocblocks( PhpParser\Comment\Doc $comment, array $parsed_docblock, FileSource $source, Aliases $aliases, array $template_type_map = null, ?array $type_aliases = null ) : array { $var_id = null; $var_type_tokens = null; $original_type = null; $var_comments = []; $comment_text = $comment->getText(); $var_line_number = $comment->getLine(); if ($parsed_docblock) { $all_vars = ($parsed_docblock['specials']['var'] ?? []) + ($parsed_docblock['specials']['phpstan-var'] ?? []) + ($parsed_docblock['specials']['psalm-var'] ?? []); foreach ($all_vars as $offset => $var_line) { $var_line = trim($var_line); if (!$var_line) { continue; } $type_start = null; $type_end = null; $line_parts = self::splitDocLine($var_line); $line_number = $comment->getLine() + substr_count($comment_text, "\n", 0, $offset); if ($line_parts && $line_parts[0]) { $type_start = $offset + $comment->getFilePos(); $type_end = $type_start + strlen($line_parts[0]); $line_parts[0] = self::sanitizeDocblockType($line_parts[0]); if ($line_parts[0] === '' || ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) ) { throw new IncorrectDocblockException('Misplaced variable'); } try { $var_type_tokens = Type::fixUpLocalType( $line_parts[0], $aliases, $template_type_map, $type_aliases ); } catch (TypeParseTreeException $e) { throw new DocblockParseException($line_parts[0] . 
' is not a valid type'); } $original_type = $line_parts[0]; $var_line_number = $line_number; if (count($line_parts) > 1 && $line_parts[1][0] === '$') { $var_id = $line_parts[1]; } } if (!$var_type_tokens || !$original_type) { continue; } try { $defined_type = Type::parseTokens($var_type_tokens, null, $template_type_map ?: []); } catch (TypeParseTreeException $e) { throw new DocblockParseException( $line_parts[0] . ' is not a valid type' . ' (from ' . $source->getFilePath() . ':' . $comment->getLine() . ')' ); } $defined_type->setFromDocblock(); $var_comment = new VarDocblockComment(); $var_comment->type = $defined_type; $var_comment->original_type = $original_type; $var_comment->var_id = $var_id; $var_comment->line_number = $var_line_number; $var_comment->type_start = $type_start; $var_comment->type_end = $type_end; $var_comment->deprecated = isset($parsed_docblock['specials']['deprecated']); $var_comment->internal = isset($parsed_docblock['specials']['internal']); $var_comment->readonly = isset($parsed_docblock['specials']['readonly']) || isset($parsed_docblock['specials']['psalm-readonly']) || isset($parsed_docblock['specials']['psalm-readonly-allow-private-mutation']); $var_comment->allow_private_mutation = isset($parsed_docblock['specials']['psalm-allow-private-mutation']) || isset($parsed_docblock['specials']['psalm-readonly-allow-private-mutation']); $var_comment->remove_taint = isset($parsed_docblock['specials']['psalm-remove-taint']); if (isset($parsed_docblock['specials']['psalm-internal'])) { $psalm_internal = reset($parsed_docblock['specials']['psalm-internal']); if ($psalm_internal) { $var_comment->psalm_internal = $psalm_internal; } else { throw new DocblockParseException('psalm-internal annotation used without specifying namespace'); } $var_comment->psalm_internal = reset($parsed_docblock['specials']['psalm-internal']); if (!$var_comment->internal) { throw new DocblockParseException('@psalm-internal annotation used without @internal'); } } $var_comments[] = $var_comment; } } if (!$var_comments && (isset($parsed_docblock['specials']['deprecated']) || isset($parsed_docblock['specials']['internal']) || isset($parsed_docblock['specials']['readonly']) || isset($parsed_docblock['specials']['psalm-readonly']) || isset($parsed_docblock['specials']['psalm-readonly-allow-private-mutation']) || isset($parsed_docblock['specials']['psalm-remove-taint'])) ) { $var_comment = new VarDocblockComment(); $var_comment->deprecated = isset($parsed_docblock['specials']['deprecated']); $var_comment->internal = isset($parsed_docblock['specials']['internal']); $var_comment->readonly = isset($parsed_docblock['specials']['readonly']) || isset($parsed_docblock['specials']['psalm-readonly']) || isset($parsed_docblock['specials']['psalm-readonly-allow-private-mutation']); $var_comment->allow_private_mutation = isset($parsed_docblock['specials']['psalm-allow-private-mutation']) || isset($parsed_docblock['specials']['psalm-readonly-allow-private-mutation']); $var_comment->remove_taint = isset($parsed_docblock['specials']['psalm-remove-taint']); $var_comments[] = $var_comment; } return $var_comments; } private static function sanitizeDocblockType(string $docblock_type) : string { $docblock_type = preg_replace('@^[ \t]*\*@m', '', $docblock_type); $docblock_type = preg_replace('/,\n\s+\}/', '}', $docblock_type); return str_replace("\n", '', $docblock_type); } /** * @param Aliases $aliases * @param array<string, array<int, array{0: string, 1: int}>> $type_aliases * * @throws DocblockParseException if there was a 
problem parsing the docblock * * @return array<string, list<array{0: string, 1: int}>> */ public static function getTypeAliasesFromComment( PhpParser\Comment\Doc $comment, Aliases $aliases, array $type_aliases = null ) { $parsed_docblock = DocComment::parsePreservingLength($comment); if (!isset($parsed_docblock['specials']['psalm-type'])) { return []; } return self::getTypeAliasesFromCommentLines( $parsed_docblock['specials']['psalm-type'], $aliases, $type_aliases ); } /** * @param array<string> $type_alias_comment_lines * @param Aliases $aliases * @param array<string, array<int, array{0: string, 1: int}>> $type_aliases * * @throws DocblockParseException if there was a problem parsing the docblock * * @return array<string, list<array{0: string, 1: int}>> */ private static function getTypeAliasesFromCommentLines( array $type_alias_comment_lines, Aliases $aliases, array $type_aliases = null ) { $type_alias_tokens = []; foreach ($type_alias_comment_lines as $var_line) { $var_line = trim($var_line); if (!$var_line) { continue; } $var_line = preg_replace('/[ \t]+/', ' ', preg_replace('@^[ \t]*\*@m', '', $var_line)); $var_line = preg_replace('/,\n\s+\}/', '}', $var_line); $var_line = str_replace("\n", '', $var_line); $var_line_parts = preg_split('/( |=)/', $var_line, -1, PREG_SPLIT_DELIM_CAPTURE | PREG_SPLIT_NO_EMPTY); if (!$var_line_parts) { continue; } $type_alias = array_shift($var_line_parts); if (!isset($var_line_parts[0])) { continue; } if ($var_line_parts[0] === ' ') { array_shift($var_line_parts); } if ($var_line_parts[0] === '=') { array_shift($var_line_parts); } if (!isset($var_line_parts[0])) { continue; } if ($var_line_parts[0] === ' ') { array_shift($var_line_parts); } $type_string = str_replace("\n", '', implode('', $var_line_parts)); $type_string = preg_replace('/>[^>^\}]*$/', '>', $type_string); $type_string = preg_replace('/\}[^>^\}]*$/', '}', $type_string); try { $type_tokens = Type::fixUpLocalType( $type_string, $aliases, null, $type_alias_tokens + $type_aliases ); } catch (TypeParseTreeException $e) { throw new DocblockParseException($type_string . ' is not a valid type'); } $type_alias_tokens[$type_alias] = $type_tokens; } return $type_alias_tokens; } /** * @param int $line_number * * @throws DocblockParseException if there was a problem parsing the docblock * * @return FunctionDocblockComment */ public static function extractFunctionDocblockInfo(PhpParser\Comment\Doc $comment) { $parsed_docblock = DocComment::parsePreservingLength($comment); $comment_text = $comment->getText(); $info = new FunctionDocblockComment(); if (isset($parsed_docblock['specials']['return']) || isset($parsed_docblock['specials']['psalm-return']) || isset($parsed_docblock['specials']['phpstan-return']) ) { if (isset($parsed_docblock['specials']['psalm-return'])) { $return_specials = $parsed_docblock['specials']['psalm-return']; } elseif (isset($parsed_docblock['specials']['phpstan-return'])) { $return_specials = $parsed_docblock['specials']['phpstan-return']; } else { $return_specials = $parsed_docblock['specials']['return']; } self::extractReturnType( $comment, $return_specials, $info ); } if (isset($parsed_docblock['specials']['param']) || isset($parsed_docblock['specials']['psalm-param']) || isset($parsed_docblock['specials']['phpstan-param']) ) { $all_params = (isset($parsed_docblock['specials']['param']) ? $parsed_docblock['specials']['param'] : []) + (isset($parsed_docblock['specials']['phpstan-param']) ? 
$parsed_docblock['specials']['phpstan-param'] : []) + (isset($parsed_docblock['specials']['psalm-param']) ? $parsed_docblock['specials']['psalm-param'] : []); foreach ($all_params as $offset => $param) { $line_parts = self::splitDocLine($param); if (count($line_parts) === 1 && isset($line_parts[0][0]) && $line_parts[0][0] === '$') { continue; } if (count($line_parts) > 1) { if (preg_match('/^&?(\.\.\.)?&?\$[A-Za-z0-9_]+,?$/', $line_parts[1]) && $line_parts[0][0] !== '{' ) { $line_parts[1] = str_replace('&', '', $line_parts[1]); $line_parts[1] = preg_replace('/,$/', '', $line_parts[1]); $start = $offset + $comment->getFilePos(); $end = $start + strlen($line_parts[0]); $line_parts[0] = self::sanitizeDocblockType($line_parts[0]); if ($line_parts[0] === '' || ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) ) { throw new IncorrectDocblockException('Misplaced variable'); } $info->params[] = [ 'name' => trim($line_parts[1]), 'type' => $line_parts[0], 'line_number' => $comment->getLine() + substr_count($comment_text, "\n", 0, $offset), 'start' => $start, 'end' => $end, ]; } } else { throw new DocblockParseException('Badly-formatted @param'); } } } if (isset($parsed_docblock['specials']['param-out'])) { foreach ($parsed_docblock['specials']['param-out'] as $offset => $param) { $line_parts = self::splitDocLine($param); if (count($line_parts) === 1 && isset($line_parts[0][0]) && $line_parts[0][0] === '$') { continue; } if (count($line_parts) > 1) { if (!preg_match('/\[[^\]]+\]/', $line_parts[0]) && preg_match('/^(\.\.\.)?&?\$[A-Za-z0-9_]+,?$/', $line_parts[1]) && $line_parts[0][0] !== '{' ) { if ($line_parts[1][0] === '&') { $line_parts[1] = substr($line_parts[1], 1); } $line_parts[0] = str_replace("\n", '', preg_replace('@^[ \t]*\*@m', '', $line_parts[0])); if ($line_parts[0] === '' || ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) ) { throw new IncorrectDocblockException('Misplaced variable'); } $line_parts[1] = preg_replace('/,$/', '', $line_parts[1]); $info->params_out[] = [ 'name' => trim($line_parts[1]), 'type' => str_replace("\n", '', $line_parts[0]), 'line_number' => $comment->getLine() + substr_count($comment_text, "\n", 0, $offset), ]; } } else { throw new DocblockParseException('Badly-formatted @param'); } } } if (isset($parsed_docblock['specials']['psalm-taint-sink'])) { foreach ($parsed_docblock['specials']['psalm-taint-sink'] as $param) { $param = trim($param); $info->taint_sink_params[] = ['name' => $param]; } } if (isset($parsed_docblock['specials']['psalm-assert-untainted'])) { foreach ($parsed_docblock['specials']['psalm-assert-untainted'] as $param) { $param = trim($param); $info->assert_untainted_params[] = ['name' => $param]; } } if (isset($parsed_docblock['specials']['global'])) { foreach ($parsed_docblock['specials']['global'] as $offset => $global) { $line_parts = self::splitDocLine($global); if (count($line_parts) === 1 && isset($line_parts[0][0]) && $line_parts[0][0] === '$') { continue; } if (count($line_parts) > 1) { if (!preg_match('/\[[^\]]+\]/', $line_parts[0]) && preg_match('/^(\.\.\.)?&?\$[A-Za-z0-9_]+,?$/', $line_parts[1]) && $line_parts[0][0] !== '{' ) { if ($line_parts[1][0] === '&') { $line_parts[1] = substr($line_parts[1], 1); } if ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) { throw new IncorrectDocblockException('Misplaced variable'); } $line_parts[1] = preg_replace('/,$/', '', $line_parts[1]); $info->globals[] = [ 'name' => $line_parts[1], 'type' => $line_parts[0], 
'line_number' => $comment->getLine() + substr_count($comment_text, "\n", 0, $offset), ]; } } else { throw new DocblockParseException('Badly-formatted @param'); } } } if (isset($parsed_docblock['specials']['deprecated'])) { $info->deprecated = true; } if (isset($parsed_docblock['specials']['internal'])) { $info->internal = true; } if (isset($parsed_docblock['specials']['psalm-internal'])) { $psalm_internal = reset($parsed_docblock['specials']['psalm-internal']); if ($psalm_internal) { $info->psalm_internal = $psalm_internal; } else { throw new DocblockParseException('@psalm-internal annotation used without specifying namespace'); } $info->psalm_internal = reset($parsed_docblock['specials']['psalm-internal']); if (! $info->internal) { throw new DocblockParseException('@psalm-internal annotation used without @internal'); } } if (isset($parsed_docblock['specials']['psalm-remove-taint'])) { $info->remove_taint = true; } if (isset($parsed_docblock['specials']['psalm-suppress'])) { foreach ($parsed_docblock['specials']['psalm-suppress'] as $offset => $suppress_entry) { $info->suppressed_issues[$offset + $comment->getFilePos()] = preg_split('/[\s]+/', $suppress_entry)[0]; } } if (isset($parsed_docblock['specials']['throws'])) { foreach ($parsed_docblock['specials']['throws'] as $offset => $throws_entry) { $throws_class = preg_split('/[\s]+/', $throws_entry)[0]; if (!$throws_class) { throw new IncorrectDocblockException('Unexpectedly empty @throws'); } $info->throws[] = [ $throws_class, $offset + $comment->getFilePos(), $comment->getLine() + substr_count($comment->getText(), "\n", 0, $offset) ]; } } if (strpos(strtolower($parsed_docblock['description']), '@inheritdoc') !== false || isset($parsed_docblock['specials']['inheritdoc']) || isset($parsed_docblock['specials']['inheritDoc'])) { $info->inheritdoc = true; } if (isset($parsed_docblock['specials']['template']) || isset($parsed_docblock['specials']['psalm-template']) || isset($parsed_docblock['specials']['phpstan-template']) ) { $all_templates = (isset($parsed_docblock['specials']['template']) ? $parsed_docblock['specials']['template'] : []) + (isset($parsed_docblock['specials']['psalm-template']) ? $parsed_docblock['specials']['psalm-template'] : []) + (isset($parsed_docblock['specials']['phpstan-template']) ? 
$parsed_docblock['specials']['phpstan-template'] : []); foreach ($all_templates as $template_line) { $template_type = preg_split('/[\s]+/', preg_replace('@^[ \t]*\*@m', '', $template_line)); $template_name = array_shift($template_type); if (!$template_name) { throw new IncorrectDocblockException('Empty @template tag'); } if (count($template_type) > 1 && in_array(strtolower($template_type[0]), ['as', 'super', 'of'], true) ) { $template_modifier = strtolower(array_shift($template_type)); $info->templates[] = [ $template_name, $template_modifier, implode(' ', $template_type), false ]; } else { $info->templates[] = [$template_name, null, null, false]; } } } if (isset($parsed_docblock['specials']['template-typeof'])) { foreach ($parsed_docblock['specials']['template-typeof'] as $template_typeof) { $typeof_parts = preg_split('/[\s]+/', preg_replace('@^[ \t]*\*@m', '', $template_typeof)); if ($typeof_parts === false || count($typeof_parts) < 2 || $typeof_parts[1][0] !== '$') { throw new IncorrectDocblockException('Misplaced variable'); } $info->template_typeofs[] = [ 'template_type' => $typeof_parts[0], 'param_name' => substr($typeof_parts[1], 1), ]; } } if (isset($parsed_docblock['specials']['psalm-assert'])) { foreach ($parsed_docblock['specials']['psalm-assert'] as $assertion) { $line_parts = self::splitDocLine($assertion); if (count($line_parts) < 2 || $line_parts[1][0] !== '$') { throw new IncorrectDocblockException('Misplaced variable'); } $line_parts[0] = self::sanitizeDocblockType($line_parts[0]); $info->assertions[] = [ 'type' => $line_parts[0], 'param_name' => substr($line_parts[1], 1), ]; } } if (isset($parsed_docblock['specials']['psalm-assert-if-true'])) { foreach ($parsed_docblock['specials']['psalm-assert-if-true'] as $assertion) { $line_parts = self::splitDocLine($assertion); if (count($line_parts) < 2 || $line_parts[1][0] !== '$') { throw new IncorrectDocblockException('Misplaced variable'); } $info->if_true_assertions[] = [ 'type' => $line_parts[0], 'param_name' => substr($line_parts[1], 1), ]; } } if (isset($parsed_docblock['specials']['psalm-assert-if-false'])) { foreach ($parsed_docblock['specials']['psalm-assert-if-false'] as $assertion) { $line_parts = self::splitDocLine($assertion); if (count($line_parts) < 2 || $line_parts[1][0] !== '$') { throw new IncorrectDocblockException('Misplaced variable'); } $info->if_false_assertions[] = [ 'type' => $line_parts[0], 'param_name' => substr($line_parts[1], 1), ]; } } $info->variadic = isset($parsed_docblock['specials']['psalm-variadic']); $info->pure = isset($parsed_docblock['specials']['psalm-pure']) || isset($parsed_docblock['specials']['pure']); if (isset($parsed_docblock['specials']['psalm-mutation-free'])) { $info->mutation_free = true; } if (isset($parsed_docblock['specials']['psalm-external-mutation-free'])) { $info->external_mutation_free = true; } $info->ignore_nullable_return = isset($parsed_docblock['specials']['psalm-ignore-nullable-return']); $info->ignore_falsable_return = isset($parsed_docblock['specials']['psalm-ignore-falsable-return']); return $info; } /** * @param array<int, string> $return_specials * @return void */ private static function extractReturnType( PhpParser\Comment\Doc $comment, array $return_specials, FunctionDocblockComment $info ) { foreach ($return_specials as $offset => $return_block) { $return_lines = explode("\n", $return_block); if (!trim($return_lines[0])) { return; } $return_block = trim($return_block); if (!$return_block) { return; } $line_parts = self::splitDocLine($return_block); if 
($line_parts[0][0] !== '{') { if ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) { throw new IncorrectDocblockException('Misplaced variable'); } $start = $offset + $comment->getFilePos(); $end = $start + strlen($line_parts[0]); $line_parts[0] = self::sanitizeDocblockType($line_parts[0]); $info->return_type = array_shift($line_parts); $info->return_type_description = $line_parts ? implode(' ', $line_parts) : null; $info->return_type_line_number = $comment->getLine() + substr_count($comment->getText(), "\n", 0, $offset); $info->return_type_start = $start; $info->return_type_end = $end; } else { throw new DocblockParseException('Badly-formatted @return type'); } break; } } /** * @throws DocblockParseException if there was a problem parsing the docblock * * @return ClassLikeDocblockComment * @psalm-suppress MixedArrayAccess */ public static function extractClassLikeDocblockInfo( \PhpParser\Node $node, PhpParser\Comment\Doc $comment, Aliases $aliases ) { $parsed_docblock = DocComment::parsePreservingLength($comment); $codebase = ProjectAnalyzer::getInstance()->getCodebase(); $info = new ClassLikeDocblockComment(); if (isset($parsed_docblock['specials']['template']) || isset($parsed_docblock['specials']['psalm-template']) || isset($parsed_docblock['specials']['phpstan-template']) ) { $all_templates = (isset($parsed_docblock['specials']['template']) ? $parsed_docblock['specials']['template'] : []) + (isset($parsed_docblock['specials']['phpstan-template']) ? $parsed_docblock['specials']['phpstan-template'] : []) + (isset($parsed_docblock['specials']['psalm-template']) ? $parsed_docblock['specials']['psalm-template'] : []); foreach ($all_templates as $offset => $template_line) { $template_type = preg_split('/[\s]+/', preg_replace('@^[ \t]*\*@m', '', $template_line)); $template_name = array_shift($template_type); if (!$template_name) { throw new IncorrectDocblockException('Empty @template tag'); } if (count($template_type) > 1 && in_array(strtolower($template_type[0]), ['as', 'super', 'of'], true) ) { $template_modifier = strtolower(array_shift($template_type)); $info->templates[] = [ $template_name, $template_modifier, implode(' ', $template_type), false, $offset ]; } else { $info->templates[] = [$template_name, null, null, false, $offset]; } } } if (isset($parsed_docblock['specials']['template-covariant']) || isset($parsed_docblock['specials']['psalm-template-covariant']) || isset($parsed_docblock['specials']['phpstan-template-covariant']) ) { $all_templates = (isset($parsed_docblock['specials']['template-covariant']) ? $parsed_docblock['specials']['template-covariant'] : []) + (isset($parsed_docblock['specials']['phpstan-template-covariant']) ? $parsed_docblock['specials']['phpstan-template-covariant'] : []) + (isset($parsed_docblock['specials']['psalm-template-covariant']) ? 
$parsed_docblock['specials']['psalm-template-covariant'] : []); foreach ($all_templates as $offset => $template_line) { $template_type = preg_split('/[\s]+/', preg_replace('@^[ \t]*\*@m', '', $template_line)); $template_name = array_shift($template_type); if (!$template_name) { throw new IncorrectDocblockException('Empty @template-covariant tag'); } if (count($template_type) > 1 && in_array(strtolower($template_type[0]), ['as', 'super', 'of'], true) ) { $template_modifier = strtolower(array_shift($template_type)); $info->templates[] = [ $template_name, $template_modifier, implode(' ', $template_type), true, $offset ]; } else { $info->templates[] = [$template_name, null, null, true, $offset]; } } } if (isset($parsed_docblock['specials']['template-extends']) || isset($parsed_docblock['specials']['inherits']) || isset($parsed_docblock['specials']['extends']) || isset($parsed_docblock['specials']['psalm-extends']) || isset($parsed_docblock['specials']['phpstan-extends']) ) { $all_inheritance = ($parsed_docblock['specials']['template-extends'] ?? []) + ($parsed_docblock['specials']['inherits'] ?? []) + ($parsed_docblock['specials']['extends'] ?? []) + ($parsed_docblock['specials']['psalm-extends'] ?? []) + ($parsed_docblock['specials']['phpstan-extends'] ?? []); foreach ($all_inheritance as $template_line) { $info->template_extends[] = trim(preg_replace('@^[ \t]*\*@m', '', $template_line)); } } if (isset($parsed_docblock['specials']['template-implements']) || isset($parsed_docblock['specials']['implements']) || isset($parsed_docblock['specials']['phpstan-implements']) || isset($parsed_docblock['specials']['psalm-implements']) ) { $all_inheritance = ($parsed_docblock['specials']['template-implements'] ?? []) + ($parsed_docblock['specials']['implements'] ?? []) + ($parsed_docblock['specials']['phpstan-implements'] ?? []) + ($parsed_docblock['specials']['psalm-implements'] ?? []); foreach ($all_inheritance as $template_line) { $info->template_implements[] = trim(preg_replace('@^[ \t]*\*@m', '', $template_line)); } } if (isset($parsed_docblock['specials']['psalm-yield']) ) { $yield = reset($parsed_docblock['specials']['psalm-yield']); $info->yield = trim(preg_replace('@^[ \t]*\*@m', '', $yield)); } if (isset($parsed_docblock['specials']['deprecated'])) { $info->deprecated = true; } if (isset($parsed_docblock['specials']['internal'])) { $info->internal = true; } if (isset($parsed_docblock['specials']['psalm-internal'])) { $psalm_internal = reset($parsed_docblock['specials']['psalm-internal']); if ($psalm_internal) { $info->psalm_internal = $psalm_internal; } else { throw new DocblockParseException('psalm-internal annotation used without specifying namespace'); } if (! 
$info->internal) { throw new DocblockParseException('@psalm-internal annotation used without @internal'); } } if (isset($parsed_docblock['specials']['mixin'])) { $mixin = trim(reset($parsed_docblock['specials']['mixin'])); $mixin = explode(' ', $mixin)[0]; if ($mixin) { $info->mixin = $mixin; } else { throw new DocblockParseException('@mixin annotation used without specifying class'); } } if (isset($parsed_docblock['specials']['psalm-seal-properties'])) { $info->sealed_properties = true; } if (isset($parsed_docblock['specials']['psalm-seal-methods'])) { $info->sealed_methods = true; } if (isset($parsed_docblock['specials']['psalm-immutable']) || isset($parsed_docblock['specials']['psalm-mutation-free']) ) { $info->mutation_free = true; $info->external_mutation_free = true; } if (isset($parsed_docblock['specials']['psalm-external-mutation-free'])) { $info->external_mutation_free = true; } if (isset($parsed_docblock['specials']['psalm-override-property-visibility'])) { $info->override_property_visibility = true; } if (isset($parsed_docblock['specials']['psalm-override-method-visibility'])) { $info->override_method_visibility = true; } if (isset($parsed_docblock['specials']['psalm-suppress'])) { foreach ($parsed_docblock['specials']['psalm-suppress'] as $offset => $suppress_entry) { $info->suppressed_issues[$offset + $comment->getFilePos()] = preg_split('/[\s]+/', $suppress_entry)[0]; } } if (isset($parsed_docblock['specials']['method']) || isset($parsed_docblock['specials']['psalm-method'])) { $all_methods = (isset($parsed_docblock['specials']['method']) ? $parsed_docblock['specials']['method'] : []) + (isset($parsed_docblock['specials']['psalm-method']) ? $parsed_docblock['specials']['psalm-method'] : []); foreach ($all_methods as $offset => $method_entry) { $method_entry = preg_replace('/[ \t]+/', ' ', trim($method_entry)); $docblock_lines = []; $is_static = false; if (!preg_match('/^([a-z_A-Z][a-z_0-9A-Z]+) *\(/', $method_entry, $matches)) { $doc_line_parts = self::splitDocLine($method_entry); if ($doc_line_parts[0] === 'static' && !strpos($doc_line_parts[1], '(')) { $is_static = true; array_shift($doc_line_parts); } if (count($doc_line_parts) > 1) { $docblock_lines[] = '@return ' . array_shift($doc_line_parts); $method_entry = implode(' ', $doc_line_parts); } } $method_entry = trim(preg_replace('/\/\/.*/', '', $method_entry)); $method_entry = preg_replace( '/array\(([0-9a-zA-Z_\'\" ]+,)*([0-9a-zA-Z_\'\" ]+)\)/', '[]', $method_entry ); $end_of_method_regex = '/(?<!array\()\) ?(\: ?(\??[\\\\a-zA-Z0-9_]+))?/'; if (preg_match($end_of_method_regex, $method_entry, $matches, PREG_OFFSET_CAPTURE)) { $method_entry = substr($method_entry, 0, (int) $matches[0][1] + strlen((string) $matches[0][0])); } $method_entry = str_replace([', ', '( '], [',', '('], $method_entry); $method_entry = preg_replace('/ (?!(\$|\.\.\.|&))/', '', trim($method_entry)); // replace array bracket contents $method_entry = preg_replace('/\[([0-9a-zA-Z_\'\" ]+,)*([0-9a-zA-Z_\'\" ]+)\]/', '[]', $method_entry); try { $method_tree = ParseTree::createFromTokens( Type::fixUpLocalType( $method_entry, $aliases, null ) ); } catch (TypeParseTreeException $e) { throw new DocblockParseException($method_entry . ' is not a valid method'); } if (!$method_tree instanceof ParseTree\MethodWithReturnTypeTree && !$method_tree instanceof ParseTree\MethodTree) { throw new DocblockParseException($method_entry . ' is not a valid method'); } if ($method_tree instanceof ParseTree\MethodWithReturnTypeTree) { $docblock_lines[] = '@return ' . 
Type::getTypeFromTree( $method_tree->children[1], $codebase ); $method_tree = $method_tree->children[0]; } if (!$method_tree instanceof ParseTree\MethodTree) { throw new DocblockParseException($method_entry . ' is not a valid method'); } $args = []; foreach ($method_tree->children as $method_tree_child) { if (!$method_tree_child instanceof ParseTree\MethodParamTree) { throw new DocblockParseException($method_entry . ' is not a valid method'); } $args[] = ($method_tree_child->byref ? '&' : '') . ($method_tree_child->variadic ? '...' : '') . $method_tree_child->name . ($method_tree_child->default != '' ? ' = ' . $method_tree_child->default : ''); if ($method_tree_child->children) { $param_type = Type::getTypeFromTree($method_tree_child->children[0], $codebase); $docblock_lines[] = '@param \\' . $param_type . ' ' . ($method_tree_child->variadic ? '...' : '') . $method_tree_child->name; } } $function_string = 'function ' . $method_tree->value . '(' . implode(', ', $args) . ')'; if ($is_static) { $function_string = 'static ' . $function_string; } $function_docblock = $docblock_lines ? "/**\n * " . implode("\n * ", $docblock_lines) . "\n*/\n" : ""; $php_string = '<?php class A { ' . $function_docblock . ' public ' . $function_string . '{} }'; try { $statements = \Psalm\Internal\Provider\StatementsProvider::parseStatements($php_string); } catch (\Exception $e) { throw new DocblockParseException('Badly-formatted @method string ' . $method_entry); } if (!$statements || !$statements[0] instanceof \PhpParser\Node\Stmt\Class_ || !isset($statements[0]->stmts[0]) || !$statements[0]->stmts[0] instanceof \PhpParser\Node\Stmt\ClassMethod ) { throw new DocblockParseException('Badly-formatted @method string ' . $method_entry); } /** @var \PhpParser\Comment\Doc */ $node_doc_comment = $node->getDocComment(); $statements[0]->stmts[0]->setAttribute('startLine', $node_doc_comment->getLine()); $statements[0]->stmts[0]->setAttribute('startFilePos', $node_doc_comment->getFilePos()); $statements[0]->stmts[0]->setAttribute('endFilePos', $node->getAttribute('startFilePos')); if ($doc_comment = $statements[0]->stmts[0]->getDocComment()) { $statements[0]->stmts[0]->setDocComment( new \PhpParser\Comment\Doc( $doc_comment->getText(), $comment->getLine() + substr_count($comment->getText(), "\n", 0, $offset), $node_doc_comment->getFilePos() ) ); } $info->methods[] = $statements[0]->stmts[0]; } } self::addMagicPropertyToInfo($comment, $info, $parsed_docblock['specials'], 'property'); self::addMagicPropertyToInfo($comment, $info, $parsed_docblock['specials'], 'psalm-property'); self::addMagicPropertyToInfo($comment, $info, $parsed_docblock['specials'], 'property-read'); self::addMagicPropertyToInfo($comment, $info, $parsed_docblock['specials'], 'psalm-property-read'); self::addMagicPropertyToInfo($comment, $info, $parsed_docblock['specials'], 'property-write'); self::addMagicPropertyToInfo($comment, $info, $parsed_docblock['specials'], 'psalm-property-write'); return $info; } /** * @param ClassLikeDocblockComment $info * @param array<string, array<int, string>> $specials * @param 'property'|'psalm-property'|'property-read'| * 'psalm-property-read'|'property-write'|'psalm-property-write' $property_tag * * @throws DocblockParseException * * @return void */ protected static function addMagicPropertyToInfo( PhpParser\Comment\Doc $comment, ClassLikeDocblockComment $info, array $specials, string $property_tag ) : void { $magic_property_comments = isset($specials[$property_tag]) ? 
$specials[$property_tag] : []; foreach ($magic_property_comments as $offset => $property) { $line_parts = self::splitDocLine($property); if (count($line_parts) === 1 && isset($line_parts[0][0]) && $line_parts[0][0] === '$') { continue; } if (count($line_parts) > 1) { if (preg_match('/^&?\$[A-Za-z0-9_]+,?$/', $line_parts[1]) && $line_parts[0][0] !== '{' ) { $line_parts[1] = str_replace('&', '', $line_parts[1]); $line_parts[1] = preg_replace('/,$/', '', $line_parts[1]); $start = $offset + $comment->getFilePos(); $end = $start + strlen($line_parts[0]); $line_parts[0] = str_replace("\n", '', preg_replace('@^[ \t]*\*@m', '', $line_parts[0])); if ($line_parts[0] === '' || ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) ) { throw new IncorrectDocblockException('Misplaced variable'); } $name = trim($line_parts[1]); if (!preg_match('/^\$([a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*)$/', $name)) { throw new DocblockParseException('Badly-formatted @property name'); } $info->properties[] = [ 'name' => $name, 'type' => $line_parts[0], 'line_number' => $comment->getLine() + substr_count($comment->getText(), "\n", 0, $offset), 'tag' => $property_tag, 'start' => $start, 'end' => $end, ]; } } else { throw new DocblockParseException('Badly-formatted @property'); } } } /** * @param string $return_block * * @throws DocblockParseException if an invalid string is found * * @return array<string> */ public static function splitDocLine($return_block) { $brackets = ''; $type = ''; $expects_callable_return = false; $return_block = str_replace("\t", ' ', $return_block); $quote_char = null; $escaped = false; for ($i = 0, $l = strlen($return_block); $i < $l; ++$i) { $char = $return_block[$i]; $next_char = $i < $l - 1 ? $return_block[$i + 1] : null; $last_char = $i > 0 ? $return_block[$i - 1] : null; if ($quote_char) { if ($char === $quote_char && $i > 1 && !$escaped) { $quote_char = null; $type .= $char; continue; } if ($char === '\\' && !$escaped && ($next_char === $quote_char || $next_char === '\\')) { $escaped = true; $type .= $char; continue; } $escaped = false; $type .= $char; continue; } if ($char === '"' || $char === '\'') { $quote_char = $char; $type .= $char; continue; } if ($char === ':' && $last_char === ')') { $expects_callable_return = true; $type .= $char; continue; } if ($char === '[' || $char === '{' || $char === '(' || $char === '<') { $brackets .= $char; } elseif ($char === ']' || $char === '}' || $char === ')' || $char === '>') { $last_bracket = substr($brackets, -1); $brackets = substr($brackets, 0, -1); if (($char === ']' && $last_bracket !== '[') || ($char === '}' && $last_bracket !== '{') || ($char === ')' && $last_bracket !== '(') || ($char === '>' && $last_bracket !== '<') ) { throw new DocblockParseException('Invalid string ' . $return_block); } } elseif ($char === ' ') { if ($brackets) { $expects_callable_return = false; $type .= ' '; continue; } if ($next_char === '|' || $next_char === '&') { $nexter_char = $i < $l - 2 ? $return_block[$i + 2] : null; if ($nexter_char === ' ') { ++$i; $type .= $next_char . 
' '; continue; } } if ($last_char === '|' || $last_char === '&') { $type .= ' '; continue; } if ($next_char === ':') { ++$i; $type .= ' :'; $expects_callable_return = true; continue; } if ($expects_callable_return) { $type .= ' '; $expects_callable_return = false; continue; } $remaining = trim(preg_replace('@^[ \t]*\* *@m', ' ', substr($return_block, $i + 1))); if ($remaining) { /** @var array<string> */ return array_merge([rtrim($type)], preg_split('/[ \s]+/', $remaining)); } return [$type]; } $expects_callable_return = false; $type .= $char; } return [$type]; } }
1
8,481
I would prefer the message to be more actionable. Like 'There should be no space between & and the variable name' or something similar.
vimeo-psalm
php
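A minimal PHP sketch of the more actionable message the reviewer asks for above; the helper name and the exception class used here are hypothetical and not Psalm's actual implementation:

<?php
// Hypothetical helper, not Psalm's actual code: flag a space between "&" and
// the variable name in an @param line with a message that says exactly that.
function checkParamByRefSpacing(string $param_line): void
{
    // e.g. "string & $name" splits into ['string', '&', '$name']
    $parts = preg_split('/\s+/', trim($param_line), -1, PREG_SPLIT_NO_EMPTY);

    foreach ($parts as $i => $part) {
        if ($part === '&' && isset($parts[$i + 1]) && $parts[$i + 1][0] === '$') {
            throw new \InvalidArgumentException(
                'There should be no space between & and the variable name'
            );
        }
    }
}

checkParamByRefSpacing('string &$name');  // fine, no space after &

try {
    checkParamByRefSpacing('string & $name');
} catch (\InvalidArgumentException $e) {
    echo $e->getMessage(), PHP_EOL; // prints the actionable message
}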
@@ -83,7 +83,16 @@ public class AntlrBaseNode extends ParserRuleContext implements AntlrNode { @Override public Node jjtGetChild(final int index) { - return (Node) children.get(index); // TODO: review if all children are Nodes + try { + return (Node) children.get(index); + } catch (final ClassCastException e) { + return new AntlrBaseNode(); + } + } + + @Override + public String getImage() { + return getText(); } @Override
1
/** * BSD-style license; for more info see http://pmd.sourceforge.net/license.html */ package net.sourceforge.pmd.lang.ast; import org.antlr.v4.runtime.ParserRuleContext; import net.sourceforge.pmd.lang.dfa.DataFlowNode; public class AntlrBaseNode extends ParserRuleContext implements AntlrNode { // TODO: what should we do with parent? how do we handle data flows in this scenario? it's ok to ignore // TODO: our parent data flow in case we don't have one? // protected Node parent; private DataFlowNode dataFlowNode; private Object userData; /** * Constructor required by {@link ParserRuleContext} */ @SuppressWarnings("unused") public AntlrBaseNode() { // Nothing to be done } /** * Constructor required by {@link ParserRuleContext} * * @param parent The parent * @param invokingStateNumber the invokingState defined by {@link org.antlr.v4.runtime.RuleContext} parent */ @SuppressWarnings("unused") public AntlrBaseNode(final ParserRuleContext parent, final int invokingStateNumber) { super(parent, invokingStateNumber); } @Override public Node jjtGetParent() { return (Node) parent; // TODO: review if all parents are Nodes } @Override public int getBeginLine() { return start.getLine(); // This goes from 1 to n } @Override public int getEndLine() { return stop.getLine(); // This goes from 1 to n } @Override public int getBeginColumn() { return start.getCharPositionInLine(); // This goes from 0 to (n - 1) } @Override public int getEndColumn() { return stop.getCharPositionInLine(); // This goes from 0 to (n - 1) } @Override public DataFlowNode getDataFlowNode() { return dataFlowNode; } @Override public void setDataFlowNode(final DataFlowNode dataFlowNode) { this.dataFlowNode = dataFlowNode; } @Override public Object getUserData() { return userData; } @Override public void setUserData(final Object userData) { this.userData = userData; } @Override public Node jjtGetChild(final int index) { return (Node) children.get(index); // TODO: review if all children are Nodes } @Override public int jjtGetNumChildren() { return children == null ? 0 : children.size(); } // TODO: should we make it abstract due to the comment in AbstractNode ? @Override public String getXPathNodeName() { return toString(); } }
1
16,052
The image attribute is not supposed to be the text of the node. I'd rather keep it separate (the previous `@Text` attribute was fine). A practical reason is that, in the future, other languages may have a way to get the text of their node; for compatibility, that wouldn't be fetched with `getImage`, but probably with a `getText` method, or `getSourceCode` like in #1728. Also we *may* deprecate or otherwise put `getImage` out of use as soon as 7.0, given its contract is super loose.
pmd-pmd
java
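A standalone Java sketch of the separation the reviewer prefers: the node text exposed through its own accessor (a `getText`/`@Text`-style attribute) rather than through `getImage`. The type and method names below are illustrative, not PMD's API:

// Illustrative only; not PMD's API.
interface TextBearingNode {
    String getImage(); // legacy attribute with a loose contract
    String getText();  // the node's source text, e.g. an @Text XPath attribute
}

final class SketchNode implements TextBearingNode {
    private final String sourceText;

    SketchNode(String sourceText) {
        this.sourceText = sourceText;
    }

    @Override
    public String getImage() {
        return null; // deliberately not tied to the node text
    }

    @Override
    public String getText() {
        return sourceText;
    }

    public static void main(String[] args) {
        TextBearingNode node = new SketchNode("a + b");
        System.out.println("image = " + node.getImage()); // image = null
        System.out.println("text  = " + node.getText());  // text  = a + b
    }
}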
@@ -12,14 +12,14 @@ module RSpec def initialize @full_backtrace = false - @exclusion_patterns = [] << Regexp.union( + @system_exclusion_patterns = [] << Regexp.union( *["/lib\d*/ruby/", "org/jruby/", "bin/", "/gems/", "lib/rspec/(core|expectations|matchers|mocks)"]. - map {|s| Regexp.new(s.gsub("/", File::SEPARATOR))} - ) + map {|s| Regexp.new(s.gsub("/", File::SEPARATOR))}) + @exclusion_patterns = [] + @system_exclusion_patterns @inclusion_patterns = [Regexp.new(Dir.getwd)] end
1
module RSpec module Core class BacktraceFormatter # This is only used externally by rspec-expectations. Can be removed once # rspec-expectations uses # RSpec.configuration.backtrace_formatter.format_backtrace instead. def self.format_backtrace(backtrace, options = {}) RSpec.configuration.backtrace_formatter.format_backtrace(backtrace, options) end attr_accessor :exclusion_patterns, :inclusion_patterns def initialize @full_backtrace = false @exclusion_patterns = [] << Regexp.union( *["/lib\d*/ruby/", "org/jruby/", "bin/", "/gems/", "lib/rspec/(core|expectations|matchers|mocks)"]. map {|s| Regexp.new(s.gsub("/", File::SEPARATOR))} ) @inclusion_patterns = [Regexp.new(Dir.getwd)] end def full_backtrace=(full_backtrace) @full_backtrace = full_backtrace end def full_backtrace? @full_backtrace || @exclusion_patterns.empty? end def format_backtrace(backtrace, options = {}) return backtrace if options[:full_backtrace] backtrace. take_while {|l| l != RSpec::Core::Runner::AT_EXIT_HOOK_BACKTRACE_LINE}. map {|l| backtrace_line(l)}. compact. tap do |filtered| if filtered.empty? filtered.concat backtrace filtered << "" filtered << " Showing full backtrace because every line was filtered out." filtered << " See docs for RSpec::Configuration#backtrace_exclusion_patterns and" filtered << " RSpec::Configuration#backtrace_inclusion_patterns for more information." end end end # @api private def backtrace_line(line) RSpec::Core::Metadata::relative_path(line) unless exclude?(line) rescue SecurityError nil end # @api private def exclude?(line) return false if @full_backtrace @exclusion_patterns.any? {|p| p =~ line} && @inclusion_patterns.none? {|p| p =~ line} end end end end
1
11,766
The `[] +` seems weird to me. Why is it there?
rspec-rspec-core
rb
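A small illustration of what the `[] +` in the patch appears to do (plain regexps here, not rspec-core's real pattern list): it builds a fresh array, so later mutation of the copy leaves the source list alone. `@system_exclusion_patterns.dup` would state that intent more directly:

# Illustrative values only.
system_exclusion_patterns = [%r{/gems/}, %r{bin/}]

exclusion_patterns = [] + system_exclusion_patterns # new array, same elements
exclusion_patterns << %r{spec_helper}

puts system_exclusion_patterns.size                        # => 2
puts exclusion_patterns.size                               # => 3
puts exclusion_patterns.equal?(system_exclusion_patterns)  # => false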
@@ -1,7 +1,7 @@ class FollowUp < ActiveRecord::Base belongs_to :course - validates_presence_of :email - validates_format_of :email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, on: :create + EMAIL_FORMAT = /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i + validates :email, presence: true, format: { with: EMAIL_FORMAT }, on: :create scope :have_not_notified, where(notified_at: nil)
1
class FollowUp < ActiveRecord::Base belongs_to :course validates_presence_of :email validates_format_of :email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, on: :create scope :have_not_notified, where(notified_at: nil) def notify(section) Mailer.follow_up(self, section).deliver self.notified_at = Time.now self.save end end
1
6,764
Not sure if this constant is defined at the right place style-wise. Maybe move it up to before 'belongs_to' like DISCOUNT_TYPES in coupon.rb?
thoughtbot-upcase
rb
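A sketch of the reordering the reviewer floats (constant above `belongs_to`, mirroring DISCOUNT_TYPES in coupon.rb); this is just the patch's own lines rearranged, not the committed version:

class FollowUp < ActiveRecord::Base
  EMAIL_FORMAT = /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i

  belongs_to :course

  validates :email, presence: true, format: { with: EMAIL_FORMAT }, on: :create

  scope :have_not_notified, where(notified_at: nil)
end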
@@ -251,6 +251,17 @@ func checksum(bytes []byte) string { return hex.EncodeToString(d[:]) } +func isEmptyYaml(yaml []byte) bool { + isEmpty := true + lines := bytes.Split(yaml, []byte("\n")) + for _, k := range lines { + if string(k) != "---" && !bytes.HasPrefix(k, []byte("#")) && string(k) != "" { + isEmpty = false + } + } + return isEmpty +} + func yamlToObjects(in io.Reader) ([]runtime.Object, error) { var result []runtime.Object reader := yamlDecoder.NewYAMLReader(bufio.NewReaderSize(in, 4096))
1
package deploy import ( "bufio" "bytes" "context" "crypto/sha256" "encoding/hex" "io" "io/ioutil" "os" "path/filepath" "strings" "time" errors2 "github.com/pkg/errors" v1 "github.com/rancher/k3s/types/apis/k3s.cattle.io/v1" "github.com/rancher/norman" "github.com/rancher/norman/objectclient" "github.com/rancher/norman/pkg/objectset" "github.com/rancher/norman/types" "github.com/sirupsen/logrus" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" yamlDecoder "k8s.io/apimachinery/pkg/util/yaml" "k8s.io/client-go/discovery" "k8s.io/client-go/rest" ) const ( ns = "kube-system" startKey = "_start_" ) func WatchFiles(ctx context.Context, skips []string, bases ...string) error { server := norman.GetServer(ctx) addons := v1.ClientsFrom(ctx).Addon w := &watcher{ addonCache: addons.Cache(), addons: addons, skips: skips, bases: bases, restConfig: *server.Runtime.LocalConfig, discovery: server.K8sClient.Discovery(), clients: map[schema.GroupVersionKind]*objectclient.ObjectClient{}, } addons.Enqueue("", startKey) addons.Interface().AddHandler(ctx, "addon-start", func(key string, _ *v1.Addon) (runtime.Object, error) { if key == startKey { go w.start(ctx) } return nil, nil }) return nil } type watcher struct { addonCache v1.AddonClientCache addons v1.AddonClient bases []string skips []string restConfig rest.Config discovery discovery.DiscoveryInterface clients map[schema.GroupVersionKind]*objectclient.ObjectClient namespaced map[schema.GroupVersionKind]bool } func (w *watcher) start(ctx context.Context) { force := true for { if err := w.listFiles(force); err == nil { force = false } else { logrus.Errorf("failed to process config: %v", err) } select { case <-ctx.Done(): return case <-time.After(15 * time.Second): } } } func (w *watcher) listFiles(force bool) error { var errs []error for _, base := range w.bases { if err := w.listFilesIn(base, force); err != nil { errs = append(errs, err) } } return types.NewErrors(errs...) } func (w *watcher) listFilesIn(base string, force bool) error { files, err := ioutil.ReadDir(base) if os.IsNotExist(err) { return nil } else if err != nil { return err } skips := map[string]bool{} for _, skip := range w.skips { skips[skip] = true } for _, file := range files { if strings.HasSuffix(file.Name(), ".skip") { skips[strings.TrimSuffix(file.Name(), ".skip")] = true } } var errs []error for _, file := range files { if strings.HasSuffix(file.Name(), ".skip") || skips[file.Name()] { continue } p := filepath.Join(base, file.Name()) if err := w.deploy(p, !force); err != nil { errs = append(errs, errors2.Wrapf(err, "failed to process %s", p)) } } return types.NewErrors(errs...) 
} func (w *watcher) deploy(path string, compareChecksum bool) error { content, err := ioutil.ReadFile(path) if err != nil { return err } name := name(path) addon, err := w.addon(name) if err != nil { return err } checksum := checksum(content) if compareChecksum && checksum == addon.Spec.Checksum { logrus.Debugf("Skipping existing deployment of %s, check=%v, checksum %s=%s", path, compareChecksum, checksum, addon.Spec.Checksum) return nil } objectSet, err := objectSet(content) if err != nil { return err } clients, err := w.apply(addon, objectSet) if err != nil { return err } if w.clients == nil { w.clients = map[schema.GroupVersionKind]*objectclient.ObjectClient{} } addon.Spec.Source = path addon.Spec.Checksum = checksum addon.Status.GVKs = nil for gvk, client := range clients { addon.Status.GVKs = append(addon.Status.GVKs, gvk) w.clients[gvk] = client } if addon.UID == "" { _, err := w.addons.Create(&addon) return err } _, err = w.addons.Update(&addon) return err } func (w *watcher) addon(name string) (v1.Addon, error) { addon, err := w.addonCache.Get(ns, name) if errors.IsNotFound(err) { addon = v1.NewAddon(ns, name, v1.Addon{}) } else if err != nil { return v1.Addon{}, err } return *addon, nil } func (w *watcher) apply(addon v1.Addon, set *objectset.ObjectSet) (map[schema.GroupVersionKind]*objectclient.ObjectClient, error) { var ( err error ) op := objectset.NewProcessor(addon.Name) op.AllowDiscovery(w.discovery, w.restConfig) ds := op.NewDesiredSet(nil, set) for _, gvk := range addon.Status.GVKs { var ( namespaced bool ) client, ok := w.clients[gvk] if ok { namespaced = w.namespaced[gvk] } else { client, namespaced, err = objectset.NewDiscoveredClient(gvk, w.restConfig, w.discovery) if err != nil { return nil, err } if w.namespaced == nil { w.namespaced = map[schema.GroupVersionKind]bool{} } w.namespaced[gvk] = namespaced } ds.AddDiscoveredClient(gvk, client, namespaced) } if err := ds.Apply(); err != nil { return nil, err } return ds.DiscoveredClients(), nil } func objectSet(content []byte) (*objectset.ObjectSet, error) { objs, err := yamlToObjects(bytes.NewBuffer(content)) if err != nil { return nil, err } os := objectset.NewObjectSet() os.Add(objs...) return os, nil } func name(path string) string { name := filepath.Base(path) return strings.SplitN(name, ".", 2)[0] } func checksum(bytes []byte) string { d := sha256.Sum256(bytes) return hex.EncodeToString(d[:]) } func yamlToObjects(in io.Reader) ([]runtime.Object, error) { var result []runtime.Object reader := yamlDecoder.NewYAMLReader(bufio.NewReaderSize(in, 4096)) for { raw, err := reader.Read() if err == io.EOF { break } if err != nil { return nil, err } obj, err := toObjects(raw) if err != nil { return nil, err } result = append(result, obj...) } return result, nil } func toObjects(bytes []byte) ([]runtime.Object, error) { bytes, err := yamlDecoder.ToJSON(bytes) if err != nil { return nil, err } obj, _, err := unstructured.UnstructuredJSONScheme.Decode(bytes, nil, nil) if err != nil { return nil, err } if l, ok := obj.(*unstructured.UnstructuredList); ok { var result []runtime.Object for _, obj := range l.Items { copy := obj result = append(result, &copy) } return result, nil } return []runtime.Object{obj}, nil }
1
7,276
What about a line with just spaces/tabs? Or a line with a couple spaces followed by a `#`?
k3s-io-k3s
go
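A runnable Go sketch of a whitespace-tolerant variant that answers the reviewer's questions, where lines of only spaces/tabs and indented `#` comments also count as empty; illustrative only, not the function as merged:

package main

import (
	"bytes"
	"fmt"
)

// isEmptyYaml treats blank lines, whitespace-only lines, "---" separators and
// (possibly indented) "#" comments as empty content.
func isEmptyYaml(yaml []byte) bool {
	for _, line := range bytes.Split(yaml, []byte("\n")) {
		trimmed := bytes.TrimSpace(line)
		if len(trimmed) == 0 {
			continue // blank, or spaces/tabs only
		}
		if bytes.Equal(trimmed, []byte("---")) || bytes.HasPrefix(trimmed, []byte("#")) {
			continue // document separator or comment
		}
		return false
	}
	return true
}

func main() {
	fmt.Println(isEmptyYaml([]byte("---\n \t \n   # indented comment\n"))) // true
	fmt.Println(isEmptyYaml([]byte("---\nkind: ConfigMap\n")))             // false
}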
@@ -376,7 +376,7 @@ module OrgAdmin # Load the funder's template(s) templates = Template.valid.publicly_visible.where(published: true, org_id: funder_id).to_a - if org_id.present? + unless org_id.blank? # Swap out any organisational cusotmizations of a funder template templates.each do |tmplt| customization = Template.valid.find_by(published: true, org_id: org_id, customization_of: tmplt.dmptemplate_id)
1
module OrgAdmin class TemplatesController < ApplicationController include Paginable include TemplateFilter after_action :verify_authorized # GET /org_admin/templates # ----------------------------------------------------- def index authorize Template # Apply scoping all_templates_hash = apply_scoping(params[:scope] || 'all', false, true) own_hash = apply_scoping(params[:scope] || 'all', false, false) customizable_hash = apply_scoping(params[:scope] || 'all', true, false) # Apply pagination all_templates_hash[:templates] = all_templates_hash[:templates].page(1) own_hash[:templates] = own_hash[:templates].page(1) customizable_hash[:templates] = customizable_hash[:templates].page(1) # Gather up all of the publication dates for the live versions of each template. published = get_publication_dates(all_templates_hash[:scopes][:dmptemplate_ids]) render 'index', locals: { all_templates: all_templates_hash[:templates], customizable_templates: customizable_hash[:templates], own_templates: own_hash[:templates], customized_templates: customizable_hash[:customizations], published: published, current_org: current_user.org, orgs: Org.all, scopes: { all: all_templates_hash[:scopes], orgs: own_hash[:scopes], funders: customizable_hash[:scopes] } } end # GET /org_admin/templates/new # ----------------------------------------------------- def new authorize Template @current_tab = params[:r] || 'all-templates' end # POST /org_admin/templates # ----------------------------------------------------- def create authorize Template # creates a new template with version 0 and new dmptemplate_id @template = Template.new(params[:template]) @template.org_id = current_user.org.id @template.description = params['template-desc'] @template.links = (params["template-links"].present? ? JSON.parse(params["template-links"]) : {"funder": [], "sample_plan": []}) if @template.save redirect_to edit_org_admin_template_path(@template), notice: success_message(_('template'), _('created')) else @hash = @template.to_hash flash[:alert] = failed_create_error(@template, _('template')) render action: "new" end end # GET /org_admin/templates/:id/edit # ----------------------------------------------------- def edit @template = Template.includes(:org, phases: [sections: [questions: [:question_options, :question_format, :annotations]]]).find(params[:id]) authorize @template @current = Template.current(@template.dmptemplate_id) if @template == @current if @template.published? new_version = @template.get_new_version if !new_version.nil? redirect_to(action: 'edit', id: new_version.id) return else flash[:alert] = _('Unable to create a new version of this template. You are currently working with a published copy.') end end else flash[:notice] = _('You are viewing a historical version of this template. 
You will not be able to make changes.') end # once the correct template has been generated, we convert it to hash @template_hash = @template.to_hash @current_tab = params[:r] || 'all-templates' render('container', locals: { partial_path: 'edit', template: @template, current: @current, template_hash: @template_hash, current_tab: @current_tab }) end # PUT /org_admin/templates/:id (AJAXable) # ----------------------------------------------------- def update @template = Template.find(params[:id]) authorize @template # NOTE if non-authorized error is raised, it performs a redirect to root_path and no JSON output is generated current = Template.current(@template.dmptemplate_id) # Only allow the current version to be updated if current != @template render(status: :forbidden, json: { msg: _('You can not edit a historical version of this template.')}) else template_links = nil begin template_links = JSON.parse(params["template-links"]) if params["template-links"].present? rescue JSON::ParserError render(status: :bad_request, json: { msg: _('Error parsing links for a template') }) return end # TODO dirty check at template model instead of here for reusability, i.e. method dirty? passing a template object if @template.description != params["template-desc"] || @template.title != params[:template][:title] || @template.links != template_links @template.dirty = true end @template.description = params["template-desc"] @template.links = template_links if template_links.present? # If the visibility checkbox is not checked and the user's org is a funder set the visibility to public # otherwise default it to organisationally_visible if current_user.org.funder? && params[:template_visibility].nil? @template.visibility = Template.visibilities[:publicly_visible] else @template.visibility = Template.visibilities[:organisationally_visible] end if @template.update_attributes(params[:template]) render(status: :ok, json: { msg: success_message(_('template'), _('saved'))}) else # Note failed_update_error may return HTML tags (e.g. 
<br/>) and therefore the client should parse them accordingly render(status: :bad_request, json: { msg: failed_update_error(@template, _('template'))}) end end end # DELETE /org_admin/templates/:id # ----------------------------------------------------- def destroy @template = Template.find(params[:id]) current_tab = params[:r] || 'all-templates' authorize @template if @template.plans.length <= 0 current = Template.current(@template.dmptemplate_id) # Only allow the current version to be destroyed if current == @template if @template.destroy flash[:notice] = success_message(_('template'), _('removed')) redirect_to org_admin_templates_path(r: current_tab) else @hash = @template.to_hash flash[:alert] = failed_destroy_error(@template, _('template')) redirect_to org_admin_templates_path(r: current_tab) end else flash[:alert] = _('You cannot delete historical versions of this template.') redirect_to org_admin_templates_path(r: current_tab) end else flash[:alert] = _('You cannot delete a template that has been used to create plans.') redirect_to org_admin_templates_path(r: current_tab) end end # GET /org_admin/templates/:id/history # ----------------------------------------------------- def history @template = Template.find(params[:id]) authorize @template @templates = Template.where(dmptemplate_id: @template.dmptemplate_id).order(version: :desc) @current = Template.current(@template.dmptemplate_id) @current_tab = params[:r] || 'all-templates' end # GET /org_admin/templates/:id/customize # ----------------------------------------------------- def customize @template = Template.find(params[:id]) @current_tab = params[:r] || 'all-templates' authorize @template customisation = Template.deep_copy(@template) customisation.org = current_user.org customisation.version = 0 customisation.customization_of = @template.dmptemplate_id customisation.dmptemplate_id = loop do random = rand 2147483647 break random unless Template.exists?(dmptemplate_id: random) end customisation.dirty = true customisation.save customisation.phases.includes(:sections, :questions).each do |phase| phase.modifiable = false phase.save! phase.sections.each do |section| section.modifiable = false section.save! section.questions.each do |question| question.modifiable = false question.save! 
end end end redirect_to edit_org_admin_template_path(customisation, r: 'funder-templates') end # GET /org_admin/templates/:id/transfer_customization # the funder template's id is passed through here # ----------------------------------------------------- def transfer_customization @template = Template.includes(:org).find(params[:id]) @current_tab = params[:r] || 'all-templates' authorize @template new_customization = Template.deep_copy(@template) new_customization.org_id = current_user.org_id new_customization.published = false new_customization.customization_of = @template.dmptemplate_id new_customization.dirty = true new_customization.phases.includes(sections: :questions).each do |phase| phase.modifiable = false phase.save phase.sections.each do |section| section.modifiable = false section.save section.questions.each do |question| question.modifiable = false question.save end end end customizations = Template.includes(:org, phases:[sections: [questions: :annotations]]).where(org_id: current_user.org_id, customization_of: @template.dmptemplate_id).order(version: :desc) # existing version to port over max_version = customizations.first new_customization.dmptemplate_id = max_version.dmptemplate_id new_customization.version = max_version.version + 1 # here we rip the customizations out of the old template # First, we find any customzed phases or sections max_version.phases.each do |phase| # check if the phase was added as a customization if phase.modifiable # deep copy the phase and add it to the template phase_copy = Phase.deep_copy(phase) phase_copy.number = new_customization.phases.length + 1 phase_copy.template_id = new_customization.id phase_copy.save! else # iterate over the sections to see if any of them are customizations phase.sections.each do |section| if section.modifiable # this is a custom section section_copy = Section.deep_copy(section) customization_phase = new_customization.phases.includes(:sections).where(number: phase.number).first section_copy.phase_id = customization_phase.id # custom sections get added to the end section_copy.number = customization_phase.sections.length + 1 # section from phase with corresponding number in the main_template section_copy.save! else # not a customized section, iterate over questions customization_phase = new_customization.phases.includes(sections: [questions: :annotations]).where(number: phase.number).first customization_section = customization_phase.sections.where(number: section.number).first section.questions.each do |question| # find corresponding question in new template customization_question = customization_section.questions.where(number: question.number).first # apply annotations question.annotations.where(org_id: current_user.org_id).each do |annotation| annotation_copy = Annotation.deep_copy(annotation) annotation_copy.question_id = customization_question.id annotation_copy.save! 
end end end end end end new_customization.save redirect_to edit_org_admin_template_path(new_customization, r: 'funder-templates') end # PUT /org_admin/templates/:id/copy (AJAX) # ----------------------------------------------------- def copy @template = Template.find(params[:id]) current_tab = params[:r] || 'all-templates' authorize @template new_copy = Template.deep_copy(@template) new_copy.title = "Copy of " + @template.title new_copy.version = 0 new_copy.published = false new_copy.dmptemplate_id = loop do random = rand 2147483647 break random unless Template.exists?(dmptemplate_id: random) end if new_copy.save flash[:notice] = 'Template was successfully copied.' redirect_to edit_org_admin_template_path(id: new_copy.id, edit: true, r: 'organisation-templates'), notice: _('Information was successfully created.') else flash[:alert] = failed_create_error(new_copy, _('template')) end end # GET /org_admin/templates/:id/publish (AJAX) TODO convert to PUT verb # ----------------------------------------------------- def publish template = Template.find(params[:id]) authorize template current = Template.current(template.dmptemplate_id) # Only allow the current version to be updated if current != template redirect_to org_admin_templates_path, alert: _('You can not publish a historical version of this template.') else # Unpublish the older published version if there is one live = Template.live(template.dmptemplate_id) if !live.nil? and self != live live.published = false live.save! end # Set the dirty flag to false template.dirty = false template.published = true template.save flash[:notice] = _('Your template has been published and is now available to users.') redirect_to "#{org_admin_templates_path}#{template.customization_of.present? ? '#funder-templates' : '#organisation-templates'}" end end # GET /org_admin/templates/:id/unpublish (AJAX) TODO convert to PUT verb # ----------------------------------------------------- def unpublish template = Template.find(params[:id]) authorize template if template.nil? flash[:alert] = _('That template is not currently published.') else template.published = false template.save flash[:notice] = _('Your template is no longer published. Users will not be able to create new DMPs for this template until you re-publish it') end redirect_to "#{org_admin_templates_path}#{template.customization_of.present? ? '#funder-templates' : '#organisation-templates'}" end # PUT /org_admin/template_options (AJAX) # Collect all of the templates available for the org+funder combination # -------------------------------------------------------------------------- def template_options() org_id = (plan_params[:org_id] == '-1' ? '' : plan_params[:org_id]) funder_id = (plan_params[:funder_id] == '-1' ? '' : plan_params[:funder_id]) authorize Template.new templates = [] if org_id.present? || funder_id.present? unless funder_id.blank? # Load the funder's template(s) templates = Template.valid.publicly_visible.where(published: true, org_id: funder_id).to_a if org_id.present? # Swap out any organisational cusotmizations of a funder template templates.each do |tmplt| customization = Template.valid.find_by(published: true, org_id: org_id, customization_of: tmplt.dmptemplate_id) if customization.present? && tmplt.created_at < customization.created_at templates.delete(tmplt) templates << customization end end end end # Load the org's template(s) if org_id.present? 
# If the Research Org is not also a Funder OR the selected Research Org # matches the selected Funder (Use case where Org is both a Funder and Org) if !Org.find(org_id).funder? || org_id == funder_id templates << Template.organisationally_visible.valid.where(published: true, org_id: org_id, customization_of: nil).to_a end templates = templates.flatten.uniq end end # If no templates were available use the generic templates if templates.empty? templates << Template.where(is_default: true, published: true).first end templates = (templates.count > 0 ? templates.sort{|x,y| x.title <=> y.title} : []) render json: {"templates": templates.collect{|t| {id: t.id, title: t.title} }}.to_json end # ====================================================== private def plan_params params.require(:plan).permit(:org_id, :funder_id) end end end
1
17,369
If funder_id is not blank (L375) there is no need to check if org_id is not blank (L379), since you will never enter the if (L374).
DMPRoadmap-roadmap
rb
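The review message in this row refers to the nested guards inside `template_options` in the `oldf` listing above. As a reading aid only, the sketch below restates that guard structure in isolation and notes what each branch already implies. It is a hypothetical illustration: the method name, the `require` line, and the comments are additions made here, not DMPRoadmap code and not part of the dataset.

# Hypothetical sketch of the guard structure in template_options -- not project code.
require 'active_support/core_ext/object/blank' # provides blank?/present? outside Rails

def template_options_guards(org_id, funder_id)
  if org_id.present? || funder_id.present?  # entered when at least one id is given
    unless funder_id.blank?                 # here funder_id is known to be present
      # ... load the funder's templates ...
      if org_id.present?                    # an org_id check of the kind the comment flags
        # ... swap in organisational customisations ...
      end
    end
    if org_id.present?                      # here org_id is known to be present
      # ... load the organisation's own templates ...
    end
  end
end

# Example: with a funder id but no org id, only the funder branch runs.
template_options_guards('', '17')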
@@ -3403,6 +3403,13 @@ void Client::Handle_OP_AutoFire(const EQApplicationPacket *app)
 		DumpPacket(app);
 		return;
 	}
+
+	if (GetTarget() == this) {
+		this->MessageString(Chat::TooFarAway, TRY_ATTACKING_SOMEONE);
+		auto_fire = false;
+		return;
+	}
+
 	bool *af = (bool*)app->pBuffer;
 	auto_fire = *af;
 	auto_attack = false;
1
/* EQEMu: Everquest Server Emulator Copyright (C) 2001-2016 EQEMu Development Team (http://eqemulator.net) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY except by those people which sell it, which are required to give you total support for your newly bought product; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "../common/global_define.h" #include "../common/eqemu_logsys.h" #include "../common/opcodemgr.h" #include <iomanip> #include <iostream> #include <math.h> #include <set> #include <stdio.h> #include <string.h> #include <zlib.h> #ifdef _WINDOWS #define snprintf _snprintf #define strncasecmp _strnicmp #define strcasecmp _stricmp #else #include <pthread.h> #include <sys/socket.h> #include <netinet/in.h> #include <unistd.h> #endif #include "../common/crc32.h" #include "../common/data_verification.h" #include "../common/faction.h" #include "../common/guilds.h" #include "../common/rdtsc.h" #include "../common/rulesys.h" #include "../common/skills.h" #include "../common/spdat.h" #include "../common/string_util.h" #include "../common/zone_numbers.h" #include "data_bucket.h" #include "event_codes.h" #include "expedition.h" #include "expedition_database.h" #include "guild_mgr.h" #include "merc.h" #include "petitions.h" #include "pets.h" #include "queryserv.h" #include "quest_parser_collection.h" #include "string_ids.h" #include "titles.h" #include "water_map.h" #include "worldserver.h" #include "zone.h" #include "mob_movement_manager.h" #include "../common/repositories/character_instance_safereturns_repository.h" #include "../common/repositories/criteria/content_filter_criteria.h" #include "../common/shared_tasks.h" #include "gm_commands/door_manipulation.h" #ifdef BOTS #include "bot.h" #endif extern QueryServ* QServ; extern Zone* zone; extern volatile bool is_zone_loaded; extern WorldServer worldserver; extern PetitionList petition_list; extern EntityList entity_list; typedef void (Client::*ClientPacketProc)(const EQApplicationPacket *app); //Use a map for connecting opcodes since it dosent get used a lot and is sparse std::map<uint32, ClientPacketProc> ConnectingOpcodes; //Use a static array for connected, for speed ClientPacketProc ConnectedOpcodes[_maxEmuOpcode]; void MapOpcodes() { ConnectingOpcodes.clear(); memset(ConnectedOpcodes, 0, sizeof(ConnectedOpcodes)); // Now put all the opcodes into their home... 
// connecting opcode handler assignments: ConnectingOpcodes[OP_ApproveZone] = &Client::Handle_Connect_OP_ApproveZone; ConnectingOpcodes[OP_BlockedBuffs] = &Client::Handle_OP_BlockedBuffs; ConnectingOpcodes[OP_ClientError] = &Client::Handle_Connect_OP_ClientError; ConnectingOpcodes[OP_ClientReady] = &Client::Handle_Connect_OP_ClientReady; ConnectingOpcodes[OP_ClientUpdate] = &Client::Handle_Connect_OP_ClientUpdate; ConnectingOpcodes[OP_GetGuildsList] = &Client::Handle_OP_GetGuildsList; // temporary hack ConnectingOpcodes[OP_ReqClientSpawn] = &Client::Handle_Connect_OP_ReqClientSpawn; ConnectingOpcodes[OP_ReqNewZone] = &Client::Handle_Connect_OP_ReqNewZone; ConnectingOpcodes[OP_SendAAStats] = &Client::Handle_Connect_OP_SendAAStats; ConnectingOpcodes[OP_SendAATable] = &Client::Handle_Connect_OP_SendAATable; ConnectingOpcodes[OP_SendExpZonein] = &Client::Handle_Connect_OP_SendExpZonein; ConnectingOpcodes[OP_SendGuildTributes] = &Client::Handle_Connect_OP_SendGuildTributes; ConnectingOpcodes[OP_SendGuildTributes] = &Client::Handle_Connect_OP_SendGuildTributes; // I guess it didn't believe us with the first assignment? ConnectingOpcodes[OP_SendTributes] = &Client::Handle_Connect_OP_SendTributes; ConnectingOpcodes[OP_SetServerFilter] = &Client::Handle_Connect_OP_SetServerFilter; ConnectingOpcodes[OP_SpawnAppearance] = &Client::Handle_Connect_OP_SpawnAppearance; ConnectingOpcodes[OP_TGB] = &Client::Handle_Connect_OP_TGB; ConnectingOpcodes[OP_UpdateAA] = &Client::Handle_Connect_OP_UpdateAA; ConnectingOpcodes[OP_WearChange] = &Client::Handle_Connect_OP_WearChange; ConnectingOpcodes[OP_WorldObjectsSent] = &Client::Handle_Connect_OP_WorldObjectsSent; ConnectingOpcodes[OP_XTargetAutoAddHaters] = &Client::Handle_OP_XTargetAutoAddHaters; ConnectingOpcodes[OP_XTargetRequest] = &Client::Handle_OP_XTargetRequest; ConnectingOpcodes[OP_ZoneComplete] = &Client::Handle_Connect_OP_ZoneComplete; ConnectingOpcodes[OP_ZoneEntry] = &Client::Handle_Connect_OP_ZoneEntry; // connected opcode handler assignments: ConnectedOpcodes[OP_0x0193] = &Client::Handle_0x0193; ConnectedOpcodes[OP_AAAction] = &Client::Handle_OP_AAAction; ConnectedOpcodes[OP_AcceptNewTask] = &Client::Handle_OP_AcceptNewTask; ConnectedOpcodes[OP_AdventureInfoRequest] = &Client::Handle_OP_AdventureInfoRequest; ConnectedOpcodes[OP_AdventureLeaderboardRequest] = &Client::Handle_OP_AdventureLeaderboardRequest; ConnectedOpcodes[OP_AdventureMerchantPurchase] = &Client::Handle_OP_AdventureMerchantPurchase; ConnectedOpcodes[OP_AdventureMerchantRequest] = &Client::Handle_OP_AdventureMerchantRequest; ConnectedOpcodes[OP_AdventureMerchantSell] = &Client::Handle_OP_AdventureMerchantSell; ConnectedOpcodes[OP_AdventureRequest] = &Client::Handle_OP_AdventureRequest; ConnectedOpcodes[OP_AdventureStatsRequest] = &Client::Handle_OP_AdventureStatsRequest; ConnectedOpcodes[OP_AggroMeterLockTarget] = &Client::Handle_OP_AggroMeterLockTarget; ConnectedOpcodes[OP_AltCurrencyMerchantRequest] = &Client::Handle_OP_AltCurrencyMerchantRequest; ConnectedOpcodes[OP_AltCurrencyPurchase] = &Client::Handle_OP_AltCurrencyPurchase; ConnectedOpcodes[OP_AltCurrencyReclaim] = &Client::Handle_OP_AltCurrencyReclaim; ConnectedOpcodes[OP_AltCurrencySell] = &Client::Handle_OP_AltCurrencySell; ConnectedOpcodes[OP_AltCurrencySellSelection] = &Client::Handle_OP_AltCurrencySellSelection; ConnectedOpcodes[OP_Animation] = &Client::Handle_OP_Animation; ConnectedOpcodes[OP_ApplyPoison] = &Client::Handle_OP_ApplyPoison; ConnectedOpcodes[OP_Assist] = &Client::Handle_OP_Assist; 
ConnectedOpcodes[OP_AssistGroup] = &Client::Handle_OP_AssistGroup; ConnectedOpcodes[OP_AugmentInfo] = &Client::Handle_OP_AugmentInfo; ConnectedOpcodes[OP_AugmentItem] = &Client::Handle_OP_AugmentItem; ConnectedOpcodes[OP_AutoAttack] = &Client::Handle_OP_AutoAttack; ConnectedOpcodes[OP_AutoAttack2] = &Client::Handle_OP_AutoAttack2; ConnectedOpcodes[OP_AutoFire] = &Client::Handle_OP_AutoFire; ConnectedOpcodes[OP_Bandolier] = &Client::Handle_OP_Bandolier; ConnectedOpcodes[OP_BankerChange] = &Client::Handle_OP_BankerChange; ConnectedOpcodes[OP_Barter] = &Client::Handle_OP_Barter; ConnectedOpcodes[OP_BazaarInspect] = &Client::Handle_OP_BazaarInspect; ConnectedOpcodes[OP_BazaarSearch] = &Client::Handle_OP_BazaarSearch; ConnectedOpcodes[OP_Begging] = &Client::Handle_OP_Begging; ConnectedOpcodes[OP_Bind_Wound] = &Client::Handle_OP_Bind_Wound; ConnectedOpcodes[OP_BlockedBuffs] = &Client::Handle_OP_BlockedBuffs; ConnectedOpcodes[OP_BoardBoat] = &Client::Handle_OP_BoardBoat; ConnectedOpcodes[OP_Buff] = &Client::Handle_OP_Buff; ConnectedOpcodes[OP_BuffRemoveRequest] = &Client::Handle_OP_BuffRemoveRequest; ConnectedOpcodes[OP_Bug] = &Client::Handle_OP_Bug; ConnectedOpcodes[OP_Camp] = &Client::Handle_OP_Camp; ConnectedOpcodes[OP_CancelTask] = &Client::Handle_OP_CancelTask; ConnectedOpcodes[OP_CancelTrade] = &Client::Handle_OP_CancelTrade; ConnectedOpcodes[OP_CastSpell] = &Client::Handle_OP_CastSpell; ConnectedOpcodes[OP_ChannelMessage] = &Client::Handle_OP_ChannelMessage; ConnectedOpcodes[OP_ClearBlockedBuffs] = &Client::Handle_OP_ClearBlockedBuffs; ConnectedOpcodes[OP_ClearNPCMarks] = &Client::Handle_OP_ClearNPCMarks; ConnectedOpcodes[OP_ClearSurname] = &Client::Handle_OP_ClearSurname; ConnectedOpcodes[OP_ClickDoor] = &Client::Handle_OP_ClickDoor; ConnectedOpcodes[OP_ClickObject] = &Client::Handle_OP_ClickObject; ConnectedOpcodes[OP_ClickObjectAction] = &Client::Handle_OP_ClickObjectAction; ConnectedOpcodes[OP_ClientError] = &Client::Handle_OP_ClientError; ConnectedOpcodes[OP_ClientTimeStamp] = &Client::Handle_OP_ClientTimeStamp; ConnectedOpcodes[OP_ClientUpdate] = &Client::Handle_OP_ClientUpdate; ConnectedOpcodes[OP_CombatAbility] = &Client::Handle_OP_CombatAbility; ConnectedOpcodes[OP_ConfirmDelete] = &Client::Handle_OP_ConfirmDelete; ConnectedOpcodes[OP_Consent] = &Client::Handle_OP_Consent; ConnectedOpcodes[OP_ConsentDeny] = &Client::Handle_OP_ConsentDeny; ConnectedOpcodes[OP_Consider] = &Client::Handle_OP_Consider; ConnectedOpcodes[OP_ConsiderCorpse] = &Client::Handle_OP_ConsiderCorpse; ConnectedOpcodes[OP_Consume] = &Client::Handle_OP_Consume; ConnectedOpcodes[OP_ControlBoat] = &Client::Handle_OP_ControlBoat; ConnectedOpcodes[OP_CorpseDrag] = &Client::Handle_OP_CorpseDrag; ConnectedOpcodes[OP_CorpseDrop] = &Client::Handle_OP_CorpseDrop; ConnectedOpcodes[OP_CrashDump] = &Client::Handle_OP_CrashDump; ConnectedOpcodes[OP_CrystalCreate] = &Client::Handle_OP_CrystalCreate; ConnectedOpcodes[OP_CrystalReclaim] = &Client::Handle_OP_CrystalReclaim; ConnectedOpcodes[OP_Damage] = &Client::Handle_OP_Damage; ConnectedOpcodes[OP_Death] = &Client::Handle_OP_Death; ConnectedOpcodes[OP_DelegateAbility] = &Client::Handle_OP_DelegateAbility; ConnectedOpcodes[OP_DeleteItem] = &Client::Handle_OP_DeleteItem; ConnectedOpcodes[OP_DeleteSpawn] = &Client::Handle_OP_DeleteSpawn; ConnectedOpcodes[OP_DeleteSpell] = &Client::Handle_OP_DeleteSpell; ConnectedOpcodes[OP_Disarm] = &Client::Handle_OP_Disarm; ConnectedOpcodes[OP_DisarmTraps] = &Client::Handle_OP_DisarmTraps; ConnectedOpcodes[OP_DoGroupLeadershipAbility] = 
&Client::Handle_OP_DoGroupLeadershipAbility; ConnectedOpcodes[OP_DuelResponse] = &Client::Handle_OP_DuelResponse; ConnectedOpcodes[OP_DuelResponse2] = &Client::Handle_OP_DuelResponse2; ConnectedOpcodes[OP_DumpName] = &Client::Handle_OP_DumpName; ConnectedOpcodes[OP_Dye] = &Client::Handle_OP_Dye; ConnectedOpcodes[OP_DzAddPlayer] = &Client::Handle_OP_DzAddPlayer; ConnectedOpcodes[OP_DzChooseZoneReply] = &Client::Handle_OP_DzChooseZoneReply; ConnectedOpcodes[OP_DzExpeditionInviteResponse] = &Client::Handle_OP_DzExpeditionInviteResponse; ConnectedOpcodes[OP_DzListTimers] = &Client::Handle_OP_DzListTimers; ConnectedOpcodes[OP_DzMakeLeader] = &Client::Handle_OP_DzMakeLeader; ConnectedOpcodes[OP_DzPlayerList] = &Client::Handle_OP_DzPlayerList; ConnectedOpcodes[OP_DzRemovePlayer] = &Client::Handle_OP_DzRemovePlayer; ConnectedOpcodes[OP_DzSwapPlayer] = &Client::Handle_OP_DzSwapPlayer; ConnectedOpcodes[OP_DzQuit] = &Client::Handle_OP_DzQuit; ConnectedOpcodes[OP_Emote] = &Client::Handle_OP_Emote; ConnectedOpcodes[OP_EndLootRequest] = &Client::Handle_OP_EndLootRequest; ConnectedOpcodes[OP_EnvDamage] = &Client::Handle_OP_EnvDamage; ConnectedOpcodes[OP_FaceChange] = &Client::Handle_OP_FaceChange; ConnectedOpcodes[OP_FeignDeath] = &Client::Handle_OP_FeignDeath; ConnectedOpcodes[OP_FindPersonRequest] = &Client::Handle_OP_FindPersonRequest; ConnectedOpcodes[OP_Fishing] = &Client::Handle_OP_Fishing; ConnectedOpcodes[OP_FloatListThing] = &Client::Handle_OP_MovementHistoryList; ConnectedOpcodes[OP_Forage] = &Client::Handle_OP_Forage; ConnectedOpcodes[OP_FriendsWho] = &Client::Handle_OP_FriendsWho; ConnectedOpcodes[OP_GetGuildMOTD] = &Client::Handle_OP_GetGuildMOTD; ConnectedOpcodes[OP_GetGuildsList] = &Client::Handle_OP_GetGuildsList; ConnectedOpcodes[OP_GMBecomeNPC] = &Client::Handle_OP_GMBecomeNPC; ConnectedOpcodes[OP_GMDelCorpse] = &Client::Handle_OP_GMDelCorpse; ConnectedOpcodes[OP_GMEmoteZone] = &Client::Handle_OP_GMEmoteZone; ConnectedOpcodes[OP_GMEndTraining] = &Client::Handle_OP_GMEndTraining; ConnectedOpcodes[OP_GMFind] = &Client::Handle_OP_GMFind; ConnectedOpcodes[OP_GMGoto] = &Client::Handle_OP_GMGoto; ConnectedOpcodes[OP_GMHideMe] = &Client::Handle_OP_GMHideMe; ConnectedOpcodes[OP_GMKick] = &Client::Handle_OP_GMKick; ConnectedOpcodes[OP_GMKill] = &Client::Handle_OP_GMKill; ConnectedOpcodes[OP_GMLastName] = &Client::Handle_OP_GMLastName; ConnectedOpcodes[OP_GMNameChange] = &Client::Handle_OP_GMNameChange; ConnectedOpcodes[OP_GMSearchCorpse] = &Client::Handle_OP_GMSearchCorpse; ConnectedOpcodes[OP_GMServers] = &Client::Handle_OP_GMServers; ConnectedOpcodes[OP_GMSummon] = &Client::Handle_OP_GMSummon; ConnectedOpcodes[OP_GMToggle] = &Client::Handle_OP_GMToggle; ConnectedOpcodes[OP_GMTraining] = &Client::Handle_OP_GMTraining; ConnectedOpcodes[OP_GMTrainSkill] = &Client::Handle_OP_GMTrainSkill; ConnectedOpcodes[OP_GMZoneRequest] = &Client::Handle_OP_GMZoneRequest; ConnectedOpcodes[OP_GMZoneRequest2] = &Client::Handle_OP_GMZoneRequest2; ConnectedOpcodes[OP_GroundSpawn] = &Client::Handle_OP_CreateObject; ConnectedOpcodes[OP_GroupAcknowledge] = &Client::Handle_OP_GroupAcknowledge; ConnectedOpcodes[OP_GroupCancelInvite] = &Client::Handle_OP_GroupCancelInvite; ConnectedOpcodes[OP_GroupDelete] = &Client::Handle_OP_GroupDelete; ConnectedOpcodes[OP_GroupDisband] = &Client::Handle_OP_GroupDisband; ConnectedOpcodes[OP_GroupFollow] = &Client::Handle_OP_GroupFollow; ConnectedOpcodes[OP_GroupFollow2] = &Client::Handle_OP_GroupFollow2; ConnectedOpcodes[OP_GroupInvite] = &Client::Handle_OP_GroupInvite; 
ConnectedOpcodes[OP_GroupInvite2] = &Client::Handle_OP_GroupInvite2; ConnectedOpcodes[OP_GroupMakeLeader] = &Client::Handle_OP_GroupMakeLeader; ConnectedOpcodes[OP_GroupMentor] = &Client::Handle_OP_GroupMentor; ConnectedOpcodes[OP_GroupRoles] = &Client::Handle_OP_GroupRoles; ConnectedOpcodes[OP_GroupUpdate] = &Client::Handle_OP_GroupUpdate; ConnectedOpcodes[OP_GuildBank] = &Client::Handle_OP_GuildBank; ConnectedOpcodes[OP_GuildCreate] = &Client::Handle_OP_GuildCreate; ConnectedOpcodes[OP_GuildDelete] = &Client::Handle_OP_GuildDelete; ConnectedOpcodes[OP_GuildDemote] = &Client::Handle_OP_GuildDemote; ConnectedOpcodes[OP_GuildInvite] = &Client::Handle_OP_GuildInvite; ConnectedOpcodes[OP_GuildInviteAccept] = &Client::Handle_OP_GuildInviteAccept; ConnectedOpcodes[OP_GuildLeader] = &Client::Handle_OP_GuildLeader; ConnectedOpcodes[OP_GuildManageBanker] = &Client::Handle_OP_GuildManageBanker; ConnectedOpcodes[OP_GuildPeace] = &Client::Handle_OP_GuildPeace; ConnectedOpcodes[OP_GuildPromote] = &Client::Handle_OP_GuildPromote; ConnectedOpcodes[OP_GuildPublicNote] = &Client::Handle_OP_GuildPublicNote; ConnectedOpcodes[OP_GuildRemove] = &Client::Handle_OP_GuildRemove; ConnectedOpcodes[OP_GuildStatus] = &Client::Handle_OP_GuildStatus; ConnectedOpcodes[OP_GuildUpdateURLAndChannel] = &Client::Handle_OP_GuildUpdateURLAndChannel; ConnectedOpcodes[OP_GuildWar] = &Client::Handle_OP_GuildWar; ConnectedOpcodes[OP_Heartbeat] = &Client::Handle_OP_Heartbeat; ConnectedOpcodes[OP_Hide] = &Client::Handle_OP_Hide; ConnectedOpcodes[OP_HideCorpse] = &Client::Handle_OP_HideCorpse; ConnectedOpcodes[OP_Illusion] = &Client::Handle_OP_Illusion; ConnectedOpcodes[OP_InspectAnswer] = &Client::Handle_OP_InspectAnswer; ConnectedOpcodes[OP_InspectMessageUpdate] = &Client::Handle_OP_InspectMessageUpdate; ConnectedOpcodes[OP_InspectRequest] = &Client::Handle_OP_InspectRequest; ConnectedOpcodes[OP_InstillDoubt] = &Client::Handle_OP_InstillDoubt; ConnectedOpcodes[OP_ItemLinkClick] = &Client::Handle_OP_ItemLinkClick; ConnectedOpcodes[OP_ItemLinkResponse] = &Client::Handle_OP_ItemLinkResponse; ConnectedOpcodes[OP_ItemName] = &Client::Handle_OP_ItemName; ConnectedOpcodes[OP_ItemPreview] = &Client::Handle_OP_ItemPreview; ConnectedOpcodes[OP_ItemVerifyRequest] = &Client::Handle_OP_ItemVerifyRequest; ConnectedOpcodes[OP_ItemViewUnknown] = &Client::Handle_OP_Ignore; ConnectedOpcodes[OP_Jump] = &Client::Handle_OP_Jump; ConnectedOpcodes[OP_KeyRing] = &Client::Handle_OP_KeyRing; ConnectedOpcodes[OP_KickPlayers] = &Client::Handle_OP_KickPlayers; ConnectedOpcodes[OP_LDoNButton] = &Client::Handle_OP_LDoNButton; ConnectedOpcodes[OP_LDoNDisarmTraps] = &Client::Handle_OP_LDoNDisarmTraps; ConnectedOpcodes[OP_LDoNInspect] = &Client::Handle_OP_LDoNInspect; ConnectedOpcodes[OP_LDoNOpen] = &Client::Handle_OP_LDoNOpen; ConnectedOpcodes[OP_LDoNPickLock] = &Client::Handle_OP_LDoNPickLock; ConnectedOpcodes[OP_LDoNSenseTraps] = &Client::Handle_OP_LDoNSenseTraps; ConnectedOpcodes[OP_LeadershipExpToggle] = &Client::Handle_OP_LeadershipExpToggle; ConnectedOpcodes[OP_LeaveAdventure] = &Client::Handle_OP_LeaveAdventure; ConnectedOpcodes[OP_LeaveBoat] = &Client::Handle_OP_LeaveBoat; ConnectedOpcodes[OP_LFGCommand] = &Client::Handle_OP_LFGCommand; ConnectedOpcodes[OP_LFGGetMatchesRequest] = &Client::Handle_OP_LFGGetMatchesRequest; ConnectedOpcodes[OP_LFGuild] = &Client::Handle_OP_LFGuild; ConnectedOpcodes[OP_LFPCommand] = &Client::Handle_OP_LFPCommand; ConnectedOpcodes[OP_LFPGetMatchesRequest] = &Client::Handle_OP_LFPGetMatchesRequest; 
ConnectedOpcodes[OP_LoadSpellSet] = &Client::Handle_OP_LoadSpellSet; ConnectedOpcodes[OP_Logout] = &Client::Handle_OP_Logout; ConnectedOpcodes[OP_LootItem] = &Client::Handle_OP_LootItem; ConnectedOpcodes[OP_LootRequest] = &Client::Handle_OP_LootRequest; ConnectedOpcodes[OP_ManaChange] = &Client::Handle_OP_ManaChange; ConnectedOpcodes[OP_MemorizeSpell] = &Client::Handle_OP_MemorizeSpell; ConnectedOpcodes[OP_Mend] = &Client::Handle_OP_Mend; ConnectedOpcodes[OP_MercenaryCommand] = &Client::Handle_OP_MercenaryCommand; ConnectedOpcodes[OP_MercenaryDataRequest] = &Client::Handle_OP_MercenaryDataRequest; ConnectedOpcodes[OP_MercenaryDataUpdateRequest] = &Client::Handle_OP_MercenaryDataUpdateRequest; ConnectedOpcodes[OP_MercenaryDismiss] = &Client::Handle_OP_MercenaryDismiss; ConnectedOpcodes[OP_MercenaryHire] = &Client::Handle_OP_MercenaryHire; ConnectedOpcodes[OP_MercenarySuspendRequest] = &Client::Handle_OP_MercenarySuspendRequest; ConnectedOpcodes[OP_MercenaryTimerRequest] = &Client::Handle_OP_MercenaryTimerRequest; ConnectedOpcodes[OP_MoveCoin] = &Client::Handle_OP_MoveCoin; ConnectedOpcodes[OP_MoveItem] = &Client::Handle_OP_MoveItem; ConnectedOpcodes[OP_MoveMultipleItems] = &Client::Handle_OP_MoveMultipleItems; ConnectedOpcodes[OP_OpenContainer] = &Client::Handle_OP_OpenContainer; ConnectedOpcodes[OP_OpenGuildTributeMaster] = &Client::Handle_OP_OpenGuildTributeMaster; ConnectedOpcodes[OP_OpenInventory] = &Client::Handle_OP_OpenInventory; ConnectedOpcodes[OP_OpenTributeMaster] = &Client::Handle_OP_OpenTributeMaster; ConnectedOpcodes[OP_PDeletePetition] = &Client::Handle_OP_PDeletePetition; ConnectedOpcodes[OP_PetCommands] = &Client::Handle_OP_PetCommands; ConnectedOpcodes[OP_Petition] = &Client::Handle_OP_Petition; ConnectedOpcodes[OP_PetitionBug] = &Client::Handle_OP_PetitionBug; ConnectedOpcodes[OP_PetitionCheckIn] = &Client::Handle_OP_PetitionCheckIn; ConnectedOpcodes[OP_PetitionCheckout] = &Client::Handle_OP_PetitionCheckout; ConnectedOpcodes[OP_PetitionDelete] = &Client::Handle_OP_PetitionDelete; ConnectedOpcodes[OP_PetitionQue] = &Client::Handle_OP_PetitionQue; ConnectedOpcodes[OP_PetitionRefresh] = &Client::Handle_OP_PetitionRefresh; ConnectedOpcodes[OP_PetitionResolve] = &Client::Handle_OP_PetitionResolve; ConnectedOpcodes[OP_PetitionUnCheckout] = &Client::Handle_OP_PetitionUnCheckout; ConnectedOpcodes[OP_PlayerStateAdd] = &Client::Handle_OP_PlayerStateAdd; ConnectedOpcodes[OP_PlayerStateRemove] = &Client::Handle_OP_PlayerStateRemove; ConnectedOpcodes[OP_PickPocket] = &Client::Handle_OP_PickPocket; ConnectedOpcodes[OP_PopupResponse] = &Client::Handle_OP_PopupResponse; ConnectedOpcodes[OP_PotionBelt] = &Client::Handle_OP_PotionBelt; ConnectedOpcodes[OP_PurchaseLeadershipAA] = &Client::Handle_OP_PurchaseLeadershipAA; ConnectedOpcodes[OP_PVPLeaderBoardDetailsRequest] = &Client::Handle_OP_PVPLeaderBoardDetailsRequest; ConnectedOpcodes[OP_PVPLeaderBoardRequest] = &Client::Handle_OP_PVPLeaderBoardRequest; ConnectedOpcodes[OP_QueryUCSServerStatus] = &Client::Handle_OP_QueryUCSServerStatus; ConnectedOpcodes[OP_RaidInvite] = &Client::Handle_OP_RaidCommand; ConnectedOpcodes[OP_RandomReq] = &Client::Handle_OP_RandomReq; ConnectedOpcodes[OP_ReadBook] = &Client::Handle_OP_ReadBook; ConnectedOpcodes[OP_RecipeAutoCombine] = &Client::Handle_OP_RecipeAutoCombine; ConnectedOpcodes[OP_RecipeDetails] = &Client::Handle_OP_RecipeDetails; ConnectedOpcodes[OP_RecipesFavorite] = &Client::Handle_OP_RecipesFavorite; ConnectedOpcodes[OP_RecipesSearch] = &Client::Handle_OP_RecipesSearch; 
ConnectedOpcodes[OP_ReloadUI] = &Client::Handle_OP_ReloadUI; ConnectedOpcodes[OP_RemoveBlockedBuffs] = &Client::Handle_OP_RemoveBlockedBuffs; ConnectedOpcodes[OP_RemoveTrap] = &Client::Handle_OP_RemoveTrap; ConnectedOpcodes[OP_Report] = &Client::Handle_OP_Report; ConnectedOpcodes[OP_RequestDuel] = &Client::Handle_OP_RequestDuel; ConnectedOpcodes[OP_RequestTitles] = &Client::Handle_OP_RequestTitles; ConnectedOpcodes[OP_RespawnWindow] = &Client::Handle_OP_RespawnWindow; ConnectedOpcodes[OP_Rewind] = &Client::Handle_OP_Rewind; ConnectedOpcodes[OP_RezzAnswer] = &Client::Handle_OP_RezzAnswer; ConnectedOpcodes[OP_Sacrifice] = &Client::Handle_OP_Sacrifice; ConnectedOpcodes[OP_SafeFallSuccess] = &Client::Handle_OP_SafeFallSuccess; ConnectedOpcodes[OP_SafePoint] = &Client::Handle_OP_SafePoint; ConnectedOpcodes[OP_Save] = &Client::Handle_OP_Save; ConnectedOpcodes[OP_SaveOnZoneReq] = &Client::Handle_OP_SaveOnZoneReq; ConnectedOpcodes[OP_SelectTribute] = &Client::Handle_OP_SelectTribute; // Use or Ignore sense heading based on rule. bool train = RuleB(Skills, TrainSenseHeading); ConnectedOpcodes[OP_SenseHeading] = (train) ? &Client::Handle_OP_SenseHeading : &Client::Handle_OP_Ignore; ConnectedOpcodes[OP_SenseTraps] = &Client::Handle_OP_SenseTraps; ConnectedOpcodes[OP_SetGuildMOTD] = &Client::Handle_OP_SetGuildMOTD; ConnectedOpcodes[OP_SetRunMode] = &Client::Handle_OP_SetRunMode; ConnectedOpcodes[OP_SetServerFilter] = &Client::Handle_OP_SetServerFilter; ConnectedOpcodes[OP_SetStartCity] = &Client::Handle_OP_SetStartCity; ConnectedOpcodes[OP_SetTitle] = &Client::Handle_OP_SetTitle; ConnectedOpcodes[OP_Shielding] = &Client::Handle_OP_Shielding; ConnectedOpcodes[OP_ShopEnd] = &Client::Handle_OP_ShopEnd; ConnectedOpcodes[OP_ShopPlayerBuy] = &Client::Handle_OP_ShopPlayerBuy; ConnectedOpcodes[OP_ShopPlayerSell] = &Client::Handle_OP_ShopPlayerSell; ConnectedOpcodes[OP_ShopRequest] = &Client::Handle_OP_ShopRequest; ConnectedOpcodes[OP_Sneak] = &Client::Handle_OP_Sneak; ConnectedOpcodes[OP_SpawnAppearance] = &Client::Handle_OP_SpawnAppearance; ConnectedOpcodes[OP_Split] = &Client::Handle_OP_Split; ConnectedOpcodes[OP_Surname] = &Client::Handle_OP_Surname; ConnectedOpcodes[OP_SwapSpell] = &Client::Handle_OP_SwapSpell; ConnectedOpcodes[OP_TargetCommand] = &Client::Handle_OP_TargetCommand; ConnectedOpcodes[OP_TargetMouse] = &Client::Handle_OP_TargetMouse; ConnectedOpcodes[OP_TaskHistoryRequest] = &Client::Handle_OP_TaskHistoryRequest; ConnectedOpcodes[OP_TaskTimers] = &Client::Handle_OP_TaskTimers; ConnectedOpcodes[OP_Taunt] = &Client::Handle_OP_Taunt; ConnectedOpcodes[OP_TestBuff] = &Client::Handle_OP_TestBuff; ConnectedOpcodes[OP_TGB] = &Client::Handle_OP_TGB; ConnectedOpcodes[OP_Track] = &Client::Handle_OP_Track; ConnectedOpcodes[OP_TrackTarget] = &Client::Handle_OP_TrackTarget; ConnectedOpcodes[OP_TrackUnknown] = &Client::Handle_OP_TrackUnknown; ConnectedOpcodes[OP_TradeAcceptClick] = &Client::Handle_OP_TradeAcceptClick; ConnectedOpcodes[OP_TradeBusy] = &Client::Handle_OP_TradeBusy; ConnectedOpcodes[OP_Trader] = &Client::Handle_OP_Trader; ConnectedOpcodes[OP_TraderBuy] = &Client::Handle_OP_TraderBuy; ConnectedOpcodes[OP_TradeRequest] = &Client::Handle_OP_TradeRequest; ConnectedOpcodes[OP_TradeRequestAck] = &Client::Handle_OP_TradeRequestAck; ConnectedOpcodes[OP_TraderShop] = &Client::Handle_OP_TraderShop; ConnectedOpcodes[OP_TradeSkillCombine] = &Client::Handle_OP_TradeSkillCombine; ConnectedOpcodes[OP_Translocate] = &Client::Handle_OP_Translocate; ConnectedOpcodes[OP_TributeItem] = 
&Client::Handle_OP_TributeItem; ConnectedOpcodes[OP_TributeMoney] = &Client::Handle_OP_TributeMoney; ConnectedOpcodes[OP_TributeNPC] = &Client::Handle_OP_TributeNPC; ConnectedOpcodes[OP_TributeToggle] = &Client::Handle_OP_TributeToggle; ConnectedOpcodes[OP_TributeUpdate] = &Client::Handle_OP_TributeUpdate; ConnectedOpcodes[OP_VetClaimRequest] = &Client::Handle_OP_VetClaimRequest; ConnectedOpcodes[OP_VoiceMacroIn] = &Client::Handle_OP_VoiceMacroIn; ConnectedOpcodes[OP_UpdateAura] = &Client::Handle_OP_UpdateAura;; ConnectedOpcodes[OP_WearChange] = &Client::Handle_OP_WearChange; ConnectedOpcodes[OP_WhoAllRequest] = &Client::Handle_OP_WhoAllRequest; ConnectedOpcodes[OP_WorldUnknown001] = &Client::Handle_OP_Ignore; ConnectedOpcodes[OP_XTargetAutoAddHaters] = &Client::Handle_OP_XTargetAutoAddHaters; ConnectedOpcodes[OP_XTargetOpen] = &Client::Handle_OP_XTargetOpen; ConnectedOpcodes[OP_XTargetRequest] = &Client::Handle_OP_XTargetRequest; ConnectedOpcodes[OP_YellForHelp] = &Client::Handle_OP_YellForHelp; ConnectedOpcodes[OP_ZoneChange] = &Client::Handle_OP_ZoneChange; ConnectedOpcodes[OP_ResetAA] = &Client::Handle_OP_ResetAA; ConnectedOpcodes[OP_UnderWorld] = &Client::Handle_OP_UnderWorld; // shared tasks ConnectedOpcodes[OP_SharedTaskRemovePlayer] = &Client::Handle_OP_SharedTaskRemovePlayer; ConnectedOpcodes[OP_SharedTaskAddPlayer] = &Client::Handle_OP_SharedTaskAddPlayer; ConnectedOpcodes[OP_SharedTaskMakeLeader] = &Client::Handle_OP_SharedTaskMakeLeader; ConnectedOpcodes[OP_SharedTaskInviteResponse] = &Client::Handle_OP_SharedTaskInviteResponse; ConnectedOpcodes[OP_SharedTaskAcceptNew] = &Client::Handle_OP_SharedTaskAccept; ConnectedOpcodes[OP_SharedTaskQuit] = &Client::Handle_OP_SharedTaskQuit; ConnectedOpcodes[OP_SharedTaskPlayerList] = &Client::Handle_OP_SharedTaskPlayerList; } void ClearMappedOpcode(EmuOpcode op) { if (op >= _maxEmuOpcode) return; ConnectedOpcodes[op] = nullptr; auto iter = ConnectingOpcodes.find(op); if (iter != ConnectingOpcodes.end()) { ConnectingOpcodes.erase(iter); } } // client methods int Client::HandlePacket(const EQApplicationPacket *app) { if (LogSys.log_settings[Logs::LogCategory::Netcode].is_category_enabled == 1) { char buffer[64]; app->build_header_dump(buffer); Log(Logs::Detail, Logs::PacketClientServer, "Dispatch opcode: %s", buffer); } if (LogSys.log_settings[Logs::PacketClientServer].is_category_enabled == 1) Log(Logs::General, Logs::PacketClientServer, "[%s - 0x%04x] [Size: %u]", OpcodeManager::EmuToName(app->GetOpcode()), app->GetOpcode(), app->Size()); if (LogSys.log_settings[Logs::PacketClientServerWithDump].is_category_enabled == 1) Log(Logs::General, Logs::PacketClientServerWithDump, "[%s - 0x%04x] [Size: %u] %s", OpcodeManager::EmuToName(app->GetOpcode()), app->GetOpcode(), app->Size(), DumpPacketToString(app).c_str()); EmuOpcode opcode = app->GetOpcode(); if (opcode == OP_AckPacket) { return true; } #if EQDEBUG >= 9 std::cout << "Received 0x" << std::hex << std::setw(4) << std::setfill('0') << opcode << ", size=" << std::dec << app->size << std::endl; #endif switch (client_state) { case CLIENT_CONNECTING: { if (ConnectingOpcodes.count(opcode) != 1) { //Hate const cast but everything in lua needs to be non-const even if i make it non-mutable std::vector<EQ::Any> args; args.push_back(const_cast<EQApplicationPacket*>(app)); parse->EventPlayer(EVENT_UNHANDLED_OPCODE, this, "", 1, &args); break; } ClientPacketProc p; p = ConnectingOpcodes[opcode]; //call the processing routine (this->*p)(app); //special case where connecting code needs to boot client... 
if (client_state == CLIENT_KICKED) { return(false); } break; } case CLIENT_CONNECTED: { ClientPacketProc p; p = ConnectedOpcodes[opcode]; if (p == nullptr) { std::vector<EQ::Any> args; args.push_back(const_cast<EQApplicationPacket*>(app)); parse->EventPlayer(EVENT_UNHANDLED_OPCODE, this, "", 0, &args); if (LogSys.log_settings[Logs::PacketClientServerUnhandled].is_category_enabled == 1) { char buffer[64]; app->build_header_dump(buffer); Log(Logs::General, Logs::PacketClientServerUnhandled, "%s %s", buffer, DumpPacketToString(app).c_str()); } break; } //call the processing routine (this->*p)(app); break; } case CLIENT_KICKED: case DISCONNECTED: case CLIENT_LINKDEAD: break; default: LogDebug("Unknown client_state: [{}]\n", client_state); break; } return(true); } // Finish client connecting state void Client::CompleteConnect() { UpdateWho(); client_state = CLIENT_CONNECTED; SendAllPackets(); hpupdate_timer.Start(); autosave_timer.Start(); SetDuelTarget(0); SetDueling(false); EnteringMessages(this); LoadZoneFlags(); /* Sets GM Flag if needed & Sends Petition Queue */ UpdateAdmin(false); // Task Packets LoadClientTaskState(); if (IsInAGuild()) { uint8 rank = GuildRank(); if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { switch (rank) { case 0: { rank = 5; break; } // GUILD_MEMBER 0 case 1: { rank = 3; break; } // GUILD_OFFICER 1 case 2: { rank = 1; break; } // GUILD_LEADER 2 default: { break; } // GUILD_NONE } } SendAppearancePacket(AT_GuildID, GuildID(), false); SendAppearancePacket(AT_GuildRank, rank, false); } // moved to dbload and translators since we iterate there also .. keep m_pp values whatever they are when they get here /*const auto sbs = EQ::spells::DynamicLookup(ClientVersion(), GetGM())->SpellbookSize; for (uint32 spellInt = 0; spellInt < sbs; ++spellInt) { if (m_pp.spell_book[spellInt] < 3 || m_pp.spell_book[spellInt] > EQ::spells::SPELL_ID_MAX) m_pp.spell_book[spellInt] = 0xFFFFFFFF; }*/ //SendAATable(); if (GetHideMe()) Message(Chat::Red, "[GM] You are currently hidden to all clients"); uint32 raidid = database.GetRaidID(GetName()); Raid *raid = nullptr; if (raidid > 0) { raid = entity_list.GetRaidByID(raidid); if (!raid) { raid = new Raid(raidid); if (raid->GetID() != 0) { entity_list.AddRaid(raid, raidid); raid->LoadLeadership(); // Recreating raid in new zone, get leadership from DB } else { safe_delete(raid); } } if (raid) { SetRaidGrouped(true); raid->LearnMembers(); raid->VerifyRaid(); raid->GetRaidDetails(); /* Only leader should get this; send to all for now till I figure out correct creation; can probably also send a no longer leader packet for non leaders but not important for now. */ raid->SendRaidCreate(this); raid->SendMakeLeaderPacketTo(raid->leadername, this); raid->SendRaidAdd(GetName(), this); raid->SendBulkRaid(this); raid->SendGroupUpdate(this); raid->SendRaidMOTD(this); if (raid->IsLeader(this)) { // We're a raid leader, lets update just in case! raid->UpdateRaidAAs(); raid->SendAllRaidLeadershipAA(); } uint32 grpID = raid->GetGroup(GetName()); if (grpID < 12) { raid->SendRaidGroupRemove(GetName(), grpID); raid->SendRaidGroupAdd(GetName(), grpID); raid->CheckGroupMentor(grpID, this); if (raid->IsGroupLeader(GetName())) { // group leader same thing! raid->UpdateGroupAAs(raid->GetGroup(this)); raid->GroupUpdate(grpID, false); } } raid->SendGroupLeadershipAA(this, grpID); // this may get sent an extra time ... 
SetXTargetAutoMgr(raid->GetXTargetAutoMgr()); if (!GetXTargetAutoMgr()->empty()) SetDirtyAutoHaters(); if (raid->IsLocked()) raid->SendRaidLockTo(this); raid->SendHPManaEndPacketsTo(this); } } else { Group *group = nullptr; group = this->GetGroup(); if (group) group->SendHPManaEndPacketsTo(this); } //bulk raid send in here eventually //reapply some buffs uint32 buff_count = GetMaxTotalSlots(); for (uint32 j1 = 0; j1 < buff_count; j1++) { if (!IsValidSpell(buffs[j1].spellid)) continue; const SPDat_Spell_Struct &spell = spells[buffs[j1].spellid]; int NimbusEffect = GetNimbusEffect(buffs[j1].spellid); if (NimbusEffect) { if (!IsNimbusEffectActive(NimbusEffect)) SendSpellEffect(NimbusEffect, 500, 0, 1, 3000, true); } for (int x1 = 0; x1 < EFFECT_COUNT; x1++) { switch (spell.effectid[x1]) { case SE_IllusionCopy: case SE_Illusion: { if (spell.base[x1] == -1) { if (gender == 1) gender = 0; else if (gender == 0) gender = 1; SendIllusionPacket(GetRace(), gender, 0xFF, 0xFF); } else if (spell.base[x1] == -2) // WTF IS THIS { if (GetRace() == 128 || GetRace() == 130 || GetRace() <= 12) SendIllusionPacket(GetRace(), GetGender(), spell.base2[x1], spell.max[x1]); } else if (spell.max[x1] > 0) { SendIllusionPacket(spell.base[x1], 0xFF, spell.base2[x1], spell.max[x1]); } else { SendIllusionPacket(spell.base[x1], 0xFF, 0xFF, 0xFF); } switch (spell.base[x1]) { case OGRE: SendAppearancePacket(AT_Size, 9); break; case TROLL: SendAppearancePacket(AT_Size, 8); break; case VAHSHIR: case BARBARIAN: SendAppearancePacket(AT_Size, 7); break; case HALF_ELF: case WOOD_ELF: case DARK_ELF: case FROGLOK: SendAppearancePacket(AT_Size, 5); break; case DWARF: SendAppearancePacket(AT_Size, 4); break; case HALFLING: case GNOME: SendAppearancePacket(AT_Size, 3); break; default: SendAppearancePacket(AT_Size, 6); break; } break; } case SE_SummonHorse: { SummonHorse(buffs[j1].spellid); //hasmount = true; //this was false, is that the correct thing? 
break; } case SE_Silence: { Silence(true); break; } case SE_Amnesia: { Amnesia(true); break; } case SE_DivineAura: { invulnerable = true; break; } case SE_Invisibility2: case SE_Invisibility: { invisible = true; SendAppearancePacket(AT_Invis, 1); break; } case SE_Levitate: { if (!zone->CanLevitate()) { if (!GetGM()) { SendAppearancePacket(AT_Levitate, 0); BuffFadeByEffect(SE_Levitate); Message(Chat::Red, "You can't levitate in this zone."); } } else { SendAppearancePacket(AT_Levitate, 2); } break; } case SE_InvisVsUndead2: case SE_InvisVsUndead: { invisible_undead = true; break; } case SE_InvisVsAnimals: { invisible_animals = true; break; } case SE_AddMeleeProc: case SE_WeaponProc: { AddProcToWeapon(GetProcID(buffs[j1].spellid, x1), false, 100 + spells[buffs[j1].spellid].base2[x1], buffs[j1].spellid, buffs[j1].casterlevel); break; } case SE_DefensiveProc: { AddDefensiveProc(GetProcID(buffs[j1].spellid, x1), 100 + spells[buffs[j1].spellid].base2[x1], buffs[j1].spellid); break; } case SE_RangedProc: { AddRangedProc(GetProcID(buffs[j1].spellid, x1), 100 + spells[buffs[j1].spellid].base2[x1], buffs[j1].spellid); break; } } } } /* Sends appearances for all mobs not doing anim_stand aka sitting, looting, playing dead */ entity_list.SendZoneAppearance(this); /* Sends the Nimbus particle effects (up to 3) for any mob using them */ entity_list.SendNimbusEffects(this); entity_list.SendUntargetable(this); int x; for (x = EQ::textures::textureBegin; x <= EQ::textures::LastTexture; x++) { SendWearChange(x); } // added due to wear change above UpdateActiveLight(); SendAppearancePacket(AT_Light, GetActiveLightType()); Mob *pet = GetPet(); if (pet != nullptr) { for (x = EQ::textures::textureBegin; x <= EQ::textures::LastTexture; x++) { pet->SendWearChange(x); } // added due to wear change above pet->UpdateActiveLight(); pet->SendAppearancePacket(AT_Light, pet->GetActiveLightType()); } entity_list.SendTraders(this); if (GetPet()) { GetPet()->SendPetBuffsToClient(); } if (GetGroup()) database.RefreshGroupFromDB(this); if (RuleB(TaskSystem, EnableTaskSystem)) TaskPeriodic_Timer.Start(); else TaskPeriodic_Timer.Disable(); conn_state = ClientConnectFinished; //enforce some rules.. if (!CanBeInZone()) { LogDebug("[CLIENT] Kicking char from zone, not allowed here"); GoToSafeCoords(ZoneID("arena"), 0); return; } if (zone) zone->weatherSend(this); TotalKarma = database.GetKarma(AccountID()); SendDisciplineTimers(); parse->EventPlayer(EVENT_ENTER_ZONE, this, "", 0); SetLastPositionBeforeBulkUpdate(GetPosition()); /* This sub event is for if a player logs in for the first time since entering world. 
*/ if (firstlogon == 1) { parse->EventPlayer(EVENT_CONNECT, this, "", 0); /* QS: PlayerLogConnectDisconnect */ if (RuleB(QueryServ, PlayerLogConnectDisconnect)) { std::string event_desc = StringFormat("Connect :: Logged into zoneid:%i instid:%i", this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Connect_State, this->CharacterID(), event_desc); } /** * Update last login since this doesn't get updated until a late save later so we can update online status */ database.QueryDatabase( StringFormat( "UPDATE `character_data` SET `last_login` = UNIX_TIMESTAMP() WHERE id = %u", CharacterID() ) ); } if (zone && zone->GetInstanceTimer()) { bool is_permanent = false; uint32 remaining_time_seconds = database.GetTimeRemainingInstance(zone->GetInstanceID(), is_permanent); uint32 day = (remaining_time_seconds / 86400); uint32 hour = (remaining_time_seconds / 3600) % 24; uint32 minute = (remaining_time_seconds / 60) % 60; uint32 second = (remaining_time_seconds / 1) % 60; if (day) { Message( Chat::Yellow, "%s (%u) will expire in %u days, %u hours, %u minutes, and %u seconds.", zone->GetLongName(), zone->GetInstanceID(), day, hour, minute, second ); } else if (hour) { Message( Chat::Yellow, "%s (%u) will expire in %u hours, %u minutes, and %u seconds.", zone->GetLongName(), zone->GetInstanceID(), hour, minute, second ); } else if (minute) { Message( Chat::Yellow, "%s (%u) will expire in %u minutes, and %u seconds.", zone->GetLongName(), zone->GetInstanceID(), minute, second ); } else { Message( Chat::Yellow, "%s (%u) will expire in in %u seconds.", zone->GetLongName(), zone->GetInstanceID(), second ); } } SendRewards(); SendAltCurrencies(); database.LoadAltCurrencyValues(CharacterID(), alternate_currency); SendAlternateCurrencyValues(); alternate_currency_loaded = true; ProcessAlternateCurrencyQueue(); /* This needs to be set, this determines whether or not data was loaded properly before a save */ client_data_loaded = true; CalcItemScale(); DoItemEnterZone(); if (zone->GetZoneID() == RuleI(World, GuildBankZoneID) && GuildBanks) GuildBanks->SendGuildBank(this); if (ClientVersion() >= EQ::versions::ClientVersion::SoD) entity_list.SendFindableNPCList(this); if (IsInAGuild()) { SendGuildRanks(); guild_mgr.SendGuildMemberUpdateToWorld(GetName(), GuildID(), zone->GetZoneID(), time(nullptr)); guild_mgr.RequestOnlineGuildMembers(this->CharacterID(), this->GuildID()); } SendDynamicZoneUpdates(); /** Request adventure info **/ auto pack = new ServerPacket(ServerOP_AdventureDataRequest, 64); strcpy((char*)pack->pBuffer, GetName()); worldserver.SendPacket(pack); delete pack; if (IsClient() && CastToClient()->ClientVersionBit() & EQ::versions::maskUFAndLater) { EQApplicationPacket *outapp = MakeBuffsPacket(false); CastToClient()->FastQueuePacket(&outapp); } // TODO: load these states // We at least will set them to the correct state for now if (m_ClientVersionBit & EQ::versions::maskUFAndLater && GetPet()) { SetPetCommandState(PET_BUTTON_SIT, 0); SetPetCommandState(PET_BUTTON_STOP, 0); SetPetCommandState(PET_BUTTON_REGROUP, 0); SetPetCommandState(PET_BUTTON_FOLLOW, 1); SetPetCommandState(PET_BUTTON_GUARD, 0); // Taunt saved on client side for logging on with pet // In our db for when we zone. 
SetPetCommandState(PET_BUTTON_HOLD, 0); SetPetCommandState(PET_BUTTON_GHOLD, 0); SetPetCommandState(PET_BUTTON_FOCUS, 0); SetPetCommandState(PET_BUTTON_SPELLHOLD, 0); } database.LoadAuras(this); // this ends up spawning them so probably safer to load this later (here) entity_list.RefreshClientXTargets(this); worldserver.RequestTellQueue(GetName()); entity_list.ScanCloseMobs(close_mobs, this, true); if (GetGM() && IsDevToolsEnabled()) { ShowDevToolsMenu(); } // shared tasks memberlist if (GetTaskState()->HasActiveSharedTask()) { // struct auto p = new ServerPacket( ServerOP_SharedTaskRequestMemberlist, sizeof(ServerSharedTaskRequestMemberlist_Struct) ); auto *r = (ServerSharedTaskRequestMemberlist_Struct *) p->pBuffer; // fill r->source_character_id = CharacterID(); r->task_id = GetTaskState()->GetActiveSharedTask().task_id; // send worldserver.SendPacket(p); safe_delete(p); } } // connecting opcode handlers /* void Client::Handle_Connect_0x3e33(const EQApplicationPacket *app) { //OP_0x0380 = 0x642c EQApplicationPacket* outapp = new EQApplicationPacket(OP_0x0380, sizeof(uint32)); // Dunno QueuePacket(outapp); safe_delete(outapp); return; } */ void Client::Handle_Connect_OP_ApproveZone(const EQApplicationPacket *app) { if (app->size != sizeof(ApproveZone_Struct)) { LogError("Invalid size on OP_ApproveZone: Expected [{}], Got [{}]", sizeof(ApproveZone_Struct), app->size); return; } ApproveZone_Struct* azone = (ApproveZone_Struct*)app->pBuffer; azone->approve = 1; QueuePacket(app); return; } void Client::Handle_Connect_OP_ClientError(const EQApplicationPacket *app) { if (app->size != sizeof(ClientError_Struct)) { LogError("Invalid size on OP_ClientError: Expected [{}], Got [{}]", sizeof(ClientError_Struct), app->size); return; } // Client reporting error to server ClientError_Struct* error = (ClientError_Struct*)app->pBuffer; LogError("Client error: [{}]", error->character_name); LogError("Error message: [{}]", error->message); Message(Chat::Red, error->message); #if (EQDEBUG>=5) DumpPacket(app); #endif return; } void Client::Handle_Connect_OP_ClientReady(const EQApplicationPacket *app) { conn_state = ClientReadyReceived; if (!Spawned()) SendZoneInPackets(); CompleteConnect(); SendHPUpdate(); } void Client::Handle_Connect_OP_ClientUpdate(const EQApplicationPacket *app) { //Once we get this, the client thinks it is connected //So give it the benefit of the doubt and move to connected Handle_Connect_OP_ClientReady(app); } void Client::Handle_Connect_OP_ReqClientSpawn(const EQApplicationPacket *app) { conn_state = ClientSpawnRequested; auto outapp = new EQApplicationPacket; // Send Zone Doors if (entity_list.MakeDoorSpawnPacket(outapp, this)) { QueuePacket(outapp); } safe_delete(outapp); // Send Zone Objects entity_list.SendZoneObjects(this); SendZonePoints(); // Live does this outapp = new EQApplicationPacket(OP_SendAAStats, 0); FastQueuePacket(&outapp); // Tell client they can continue we're done outapp = new EQApplicationPacket(OP_ZoneServerReady, 0); FastQueuePacket(&outapp); outapp = new EQApplicationPacket(OP_SendExpZonein, 0); FastQueuePacket(&outapp); if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { outapp = new EQApplicationPacket(OP_ClientReady, 0); FastQueuePacket(&outapp); } // New for Secrets of Faydwer - Used in Place of OP_SendExpZonein outapp = new EQApplicationPacket(OP_WorldObjectsSent, 0); QueuePacket(outapp); safe_delete(outapp); if (strncasecmp(zone->GetShortName(), "bazaar", 6) == 0) SendBazaarWelcome(); conn_state = ZoneContentsSent; return; } void 
Client::Handle_Connect_OP_ReqNewZone(const EQApplicationPacket *app) { conn_state = NewZoneRequested; EQApplicationPacket* outapp = nullptr; ///////////////////////////////////// // New Zone Packet outapp = new EQApplicationPacket(OP_NewZone, sizeof(NewZone_Struct)); NewZone_Struct* nz = (NewZone_Struct*)outapp->pBuffer; memcpy(outapp->pBuffer, &zone->newzone_data, sizeof(NewZone_Struct)); strcpy(nz->char_name, m_pp.name); // This was using FastQueuePacket and the packet was never getting sent... // Not sure if this was timing.... but the NewZone was never logged until // I changed it. outapp->priority = 6; QueuePacket(outapp); safe_delete(outapp); return; } void Client::Handle_Connect_OP_SendAAStats(const EQApplicationPacket *app) { SendAlternateAdvancementTimers(); auto outapp = new EQApplicationPacket(OP_SendAAStats, 0); QueuePacket(outapp); safe_delete(outapp); return; } void Client::Handle_Connect_OP_SendAATable(const EQApplicationPacket *app) { SendAlternateAdvancementTable(); return; } void Client::Handle_Connect_OP_SendExpZonein(const EQApplicationPacket *app) { auto outapp = new EQApplicationPacket(OP_SendExpZonein, 0); QueuePacket(outapp); safe_delete(outapp); // SoF+ Gets Zone-In packets after sending OP_WorldObjectsSent if (ClientVersion() < EQ::versions::ClientVersion::SoF) { SendZoneInPackets(); } return; } void Client::Handle_Connect_OP_SendGuildTributes(const EQApplicationPacket *app) { SendGuildTributes(); return; } void Client::Handle_Connect_OP_SendTributes(const EQApplicationPacket *app) { SendTributes(); return; } void Client::Handle_Connect_OP_SetServerFilter(const EQApplicationPacket *app) { if (app->size != sizeof(SetServerFilter_Struct)) { LogError("Received invalid sized OP_SetServerFilter"); DumpPacket(app); return; } SetServerFilter_Struct* filter = (SetServerFilter_Struct*)app->pBuffer; ServerFilter(filter); return; } void Client::Handle_Connect_OP_SpawnAppearance(const EQApplicationPacket *app) { return; } void Client::Handle_Connect_OP_TGB(const EQApplicationPacket *app) { if (app->size != sizeof(uint32)) { LogError("Invalid size on OP_TGB: Expected [{}], Got [{}]", sizeof(uint32), app->size); return; } OPTGB(app); return; } void Client::Handle_Connect_OP_UpdateAA(const EQApplicationPacket *app) { SendAlternateAdvancementPoints(); } void Client::Handle_Connect_OP_WearChange(const EQApplicationPacket *app) { //not sure what these are supposed to mean to us. 
return; } void Client::Handle_Connect_OP_WorldObjectsSent(const EQApplicationPacket *app) { // New for SoF+ auto outapp = new EQApplicationPacket(OP_WorldObjectsSent, 0); QueuePacket(outapp); safe_delete(outapp); // Packet order changed for SoF+, so below is sent here instead of OP_SendExpLogin SendZoneInPackets(); if (RuleB(Mercs, AllowMercs)) { SpawnMercOnZone(); } return; } void Client::Handle_Connect_OP_ZoneComplete(const EQApplicationPacket *app) { auto outapp = new EQApplicationPacket(OP_0x0347, 0); QueuePacket(outapp); safe_delete(outapp); return; } void Client::Handle_Connect_OP_ZoneEntry(const EQApplicationPacket *app) { if (app->size != sizeof(ClientZoneEntry_Struct)) return; ClientZoneEntry_Struct *cze = (ClientZoneEntry_Struct *)app->pBuffer; if (strlen(cze->char_name) > 63) return; conn_state = ReceivedZoneEntry; SetClientVersion(Connection()->ClientVersion()); m_ClientVersionBit = EQ::versions::ConvertClientVersionToClientVersionBit(Connection()->ClientVersion()); m_pp.SetPlayerProfileVersion(m_ClientVersion); m_inv.SetInventoryVersion(m_ClientVersion); /* Antighost code tmp var is so the search doesnt find this object */ Client* client = entity_list.GetClientByName(cze->char_name); if (!zone->GetAuth(ip, cze->char_name, &WID, &account_id, &character_id, &admin, lskey, &tellsoff)) { LogClientLogin("[{}] failed zone auth check", cze->char_name); if (nullptr != client) { client->Save(); client->Kick("Failed auth check"); } return; } strcpy(name, cze->char_name); /* Check for Client Spoofing */ if (client != 0) { struct in_addr ghost_addr; ghost_addr.s_addr = eqs->GetRemoteIP(); LogError("Ghosting client: Account ID:[{}] Name:[{}] Character:[{}] IP:[{}]", client->AccountID(), client->AccountName(), client->GetName(), inet_ntoa(ghost_addr)); client->Save(); client->Disconnect(); } uint32 pplen = 0; EQApplicationPacket* outapp = nullptr; MYSQL_RES* result = nullptr; bool loaditems = 0; uint32 i; std::string query; unsigned long* lengths = nullptr; uint32 cid = CharacterID(); character_id = cid; /* Global character_id reference */ /* Flush and reload factions */ database.RemoveTempFactions(this); database.LoadCharacterFactionValues(cid, factionvalues); /* Load Character Account Data: Temp until I move */ query = StringFormat("SELECT `status`, `name`, `ls_id`, `lsaccount_id`, `gmspeed`, `revoked`, `hideme`, `time_creation` FROM `account` WHERE `id` = %u", this->AccountID()); auto results = database.QueryDatabase(query); for (auto row = results.begin(); row != results.end(); ++row) { admin = atoi(row[0]); strn0cpy(account_name, row[1], sizeof(account_name)); strn0cpy(loginserver, row[2], sizeof(loginserver)); lsaccountid = atoi(row[3]); gmspeed = atoi(row[4]); revoked = atoi(row[5]); gm_hide_me = atoi(row[6]); account_creation = atoul(row[7]); } /* Load Character Data */ query = StringFormat("SELECT `lfp`, `lfg`, `xtargets`, `firstlogon`, `guild_id`, `rank` FROM `character_data` LEFT JOIN `guild_members` ON `id` = `char_id` WHERE `id` = %i", cid); results = database.QueryDatabase(query); for (auto row = results.begin(); row != results.end(); ++row) { if (row[4] && atoi(row[4]) > 0) { guild_id = atoi(row[4]); if (row[5] != nullptr) { guildrank = atoi(row[5]); } else { guildrank = GUILD_RANK_NONE; } } if (LFP) { LFP = atoi(row[0]); } if (LFG) { LFG = atoi(row[1]); } if (row[3]) firstlogon = atoi(row[3]); } if (RuleB(Character, SharedBankPlat)) m_pp.platinum_shared = database.GetSharedPlatinum(this->AccountID()); database.ClearOldRecastTimestamps(cid); /* Clear out our old recast 
timestamps to keep the DB clean */ // set to full support in case they're a gm with items in disabled expansion slots..but, have their gm flag off... // item loss will occur when they use the 'empty' slots, if this is not done m_inv.SetGMInventory(true); loaditems = database.GetInventory(cid, &m_inv); /* Load Character Inventory */ database.LoadCharacterBandolier(cid, &m_pp); /* Load Character Bandolier */ database.LoadCharacterBindPoint(cid, &m_pp); /* Load Character Bind */ database.LoadCharacterMaterialColor(cid, &m_pp); /* Load Character Material */ database.LoadCharacterPotions(cid, &m_pp); /* Load Character Potion Belt */ database.LoadCharacterCurrency(cid, &m_pp); /* Load Character Currency into PP */ database.LoadCharacterData(cid, &m_pp, &m_epp); /* Load Character Data from DB into PP as well as E_PP */ database.LoadCharacterSkills(cid, &m_pp); /* Load Character Skills */ database.LoadCharacterInspectMessage(cid, &m_inspect_message); /* Load Character Inspect Message */ database.LoadCharacterSpellBook(cid, &m_pp); /* Load Character Spell Book */ database.LoadCharacterMemmedSpells(cid, &m_pp); /* Load Character Memorized Spells */ database.LoadCharacterDisciplines(cid, &m_pp); /* Load Character Disciplines */ database.LoadCharacterLanguages(cid, &m_pp); /* Load Character Languages */ database.LoadCharacterLeadershipAA(cid, &m_pp); /* Load Character Leadership AA's */ database.LoadCharacterTribute(cid, &m_pp); /* Load CharacterTribute */ /* Load AdventureStats */ AdventureStats_Struct as; if (database.GetAdventureStats(cid, &as)) { m_pp.ldon_wins_guk = as.success.guk; m_pp.ldon_wins_mir = as.success.mir; m_pp.ldon_wins_mmc = as.success.mmc; m_pp.ldon_wins_ruj = as.success.ruj; m_pp.ldon_wins_tak = as.success.tak; m_pp.ldon_losses_guk = as.failure.guk; m_pp.ldon_losses_mir = as.failure.mir; m_pp.ldon_losses_mmc = as.failure.mmc; m_pp.ldon_losses_ruj = as.failure.ruj; m_pp.ldon_losses_tak = as.failure.tak; } /* Set item material tint */ for (int i = EQ::textures::textureBegin; i <= EQ::textures::LastTexture; i++) { if (m_pp.item_tint.Slot[i].UseTint == 1 || m_pp.item_tint.Slot[i].UseTint == 255) { m_pp.item_tint.Slot[i].UseTint = 0xFF; } } if (level) { level = m_pp.level; } /* If GM, not trackable */ if (gm_hide_me) { trackable = false; } /* Set Con State for Reporting */ conn_state = PlayerProfileLoaded; m_pp.zone_id = zone->GetZoneID(); m_pp.zoneInstance = zone->GetInstanceID(); /* Set Total Seconds Played */ TotalSecondsPlayed = m_pp.timePlayedMin * 60; /* If we can maintain intoxication across zones, check for it */ if (!RuleB(Character, MaintainIntoxicationAcrossZones)) m_pp.intoxication = 0; strcpy(name, m_pp.name); strcpy(lastname, m_pp.last_name); /* If PP is set to weird coordinates */ if ((m_pp.x == -1 && m_pp.y == -1 && m_pp.z == -1) || (m_pp.x == -2 && m_pp.y == -2 && m_pp.z == -2)) { auto zone_safe_point = zone->GetSafePoint(); m_pp.x = zone_safe_point.x; m_pp.y = zone_safe_point.y; m_pp.z = zone_safe_point.z; m_pp.heading = zone_safe_point.w; } /* If too far below ground, then fix */ // float ground_z = GetGroundZ(m_pp.x, m_pp.y, m_pp.z); // if (m_pp.z < (ground_z - 500)) // m_pp.z = ground_z; /* Set Mob variables for spawn */ class_ = m_pp.class_; level = m_pp.level; m_Position.x = m_pp.x; m_Position.y = m_pp.y; m_Position.z = m_pp.z; m_Position.w = m_pp.heading; race = m_pp.race; base_race = m_pp.race; gender = m_pp.gender; base_gender = m_pp.gender; deity = m_pp.deity; haircolor = m_pp.haircolor; beardcolor = m_pp.beardcolor; eyecolor1 = m_pp.eyecolor1; eyecolor2 = 
m_pp.eyecolor2; hairstyle = m_pp.hairstyle; luclinface = m_pp.face; beard = m_pp.beard; drakkin_heritage = m_pp.drakkin_heritage; drakkin_tattoo = m_pp.drakkin_tattoo; drakkin_details = m_pp.drakkin_details; // Max Level for Character:PerCharacterQglobalMaxLevel and Character:PerCharacterBucketMaxLevel int client_max_level = 0; if (RuleB(Character, PerCharacterQglobalMaxLevel)) { client_max_level = GetCharMaxLevelFromQGlobal(); } else if (RuleB(Character, PerCharacterBucketMaxLevel)) { client_max_level = GetCharMaxLevelFromBucket(); } SetClientMaxLevel(client_max_level); // we know our class now, so we might have to fix our consume timer! if (class_ == MONK) consume_food_timer.SetTimer(CONSUMPTION_MNK_TIMER); InitInnates(); /* If GM not set in DB, and does not meet min status to be GM, reset */ if (m_pp.gm && admin < minStatusToBeGM) m_pp.gm = 0; /* Load Guild */ if (!IsInAGuild()) { m_pp.guild_id = GUILD_NONE; } else { m_pp.guild_id = GuildID(); uint8 rank = guild_mgr.GetDisplayedRank(GuildID(), GuildRank(), CharacterID()); // FIXME: RoF guild rank if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { switch (rank) { case 0: rank = 5; break; case 1: rank = 3; break; case 2: rank = 1; break; default: break; } } m_pp.guildrank = rank; if (zone->GetZoneID() == RuleI(World, GuildBankZoneID)) GuildBanker = (guild_mgr.IsGuildLeader(GuildID(), CharacterID()) || guild_mgr.GetBankerFlag(CharacterID())); } m_pp.guildbanker = GuildBanker; switch (race) { case OGRE: size = 9; break; case TROLL: size = 8; break; case VAHSHIR: case BARBARIAN: size = 7; break; case HUMAN: case HIGH_ELF: case ERUDITE: case IKSAR: case DRAKKIN: size = 6; break; case HALF_ELF: size = 5.5; break; case WOOD_ELF: case DARK_ELF: case FROGLOK: size = 5; break; case DWARF: size = 4; break; case HALFLING: size = 3.5; break; case GNOME: size = 3; break; default: size = 0; } /* Check for Invalid points */ if (m_pp.ldon_points_guk < 0 || m_pp.ldon_points_guk > 2000000000) { m_pp.ldon_points_guk = 0; } if (m_pp.ldon_points_mir < 0 || m_pp.ldon_points_mir > 2000000000) { m_pp.ldon_points_mir = 0; } if (m_pp.ldon_points_mmc < 0 || m_pp.ldon_points_mmc > 2000000000) { m_pp.ldon_points_mmc = 0; } if (m_pp.ldon_points_ruj < 0 || m_pp.ldon_points_ruj > 2000000000) { m_pp.ldon_points_ruj = 0; } if (m_pp.ldon_points_tak < 0 || m_pp.ldon_points_tak > 2000000000) { m_pp.ldon_points_tak = 0; } if (m_pp.ldon_points_available < 0 || m_pp.ldon_points_available > 2000000000) { m_pp.ldon_points_available = 0; } if (RuleB(World, UseClientBasedExpansionSettings)) { m_pp.expansions = EQ::expansions::ConvertClientVersionToExpansionsMask(ClientVersion()); } else { m_pp.expansions = (RuleI(World, ExpansionSettings) & EQ::expansions::ConvertClientVersionToExpansionsMask(ClientVersion())); } if (!database.LoadAlternateAdvancement(this)) { LogError("Error loading AA points for [{}]", GetName()); } if (SPDAT_RECORDS > 0) { for (uint32 z = 0; z < EQ::spells::SPELL_GEM_COUNT; z++) { if (m_pp.mem_spells[z] >= (uint32)SPDAT_RECORDS) UnmemSpell(z, false); } database.LoadBuffs(this); uint32 max_slots = GetMaxBuffSlots(); for (int i = 0; i < BUFF_COUNT; i++) { if (buffs[i].spellid != SPELL_UNKNOWN) { m_pp.buffs[i].spellid = buffs[i].spellid; m_pp.buffs[i].bard_modifier = buffs[i].instrument_mod; m_pp.buffs[i].effect_type = 2; m_pp.buffs[i].player_id = 0x2211; m_pp.buffs[i].level = buffs[i].casterlevel; m_pp.buffs[i].unknown003 = 0; m_pp.buffs[i].duration = buffs[i].ticsremaining; m_pp.buffs[i].counters = buffs[i].counters; m_pp.buffs[i].num_hits = 
buffs[i].numhits; } else { m_pp.buffs[i].spellid = SPELLBOOK_UNKNOWN; m_pp.buffs[i].bard_modifier = 10; m_pp.buffs[i].effect_type = 0; m_pp.buffs[i].player_id = 0; m_pp.buffs[i].level = 0; m_pp.buffs[i].unknown003 = 0; m_pp.buffs[i].duration = 0; m_pp.buffs[i].counters = 0; m_pp.buffs[i].num_hits = 0; } } } /* Load Character Key Ring */ KeyRingLoad(); /* Send Group Members via PP */ uint32 groupid = database.GetGroupID(GetName()); Group* group = nullptr; if (groupid > 0) { group = entity_list.GetGroupByID(groupid); if (!group) { //nobody from our is here... start a new group group = new Group(groupid); if (group->GetID() != 0) entity_list.AddGroup(group, groupid); else //error loading group members... { delete group; group = nullptr; } } //else, somebody from our group is already here... if (!group) database.SetGroupID(GetName(), 0, CharacterID(), false); //cannot re-establish group, kill it } else { //no group id //clear out the group junk in our PP uint32 xy = 0; for (xy = 0; xy < MAX_GROUP_MEMBERS; xy++) memset(m_pp.groupMembers[xy], 0, 64); } if (group) { // If the group leader is not set, pull the group leader infomrmation from the database. if (!group->GetLeader()) { char ln[64]; char MainTankName[64]; char AssistName[64]; char PullerName[64]; char NPCMarkerName[64]; char mentoree_name[64]; int mentor_percent; GroupLeadershipAA_Struct GLAA; memset(ln, 0, 64); database.GetGroupLeadershipInfo(group->GetID(), ln, MainTankName, AssistName, PullerName, NPCMarkerName, mentoree_name, &mentor_percent, &GLAA); Client *c = entity_list.GetClientByName(ln); if (c) group->SetLeader(c); group->SetMainTank(MainTankName); group->SetMainAssist(AssistName); group->SetPuller(PullerName); group->SetNPCMarker(NPCMarkerName); group->SetGroupAAs(&GLAA); group->SetGroupMentor(mentor_percent, mentoree_name); //group->NotifyMainTank(this, 1); //group->NotifyMainAssist(this, 1); //group->NotifyPuller(this, 1); // If we are the leader, force an update of our group AAs to other members in the zone, in case // we purchased a new one while out-of-zone. if (group->IsLeader(this)) group->SendLeadershipAAUpdate(); } JoinGroupXTargets(group); group->UpdatePlayer(this); LFG = false; } #ifdef BOTS database.botdb.LoadOwnerOptions(this); // TODO: mod below function for loading spawned botgroups Bot::LoadAndSpawnAllZonedBots(this); #endif m_inv.SetGMInventory((bool)m_pp.gm); // set to current gm state for calc CalcBonuses(); if (RuleB(Zone, EnableLoggedOffReplenishments) && time(nullptr) - m_pp.lastlogin >= RuleI(Zone, MinOfflineTimeToReplenishments)) { m_pp.cur_hp = GetMaxHP(); m_pp.mana = GetMaxMana(); m_pp.endurance = GetMaxEndurance(); } if (m_pp.cur_hp <= 0) m_pp.cur_hp = GetMaxHP(); SetHP(m_pp.cur_hp); Mob::SetMana(m_pp.mana); // mob function doesn't send the packet SetEndurance(m_pp.endurance); /* Update LFP in case any (or all) of our group disbanded while we were zoning. 
*/ if (IsLFP()) { UpdateLFP(); } p_timers.SetCharID(CharacterID()); if (!p_timers.Load(&database)) { LogError("Unable to load ability timers from the database for [{}] ([{}])!", GetCleanName(), CharacterID()); } /* Load Spell Slot Refresh from Currently Memoried Spells */ for (unsigned int i = 0; i < EQ::spells::SPELL_GEM_COUNT; ++i) if (IsValidSpell(m_pp.mem_spells[i])) m_pp.spellSlotRefresh[i] = p_timers.GetRemainingTime(pTimerSpellStart + m_pp.mem_spells[i]) * 1000; /* Ability slot refresh send SK/PAL */ if (m_pp.class_ == SHADOWKNIGHT || m_pp.class_ == PALADIN) { uint32 abilitynum = 0; if (m_pp.class_ == SHADOWKNIGHT) { abilitynum = pTimerHarmTouch; } else { abilitynum = pTimerLayHands; } uint32 remaining = p_timers.GetRemainingTime(abilitynum); if (remaining > 0 && remaining < 15300) m_pp.abilitySlotRefresh = remaining * 1000; else m_pp.abilitySlotRefresh = 0; } #ifdef _EQDEBUG printf("Dumping inventory on load:\n"); m_inv.dumpEntireInventory(); #endif /* Reset to max so they dont drown on zone in if its underwater */ m_pp.air_remaining = 60; /* Check for PVP Zone status*/ if (zone->IsPVPZone()) m_pp.pvp = 1; /* Time entitled on Account: Move to account */ m_pp.timeentitledonaccount = database.GetTotalTimeEntitledOnAccount(AccountID()) / 1440; /* Reset rest timer if the durations have been lowered in the database */ if ((m_pp.RestTimer > RuleI(Character, RestRegenTimeToActivate)) && (m_pp.RestTimer > RuleI(Character, RestRegenRaidTimeToActivate))) m_pp.RestTimer = 0; /* This checksum should disappear once dynamic structs are in... each struct strategy will do it */ // looks to be in place now //CRC32::SetEQChecksum((unsigned char*)&m_pp, sizeof(PlayerProfile_Struct) - sizeof(m_pp.m_player_profile_version) - 4); // m_pp.checksum = 0; // All server out-bound player profile packets are now translated - no need to waste cycles calculating this... outapp = new EQApplicationPacket(OP_PlayerProfile, sizeof(PlayerProfile_Struct)); /* The entityid field in the Player Profile is used by the Client in relation to Group Leadership AA */ m_pp.entityid = GetID(); memcpy(outapp->pBuffer, &m_pp, outapp->size); outapp->priority = 6; FastQueuePacket(&outapp); if (m_pp.RestTimer) rest_timer.Start(m_pp.RestTimer * 1000); /* Load Pet */ database.LoadPetInfo(this); if (m_petinfo.SpellID > 1 && !GetPet() && m_petinfo.SpellID <= SPDAT_RECORDS) { MakePoweredPet(m_petinfo.SpellID, spells[m_petinfo.SpellID].teleport_zone, m_petinfo.petpower, m_petinfo.Name, m_petinfo.size); if (GetPet() && GetPet()->IsNPC()) { NPC *pet = GetPet()->CastToNPC(); pet->SetPetState(m_petinfo.Buffs, m_petinfo.Items); pet->CalcBonuses(); pet->SetHP(m_petinfo.HP); pet->SetMana(m_petinfo.Mana); // Taunt persists when zoning on newer clients, overwrite default. if (m_ClientVersionBit & EQ::versions::maskUFAndLater) { if (!firstlogon) { pet->SetTaunting(m_petinfo.taunting); } } } m_petinfo.SpellID = 0; } /* Moved here so it's after where we load the pet data. */ if (!aabonuses.ZoneSuspendMinion && !spellbonuses.ZoneSuspendMinion && !itembonuses.ZoneSuspendMinion) { memset(&m_suspendedminion, 0, sizeof(PetInfo)); } /* Server Zone Entry Packet */ outapp = new EQApplicationPacket(OP_ZoneEntry, sizeof(ServerZoneEntry_Struct)); ServerZoneEntry_Struct* sze = (ServerZoneEntry_Struct*)outapp->pBuffer; FillSpawnStruct(&sze->player, CastToMob()); sze->player.spawn.curHp = 1; sze->player.spawn.NPC = 0; sze->player.spawn.z += 6; //arbitrary lift, seems to help spawning under zone. 
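// Queue the zone entry packet, then bulk-send the zone's existing spawns, corpses and PVP flags so the connecting client can populate its entity list.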
outapp->priority = 6; FastQueuePacket(&outapp); /* Zone Spawns Packet */ entity_list.SendZoneSpawnsBulk(this); entity_list.SendZoneCorpsesBulk(this); entity_list.SendZonePVPUpdates(this); //hack until spawn struct is fixed. /* Time of Day packet */ outapp = new EQApplicationPacket(OP_TimeOfDay, sizeof(TimeOfDay_Struct)); TimeOfDay_Struct* tod = (TimeOfDay_Struct*)outapp->pBuffer; zone->zone_time.GetCurrentEQTimeOfDay(time(0), tod); outapp->priority = 6; FastQueuePacket(&outapp); /* Tribute Packets */ DoTributeUpdate(); if (m_pp.tribute_active) { //restart the tribute timer where we left off tribute_timer.Start(m_pp.tribute_time_remaining); } /* Character Inventory Packet this is not quite where live sends inventory, they do it after tribute */ if (loaditems) { /* Dont load if a length error occurs */ if (admin >= minStatusToBeGM) m_inv.SetGMInventory(true); // set to true to allow expansion-restricted packets through BulkSendInventoryItems(); /* Send stuff on the cursor which isnt sent in bulk */ for (auto iter = m_inv.cursor_cbegin(); iter != m_inv.cursor_cend(); ++iter) { /* First item cursor is sent in bulk inventory packet */ if (iter == m_inv.cursor_cbegin()) continue; const EQ::ItemInstance *inst = *iter; SendItemPacket(EQ::invslot::slotCursor, inst, ItemPacketLimbo); } // this is kinda hackish atm..this process needs to be realigned to allow a contiguous flow m_inv.SetGMInventory((bool)m_pp.gm); // reset back to current gm state } ApplyWeaponsStance(); auto dynamic_zone_member_entries = DynamicZoneMembersRepository::GetWhere(database, fmt::format("character_id = {}", CharacterID())); for (const auto& entry : dynamic_zone_member_entries) { m_dynamic_zone_ids.emplace_back(entry.dynamic_zone_id); } m_expedition_id = ExpeditionsRepository::GetIDByMemberID(database, CharacterID()); auto dz = zone->GetDynamicZone(); if (dz && dz->GetSafeReturnLocation().zone_id != 0) { auto safereturn = dz->GetSafeReturnLocation(); auto safereturn_entry = CharacterInstanceSafereturnsRepository::NewEntity(); safereturn_entry.character_id = CharacterID(); safereturn_entry.instance_zone_id = zone->GetZoneID(); safereturn_entry.instance_id = zone->GetInstanceID(); safereturn_entry.safe_zone_id = safereturn.zone_id; safereturn_entry.safe_x = safereturn.x; safereturn_entry.safe_y = safereturn.y; safereturn_entry.safe_z = safereturn.z; safereturn_entry.safe_heading = safereturn.heading; CharacterInstanceSafereturnsRepository::InsertOneOrUpdate(database, safereturn_entry); } else { CharacterInstanceSafereturnsRepository::DeleteWhere(database, fmt::format("character_id = {}", character_id)); } /** * DevTools Load Settings */ if (Admin() >= EQ::DevTools::GM_ACCOUNT_STATUS_LEVEL) { std::string dev_tools_window_key = StringFormat("%i-dev-tools-disabled", AccountID()); if (DataBucket::GetData(dev_tools_window_key) == "true") { dev_tools_enabled = false; } } if (m_ClientVersionBit & EQ::versions::maskUFAndLater) { outapp = new EQApplicationPacket(OP_XTargetResponse, 8); outapp->WriteUInt32(GetMaxXTargets()); outapp->WriteUInt32(0); FastQueuePacket(&outapp); } /* Weather Packet This shouldent be moved, this seems to be what the client uses to advance to the next state (sending ReqNewZone) */ outapp = new EQApplicationPacket(OP_Weather, 12); Weather_Struct *ws = (Weather_Struct *)outapp->pBuffer; ws->val1 = 0x000000FF; if (zone->zone_weather == 1) { ws->type = 0x31; } // Rain if (zone->zone_weather == 2) { outapp->pBuffer[8] = 0x01; ws->type = 0x02; } outapp->priority = 6; QueuePacket(outapp); safe_delete(outapp); if 
(ClientVersion() >= EQ::versions::ClientVersion::RoF) { Handle_Connect_OP_ReqNewZone(nullptr); } SetAttackTimer(); conn_state = ZoneInfoSent; zoneinpacket_timer.Start(); return; } // connected opcode handlers void Client::Handle_0x0193(const EQApplicationPacket *app) { // Not sure what this opcode does. It started being sent when OP_ClientUpdate was // changed to pump OP_ClientUpdate back out instead of OP_MobUpdate // 2 bytes: 00 00 return; } void Client::Handle_OP_AAAction(const EQApplicationPacket *app) { LogAA("Received OP_AAAction"); if (app->size != sizeof(AA_Action)) { LogAA("Error! OP_AAAction size didnt match!"); return; } AA_Action* action = (AA_Action*)app->pBuffer; if (action->action == aaActionActivate) {//AA Hotkey LogAA("Activating AA [{}]", action->ability); ActivateAlternateAdvancementAbility(action->ability, action->target_id); } else if (action->action == aaActionBuy) { PurchaseAlternateAdvancementRank(action->ability); } else if (action->action == aaActionDisableEXP) { //Turn Off AA Exp if (m_epp.perAA > 0) MessageString(Chat::White, AA_OFF); m_epp.perAA = 0; SendAlternateAdvancementStats(); } else if (action->action == aaActionSetEXP) { if (m_epp.perAA == 0) MessageString(Chat::White, AA_ON); m_epp.perAA = action->exp_value; if (m_epp.perAA < 0 || m_epp.perAA > 100) m_epp.perAA = 0; // stop exploit with sanity check // send an update SendAlternateAdvancementStats(); SendAlternateAdvancementTable(); } else { LogAA("Unknown AA action : [{}] [{}] [{}] [{}]", action->action, action->ability, action->target_id, action->exp_value); } } void Client::Handle_OP_AcceptNewTask(const EQApplicationPacket *app) { if (app->size != sizeof(AcceptNewTask_Struct)) { LogDebug("Size mismatch in OP_AcceptNewTask expected [{}] got [{}]", sizeof(AcceptNewTask_Struct), app->size); DumpPacket(app); return; } AcceptNewTask_Struct *ant = (AcceptNewTask_Struct*)app->pBuffer; if (ant->task_id > 0 && RuleB(TaskSystem, EnableTaskSystem) && task_state) task_state->AcceptNewTask(this, ant->task_id, ant->task_master_id, std::time(nullptr)); } void Client::Handle_OP_AdventureInfoRequest(const EQApplicationPacket *app) { if (app->size < sizeof(EntityId_Struct)) { LogError("Handle_OP_AdventureInfoRequest had a packet that was too small"); return; } EntityId_Struct* ent = (EntityId_Struct*)app->pBuffer; Mob * m = entity_list.GetMob(ent->entity_id); if (m && m->IsNPC()) { std::map<uint32, std::string>::iterator it; it = zone->adventure_entry_list_flavor.find(m->CastToNPC()->GetAdventureTemplate()); if (it != zone->adventure_entry_list_flavor.end()) { auto outapp = new EQApplicationPacket(OP_AdventureInfo, (it->second.size() + 2)); strn0cpy((char*)outapp->pBuffer, it->second.c_str(), it->second.size()); FastQueuePacket(&outapp); } else { if (m->CastToNPC()->GetAdventureTemplate() != 0) { std::string text = "Choose your difficulty and preferred adventure type."; auto outapp = new EQApplicationPacket(OP_AdventureInfo, (text.size() + 2)); strn0cpy((char*)outapp->pBuffer, text.c_str(), text.size()); FastQueuePacket(&outapp); } } } } void Client::Handle_OP_AdventureLeaderboardRequest(const EQApplicationPacket *app) { if (app->size < sizeof(AdventureLeaderboardRequest_Struct)) { return; } if (adventure_leaderboard_timer) { return; } adventure_leaderboard_timer = new Timer(4000); auto pack = new ServerPacket(ServerOP_AdventureLeaderboard, sizeof(ServerLeaderboardRequest_Struct)); ServerLeaderboardRequest_Struct *lr = (ServerLeaderboardRequest_Struct*)pack->pBuffer; strcpy(lr->player, GetName()); 
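// Fold the requested theme and type into the single leaderboard index (1 + theme * 2 + type) that the world server expects.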
AdventureLeaderboardRequest_Struct *lrs = (AdventureLeaderboardRequest_Struct*)app->pBuffer; lr->type = 1 + (lrs->theme * 2) + lrs->type; worldserver.SendPacket(pack); delete pack; } void Client::Handle_OP_AdventureMerchantPurchase(const EQApplicationPacket *app) { if (app->size != sizeof(Adventure_Purchase_Struct)) { LogError("OP size error: OP_AdventureMerchantPurchase expected:[{}] got:[{}]", sizeof(Adventure_Purchase_Struct), app->size); return; } Adventure_Purchase_Struct* aps = (Adventure_Purchase_Struct*)app->pBuffer; /* Get item apc->itemid (can check NPC if thats necessary), ldon point theme check only if theme is not 0 (I am not sure what 1-5 are though for themes) if(ldon_points_available >= item ldonpointcost) { give item (67 00 00 00 for the packettype using opcode 0x02c5) ldon_points_available -= ldonpointcost; } */ uint32 merchantid = 0; Mob* tmp = entity_list.GetMob(aps->npcid); if (tmp == 0 || !tmp->IsNPC() || ((tmp->GetClass() != ADVENTUREMERCHANT) && (tmp->GetClass() != DISCORD_MERCHANT) && (tmp->GetClass() != NORRATHS_KEEPERS_MERCHANT) && (tmp->GetClass() != DARK_REIGN_MERCHANT))) return; //you have to be somewhat close to them to be properly using them if (DistanceSquared(m_Position, tmp->GetPosition()) > USE_NPC_RANGE2) return; merchantid = tmp->CastToNPC()->MerchantType; const EQ::ItemData* item = nullptr; bool found = false; std::list<MerchantList> merlist = zone->merchanttable[merchantid]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end(); ++itr) { MerchantList ml = *itr; if (GetLevel() < ml.level_required) { continue; } int32 fac = tmp->GetPrimaryFaction(); if (fac != 0 && GetModCharacterFactionLevel(fac) < ml.faction_required) { continue; } item = database.GetItem(ml.item); if (!item) continue; if (item->ID == aps->itemid) { //This check to make sure that the item is actually on the NPC, people attempt to inject packets to get items summoned... 
found = true; break; } } if (!item || !found) { Message(Chat::Red, "Error: The item you purchased does not exist!"); return; } if (aps->Type == LDoNMerchant) { if (m_pp.ldon_points_available < int32(item->LDoNPrice)) { Message(Chat::Red, "You cannot afford that item."); return; } if (item->LDoNTheme <= 16) { if (item->LDoNTheme & 16) { if (m_pp.ldon_points_tak < int32(item->LDoNPrice)) { Message(Chat::Red, "You need at least %u points in tak to purchase this item.", int32(item->LDoNPrice)); return; } } else if (item->LDoNTheme & 8) { if (m_pp.ldon_points_ruj < int32(item->LDoNPrice)) { Message(Chat::Red, "You need at least %u points in ruj to purchase this item.", int32(item->LDoNPrice)); return; } } else if (item->LDoNTheme & 4) { if (m_pp.ldon_points_mmc < int32(item->LDoNPrice)) { Message(Chat::Red, "You need at least %u points in mmc to purchase this item.", int32(item->LDoNPrice)); return; } } else if (item->LDoNTheme & 2) { if (m_pp.ldon_points_mir < int32(item->LDoNPrice)) { Message(Chat::Red, "You need at least %u points in mir to purchase this item.", int32(item->LDoNPrice)); return; } } else if (item->LDoNTheme & 1) { if (m_pp.ldon_points_guk < int32(item->LDoNPrice)) { Message(Chat::Red, "You need at least %u points in guk to purchase this item.", int32(item->LDoNPrice)); return; } } } } else if (aps->Type == DiscordMerchant) { if (GetPVPPoints() < item->LDoNPrice) { Message(Chat::Red, "You need at least %u PVP points to purchase this item.", int32(item->LDoNPrice)); return; } } else if (aps->Type == NorrathsKeepersMerchant) { if (GetRadiantCrystals() < item->LDoNPrice) { Message(Chat::Red, "You need at least %u Radiant Crystals to purchase this item.", int32(item->LDoNPrice)); return; } } else if (aps->Type == DarkReignMerchant) { if (GetEbonCrystals() < item->LDoNPrice) { Message(Chat::Red, "You need at least %u Ebon Crystals to purchase this item.", int32(item->LDoNPrice)); return; } } else { Message(Chat::Red, "Unknown Adventure Merchant type."); return; } if (CheckLoreConflict(item)) { Message(Chat::Yellow, "You can only have one of a lore item."); return; } if (aps->Type == LDoNMerchant) { int32 requiredpts = (int32)item->LDoNPrice*-1; if (!UpdateLDoNPoints(6, requiredpts)) return; } else if (aps->Type == DiscordMerchant) { SetPVPPoints(GetPVPPoints() - (int32)item->LDoNPrice); SendPVPStats(); } else if (aps->Type == NorrathsKeepersMerchant) { SetRadiantCrystals(GetRadiantCrystals() - (int32)item->LDoNPrice); } else if (aps->Type == DarkReignMerchant) { SetEbonCrystals(GetEbonCrystals() - (int32)item->LDoNPrice); } int16 charges = 1; if (item->MaxCharges != 0) charges = item->MaxCharges; EQ::ItemInstance *inst = database.CreateItem(item, charges); if (!AutoPutLootInInventory(*inst, true, true)) { PutLootInInventory(EQ::invslot::slotCursor, *inst); } Save(1); } void Client::Handle_OP_AdventureMerchantRequest(const EQApplicationPacket *app) { if (app->size != sizeof(AdventureMerchant_Struct)) { LogError("OP size error: OP_AdventureMerchantRequest expected:[{}] got:[{}]", sizeof(AdventureMerchant_Struct), app->size); return; } std::stringstream ss(std::stringstream::in | std::stringstream::out); uint8 count = 0; AdventureMerchant_Struct* eid = (AdventureMerchant_Struct*)app->pBuffer; uint32 merchantid = 0; Mob* tmp = entity_list.GetMob(eid->entity_id); if (tmp == 0 || !tmp->IsNPC() || ((tmp->GetClass() != ADVENTUREMERCHANT) && (tmp->GetClass() != DISCORD_MERCHANT) && (tmp->GetClass() != NORRATHS_KEEPERS_MERCHANT) && (tmp->GetClass() != DARK_REIGN_MERCHANT))) return; //you 
have to be somewhat close to them to be properly using them if (DistanceSquared(m_Position, tmp->GetPosition()) > USE_NPC_RANGE2) return; merchantid = tmp->CastToNPC()->MerchantType; const EQ::ItemData *item = nullptr; std::list<MerchantList> merlist = zone->merchanttable[merchantid]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end() && count<255; ++itr) { const MerchantList &ml = *itr; if (GetLevel() < ml.level_required) { continue; } int32 fac = tmp->GetPrimaryFaction(); if (fac != 0 && GetModCharacterFactionLevel(fac) < ml.faction_required) { continue; } item = database.GetItem(ml.item); if (item) { uint32 theme; if (item->LDoNTheme > 16) { theme = 0; } else if (item->LDoNTheme & 16) { theme = 5; } else if (item->LDoNTheme & 8) { theme = 4; } else if (item->LDoNTheme & 4) { theme = 3; } else if (item->LDoNTheme & 2) { theme = 2; } else if (item->LDoNTheme & 1) { theme = 1; } else { theme = 0; } ss << "^" << item->Name << "|"; ss << item->ID << "|"; ss << item->LDoNPrice << "|"; ss << theme << "|"; ss << (item->Stackable ? 1 : 0) << "|"; ss << (item->LoreFlag ? 1 : 0) << "|"; ss << item->Races << "|"; ss << item->Classes; count++; } } //Count //^Item Name,Item ID,Cost in Points,Theme (0=none),0,1,races bit map,classes bitmap EQApplicationPacket* outapp = new EQApplicationPacket(OP_AdventureMerchantResponse, ss.str().size() + 2); outapp->pBuffer[0] = count; strn0cpy((char*)&outapp->pBuffer[1], ss.str().c_str(), ss.str().size()); FastQueuePacket(&outapp); } void Client::Handle_OP_AdventureMerchantSell(const EQApplicationPacket *app) { if (app->size != sizeof(Adventure_Sell_Struct)) { LogDebug("Size mismatch on OP_AdventureMerchantSell: got [{}] expected [{}]", app->size, sizeof(Adventure_Sell_Struct)); DumpPacket(app); return; } Adventure_Sell_Struct *ams_in = (Adventure_Sell_Struct*)app->pBuffer; Mob* vendor = entity_list.GetMob(ams_in->npcid); if (vendor == 0 || !vendor->IsNPC() || ((vendor->GetClass() != ADVENTUREMERCHANT) && (vendor->GetClass() != NORRATHS_KEEPERS_MERCHANT) && (vendor->GetClass() != DARK_REIGN_MERCHANT))) { Message(Chat::Red, "Vendor was not found."); return; } if (DistanceSquared(m_Position, vendor->GetPosition()) > USE_NPC_RANGE2) { Message(Chat::Red, "Vendor is out of range."); return; } uint32 itemid = GetItemIDAt(ams_in->slot); if (itemid == 0) { Message(Chat::Red, "Found no item at that slot."); return; } const EQ::ItemData* item = database.GetItem(itemid); EQ::ItemInstance* inst = GetInv().GetItem(ams_in->slot); if (!item || !inst) { Message(Chat::Red, "You seemed to have misplaced that item..."); return; } // Note that Lucy has ldonsold values of 4 and 5 for items sold by Norrath's Keepers and Dark Reign, whereas 13th Floor // has ldonsold = 0 for these items, so some manual editing of the items DB will be required to support sell back of the // items. // // The Merchant seems to have some other way of knowing whether he will accept the item, other than the ldonsold field, // e.g. if you summon items 76036 and 76053 (good and evil versions of Spell: Ward Of Vengeance), if you are interacting // with a Norrath's Keeper merchant and click on 76036 in your inventory, he says he will give you radiant crystals for // it, but he will refuse for item 76053. // // Similarly, just giving a cloth cap an ldonsold value of 4 will not make the Merchant buy it. // // Note that the the Client will not allow you to sell anything back to a Discord merchant, so there is no need to handle // that case here. 
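// Items the merchant refuses to buy back are rejected below; the buy-back price is then derived from LDoNPrice and LDoNSellBackRate.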
if (item->LDoNSold == 0) { Message(Chat::Red, "The merchant does not want that item."); return; } if (item->LDoNPrice == 0) { Message(Chat::Red, "The merchant does not want that item."); return; } // 06/11/2016 This formula matches RoF2 client side calculation. int32 price = (item->LDoNPrice + 1) * item->LDoNSellBackRate / 100; if (price == 0) { Message(Chat::Red, "The merchant does not want that item."); return; } if (RuleB(EventLog, RecordSellToMerchant)) LogMerchant(this, vendor, ams_in->charges, price, item, false); if (!inst->IsStackable()) { DeleteItemInInventory(ams_in->slot, 0, false); } else { if (inst->GetCharges() < ams_in->charges) { ams_in->charges = inst->GetCharges(); } if (ams_in->charges == 0) { Message(Chat::Red, "Charge mismatch error."); return; } DeleteItemInInventory(ams_in->slot, ams_in->charges, false); price *= ams_in->charges; } auto outapp = new EQApplicationPacket(OP_AdventureMerchantSell, sizeof(Adventure_Sell_Struct)); Adventure_Sell_Struct *ams = (Adventure_Sell_Struct*)outapp->pBuffer; ams->slot = ams_in->slot; ams->unknown000 = 1; ams->npcid = ams_in->npcid; ams->charges = ams_in->charges; ams->sell_price = price; FastQueuePacket(&outapp); switch (vendor->GetClass()) { case ADVENTUREMERCHANT: { UpdateLDoNPoints(6, price); break; } case NORRATHS_KEEPERS_MERCHANT: { SetRadiantCrystals(GetRadiantCrystals() + price); break; } case DARK_REIGN_MERCHANT: { SetEbonCrystals(GetEbonCrystals() + price); break; } default: break; } Save(1); } void Client::Handle_OP_AdventureRequest(const EQApplicationPacket *app) { if (app->size < sizeof(AdventureRequest_Struct)) { LogError("Handle_OP_AdventureRequest had a packet that was too small"); return; } if (IsOnAdventure()) { return; } if (!p_timers.Expired(&database, pTimerStartAdventureTimer, false)) { return; } if (GetPendingAdventureRequest()) { return; } AdventureRequest_Struct* ars = (AdventureRequest_Struct*)app->pBuffer; uint8 group_members = 0; Raid *r = nullptr; Group *g = nullptr; if (IsRaidGrouped()) { r = GetRaid(); group_members = r->RaidCount(); } else if (IsGrouped()) { g = GetGroup(); group_members = g->GroupCount(); } else { return; } if (group_members < RuleI(Adventure, MinNumberForGroup) || group_members > RuleI(Adventure, MaxNumberForGroup)) { return; } Mob* m = entity_list.GetMob(ars->entity_id); uint32 template_id = 0; if (m && m->IsNPC()) { template_id = m->CastToNPC()->GetAdventureTemplate(); } else { return; } auto packet = new ServerPacket(ServerOP_AdventureRequest, sizeof(ServerAdventureRequest_Struct) + (64 * group_members)); ServerAdventureRequest_Struct *sar = (ServerAdventureRequest_Struct*)packet->pBuffer; sar->member_count = group_members; sar->risk = ars->risk; sar->type = ars->type; sar->template_id = template_id; strcpy(sar->leader, GetName()); if (IsRaidGrouped()) { int i = 0; for (int x = 0; x < 72; ++x) { if (i == group_members) { break; } const char *c_name = nullptr; c_name = r->GetClientNameByIndex(x); if (c_name) { memcpy((packet->pBuffer + sizeof(ServerAdventureRequest_Struct) + (64 * i)), c_name, strlen(c_name)); ++i; } } } else { int i = 0; for (int x = 0; x < 6; ++x) { if (i == group_members) { break; } const char *c_name = nullptr; c_name = g->GetClientNameByIndex(x); if (c_name) { memcpy((packet->pBuffer + sizeof(ServerAdventureRequest_Struct) + (64 * i)), c_name, strlen(c_name)); ++i; } } } worldserver.SendPacket(packet); delete packet; p_timers.Start(pTimerStartAdventureTimer, 5); } void Client::Handle_OP_AdventureStatsRequest(const EQApplicationPacket *app) { if
(adventure_stats_timer) { return; } adventure_stats_timer = new Timer(8000); auto outapp = new EQApplicationPacket(OP_AdventureStatsReply, sizeof(AdventureStats_Struct)); AdventureStats_Struct *as = (AdventureStats_Struct*)outapp->pBuffer; if (database.GetAdventureStats(CharacterID(), as)) { m_pp.ldon_wins_guk = as->success.guk; m_pp.ldon_wins_mir = as->success.mir; m_pp.ldon_wins_mmc = as->success.mmc; m_pp.ldon_wins_ruj = as->success.ruj; m_pp.ldon_wins_tak = as->success.tak; m_pp.ldon_losses_guk = as->failure.guk; m_pp.ldon_losses_mir = as->failure.mir; m_pp.ldon_losses_mmc = as->failure.mmc; m_pp.ldon_losses_ruj = as->failure.ruj; m_pp.ldon_losses_tak = as->failure.tak; } FastQueuePacket(&outapp); } void Client::Handle_OP_AggroMeterLockTarget(const EQApplicationPacket *app) { if (app->size < sizeof(uint32)) { LogError("Handle_OP_AggroMeterLockTarget had a packet that was too small"); return; } SetAggroMeterLock(app->ReadUInt32(0)); ProcessAggroMeter(); } void Client::Handle_OP_AltCurrencyMerchantRequest(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_AltCurrencyMerchantRequest, app, uint32); NPC* tar = entity_list.GetNPCByID(*((uint32*)app->pBuffer)); if (tar) { if (DistanceSquared(m_Position, tar->GetPosition()) > USE_NPC_RANGE2) return; if (tar->GetClass() != ALT_CURRENCY_MERCHANT) { return; } uint32 alt_cur_id = tar->GetAltCurrencyType(); if (alt_cur_id == 0) { return; } auto altc_iter = zone->AlternateCurrencies.begin(); bool found = false; while (altc_iter != zone->AlternateCurrencies.end()) { if ((*altc_iter).id == alt_cur_id) { found = true; break; } ++altc_iter; } if (!found) { return; } std::stringstream ss(std::stringstream::in | std::stringstream::out); std::stringstream item_ss(std::stringstream::in | std::stringstream::out); ss << alt_cur_id << "|1|" << alt_cur_id; uint32 count = 0; uint32 merchant_id = tar->MerchantType; const EQ::ItemData *item = nullptr; std::list<MerchantList> merlist = zone->merchanttable[merchant_id]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end() && count < 255; ++itr) { const MerchantList &ml = *itr; if (GetLevel() < ml.level_required) { continue; } int32 fac = tar->GetPrimaryFaction(); if (fac != 0 && GetModCharacterFactionLevel(fac) < ml.faction_required) { continue; } item = database.GetItem(ml.item); if (item) { item_ss << "^" << item->Name << "|"; item_ss << item->ID << "|"; item_ss << ml.alt_currency_cost << "|"; item_ss << "0|"; item_ss << "1|"; item_ss << item->Races << "|"; item_ss << item->Classes; count++; } } if (count > 0) { ss << "|" << count << item_ss.str(); } else { ss << "|0"; } EQApplicationPacket* outapp = new EQApplicationPacket(OP_AltCurrencyMerchantReply, ss.str().length() + 1); memcpy(outapp->pBuffer, ss.str().c_str(), ss.str().length()); FastQueuePacket(&outapp); } } void Client::Handle_OP_AltCurrencyPurchase(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_AltCurrencyPurchase, app, AltCurrencyPurchaseItem_Struct); AltCurrencyPurchaseItem_Struct *purchase = (AltCurrencyPurchaseItem_Struct*)app->pBuffer; NPC* tar = entity_list.GetNPCByID(purchase->merchant_entity_id); if (tar) { if (DistanceSquared(m_Position, tar->GetPosition())> USE_NPC_RANGE2) return; if (tar->GetClass() != ALT_CURRENCY_MERCHANT) { return; } uint32 alt_cur_id = tar->GetAltCurrencyType(); if (alt_cur_id == 0) { return; } const EQ::ItemData* item = nullptr; uint32 cost = 0; uint32 current_currency = GetAlternateCurrencyValue(alt_cur_id); uint32 merchant_id = tar->MerchantType; bool found = 
false; std::list<MerchantList> merlist = zone->merchanttable[merchant_id]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end(); ++itr) { MerchantList ml = *itr; if (GetLevel() < ml.level_required) { continue; } int32 fac = tar->GetPrimaryFaction(); if (fac != 0 && GetModCharacterFactionLevel(fac) < ml.faction_required) { continue; } item = database.GetItem(ml.item); if (!item) continue; if (item->ID == purchase->item_id) { //This check to make sure that the item is actually on the NPC, people attempt to inject packets to get items summoned... cost = ml.alt_currency_cost; found = true; break; } } if (!item || !found) { Message(Chat::Red, "Error: The item you purchased does not exist!"); return; } if (cost > current_currency) { Message(Chat::Red, "You cannot afford that item right now."); return; } if (CheckLoreConflict(item)) { Message(Chat::Yellow, "You can only have one of a lore item."); return; } /* QS: PlayerLogAlternateCurrencyTransactions :: Merchant Purchase */ if (RuleB(QueryServ, PlayerLogAlternateCurrencyTransactions)) { std::string event_desc = StringFormat("Merchant Purchase :: Spent alt_currency_id:%i cost:%i for itemid:%i in zoneid:%i instid:%i", alt_cur_id, cost, item->ID, this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Alternate_Currency_Transactions, this->CharacterID(), event_desc); } AddAlternateCurrencyValue(alt_cur_id, -((int32)cost)); int16 charges = 1; if (item->MaxCharges != 0) charges = item->MaxCharges; EQ::ItemInstance *inst = database.CreateItem(item, charges); if (!AutoPutLootInInventory(*inst, true, true)) { PutLootInInventory(EQ::invslot::slotCursor, *inst); } Save(1); } } void Client::Handle_OP_AltCurrencyReclaim(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_AltCurrencyReclaim, app, AltCurrencyReclaim_Struct); AltCurrencyReclaim_Struct *reclaim = (AltCurrencyReclaim_Struct*)app->pBuffer; uint32 item_id = 0; auto iter = zone->AlternateCurrencies.begin(); while (iter != zone->AlternateCurrencies.end()) { if ((*iter).id == reclaim->currency_id) { item_id = (*iter).item_id; } ++iter; } if (item_id == 0) { return; } /* Item to Currency Storage */ if (reclaim->reclaim_flag == 1) { uint32 removed = NukeItem(item_id, invWhereWorn | invWherePersonal | invWhereCursor); if (removed > 0) { AddAlternateCurrencyValue(reclaim->currency_id, removed); /* QS: PlayerLogAlternateCurrencyTransactions :: Item to Currency */ if (RuleB(QueryServ, PlayerLogAlternateCurrencyTransactions)) { std::string event_desc = StringFormat("Reclaim :: Item to Currency :: alt_currency_id:%i amount:%i to currency tab in zoneid:%i instid:%i", reclaim->currency_id, removed, this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Alternate_Currency_Transactions, this->CharacterID(), event_desc); } } } /* Cursor to Item storage */ else { uint32 max_currency = GetAlternateCurrencyValue(reclaim->currency_id); if (max_currency == 0 || reclaim->count == 0) return; /* If you input more than you have currency wise, just give the max of the currency you currently have */ if (reclaim->count > max_currency) { SummonItem(item_id, max_currency); SetAlternateCurrencyValue(reclaim->currency_id, 0); } else { SummonItem(item_id, reclaim->count, 0, 0, 0, 0, 0, 0, false, EQ::invslot::slotCursor); AddAlternateCurrencyValue(reclaim->currency_id, -((int32)reclaim->count)); } /* QS: PlayerLogAlternateCurrencyTransactions :: Cursor to Item Storage */ if (RuleB(QueryServ, PlayerLogAlternateCurrencyTransactions)) { 
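// The QueryServ log entry records the currency id, the amount moved to the cursor and the zone/instance where the reclaim happened.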
std::string event_desc = StringFormat("Reclaim :: Cursor to Item :: alt_currency_id:%i amount:-%i in zoneid:%i instid:%i", reclaim->currency_id, reclaim->count, this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Alternate_Currency_Transactions, this->CharacterID(), event_desc); } } } void Client::Handle_OP_AltCurrencySell(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_AltCurrencySell, app, AltCurrencySellItem_Struct); EQApplicationPacket *outapp = app->Copy(); AltCurrencySellItem_Struct *sell = (AltCurrencySellItem_Struct*)outapp->pBuffer; NPC* tar = entity_list.GetNPCByID(sell->merchant_entity_id); if (tar) { if (DistanceSquared(m_Position, tar->GetPosition()) > USE_NPC_RANGE2) return; if (tar->GetClass() != ALT_CURRENCY_MERCHANT) { return; } uint32 alt_cur_id = tar->GetAltCurrencyType(); if (alt_cur_id == 0) { return; } EQ::ItemInstance* inst = GetInv().GetItem(sell->slot_id); if (!inst) { return; } if (!RuleB(Merchant, EnableAltCurrencySell)) { return; } const EQ::ItemData* item = nullptr; uint32 cost = 0; uint32 current_currency = GetAlternateCurrencyValue(alt_cur_id); uint32 merchant_id = tar->MerchantType; uint32 npc_id = tar->GetNPCTypeID(); bool found = false; std::list<MerchantList> merlist = zone->merchanttable[merchant_id]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end(); ++itr) { MerchantList ml = *itr; if (GetLevel() < ml.level_required) { continue; } int32 fac = tar->GetPrimaryFaction(); if (fac != 0 && GetModCharacterFactionLevel(fac) < ml.faction_required) { continue; } item = database.GetItem(ml.item); if (!item) continue; if (item->ID == inst->GetItem()->ID) { cost = ml.alt_currency_cost; found = true; break; } } if (!found) { return; } if (!inst->IsStackable()) { DeleteItemInInventory(sell->slot_id, 0, false); } else { if (inst->GetCharges() < sell->charges) { sell->charges = inst->GetCharges(); } if (sell->charges == 0) { Message(Chat::Red, "Charge mismatch error."); return; } DeleteItemInInventory(sell->slot_id, sell->charges, false); cost *= sell->charges; } sell->cost = cost; /* QS: PlayerLogAlternateCurrencyTransactions :: Sold to Merchant*/ if (RuleB(QueryServ, PlayerLogAlternateCurrencyTransactions)) { std::string event_desc = StringFormat("Sold to Merchant :: itemid:%u npcid:%u alt_currency_id:%u cost:%u in zoneid:%u instid:%i", item->ID, npc_id, alt_cur_id, cost, this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Alternate_Currency_Transactions, this->CharacterID(), event_desc); } FastQueuePacket(&outapp); AddAlternateCurrencyValue(alt_cur_id, cost); Save(1); } } void Client::Handle_OP_AltCurrencySellSelection(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_AltCurrencySellSelection, app, AltCurrencySelectItem_Struct); AltCurrencySelectItem_Struct *select = (AltCurrencySelectItem_Struct*)app->pBuffer; NPC* tar = entity_list.GetNPCByID(select->merchant_entity_id); if (tar) { if (DistanceSquared(m_Position, tar->GetPosition()) > USE_NPC_RANGE2) return; if (tar->GetClass() != ALT_CURRENCY_MERCHANT) { return; } uint32 alt_cur_id = tar->GetAltCurrencyType(); if (alt_cur_id == 0) { return; } EQ::ItemInstance *inst = m_inv.GetItem(select->slot_id); if (!inst) { return; } const EQ::ItemData* item = nullptr; uint32 cost = 0; uint32 current_currency = GetAlternateCurrencyValue(alt_cur_id); uint32 merchant_id = tar->MerchantType; if (RuleB(Merchant, EnableAltCurrencySell)) { bool found = false; std::list<MerchantList> merlist = 
zone->merchanttable[merchant_id]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end(); ++itr) { MerchantList ml = *itr; if (GetLevel() < ml.level_required) { continue; } int32 fac = tar->GetPrimaryFaction(); if (fac != 0 && GetModCharacterFactionLevel(fac) < ml.faction_required) { continue; } item = database.GetItem(ml.item); if (!item) continue; if (item->ID == inst->GetItem()->ID) { cost = ml.alt_currency_cost; found = true; break; } } if (!found) { cost = 0; } } else { cost = 0; } auto outapp = new EQApplicationPacket(OP_AltCurrencySellSelection, sizeof(AltCurrencySelectItemReply_Struct)); AltCurrencySelectItemReply_Struct *reply = (AltCurrencySelectItemReply_Struct*)outapp->pBuffer; reply->unknown004 = 0xFF; reply->unknown005 = 0xFF; reply->unknown006 = 0xFF; reply->unknown007 = 0xFF; strcpy(reply->item_name, inst->GetItem()->Name); reply->cost = cost; FastQueuePacket(&outapp); } } void Client::Handle_OP_Animation(const EQApplicationPacket *app) { if (app->size != sizeof(Animation_Struct)) { LogError("Received invalid sized OP_Animation: got [{}], expected [{}]", app->size, sizeof(Animation_Struct)); DumpPacket(app); return; } Animation_Struct *s = (Animation_Struct *)app->pBuffer; //might verify spawn ID, but it wouldn't affect anything DoAnim(s->action, s->speed); return; } void Client::Handle_OP_ApplyPoison(const EQApplicationPacket *app) { if (app->size != sizeof(ApplyPoison_Struct)) { LogError("Wrong size: OP_ApplyPoison, size=[{}], expected [{}]", app->size, sizeof(ApplyPoison_Struct)); DumpPacket(app); return; } ApplyPoison_Struct* ApplyPoisonData = (ApplyPoison_Struct*)app->pBuffer; uint32 ApplyPoisonSuccessResult = 0; const EQ::ItemInstance* PoisonItemInstance = GetInv().GetItem(ApplyPoisonData->inventorySlot); const EQ::ItemData* poison = (PoisonItemInstance ? PoisonItemInstance->GetItem() : nullptr); bool IsPoison = (poison && poison->ItemType == EQ::item::ItemTypePoison); if (IsPoison && GetClass() == ROGUE) { // Live always checks for skillup, even when poison is too high CheckIncreaseSkill(EQ::skills::SkillApplyPoison, nullptr, 10); if (poison->Proc.Level2 > GetLevel()) { // Poison is too high to apply. MessageString(Chat::LightBlue, POISON_TOO_HIGH); } else { double ChanceRoll = zone->random.Real(0, 1); // Poisons that use this skill (old world poisons) almost // never fail to apply. I did 25 applies of a trivial 120+ // poison with an apply skill of 48 and they all worked. // Also did 25 straight poisons at apply skill 248 for very // high end and they never failed. // Apply poison ranging from 1-9, 28/30 worked for a level 18.. // Poisons that don't proc until a level higher than the // rogue simply won't apply at all, no skill check done. uint16 poison_skill = GetSkill(EQ::skills::SkillApplyPoison); if (ChanceRoll < (.75 + poison_skill / 1000.0)) { ApplyPoisonSuccessResult = 1; AddProcToWeapon(poison->Proc.Effect, false, (GetDEX() / 100) + 103, POISON_PROC); } } // Live always deletes the item, success or failure. Even if too high.
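// Consume the poison vial, then echo an OP_ApplyPoison response so the client knows whether the apply succeeded.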
DeleteItemInInventory(ApplyPoisonData->inventorySlot, 1, true); } auto outapp = new EQApplicationPacket(OP_ApplyPoison, nullptr, sizeof(ApplyPoison_Struct)); ApplyPoison_Struct* ApplyPoisonResult = (ApplyPoison_Struct*)outapp->pBuffer; ApplyPoisonResult->success = ApplyPoisonSuccessResult; ApplyPoisonResult->inventorySlot = ApplyPoisonData->inventorySlot; FastQueuePacket(&outapp); } void Client::Handle_OP_Assist(const EQApplicationPacket *app) { if (app->size != sizeof(EntityId_Struct)) { LogDebug("Size mismatch in OP_Assist expected [{}] got [{}]", sizeof(EntityId_Struct), app->size); return; } EntityId_Struct* eid = (EntityId_Struct*)app->pBuffer; Entity* entity = entity_list.GetID(eid->entity_id); EQApplicationPacket* outapp = app->Copy(); eid = (EntityId_Struct*)outapp->pBuffer; if (RuleB(Combat, AssistNoTargetSelf)) eid->entity_id = GetID(); if (entity && entity->IsMob()) { Mob *assistee = entity->CastToMob(); if (assistee->GetTarget()) { Mob *new_target = assistee->GetTarget(); if (new_target && (GetGM() || Distance(m_Position, assistee->GetPosition()) <= TARGETING_RANGE)) { cheat_manager.SetExemptStatus(Assist, true); eid->entity_id = new_target->GetID(); } else { eid->entity_id = 0; } } else { eid->entity_id = 0; } } FastQueuePacket(&outapp); return; } void Client::Handle_OP_AssistGroup(const EQApplicationPacket *app) { if (app->size != sizeof(EntityId_Struct)) { LogDebug("Size mismatch in OP_AssistGroup expected [{}] got [{}]", sizeof(EntityId_Struct), app->size); return; } QueuePacket(app); return; } void Client::Handle_OP_AugmentInfo(const EQApplicationPacket *app) { // This packet is sent by the client when an Augment item information window is opened. // Some clients this seems to nuke the charm text (ex. Adventurer's Stone) if (app->size != sizeof(AugmentInfo_Struct)) { LogDebug("Size mismatch in OP_AugmentInfo expected [{}] got [{}]", sizeof(AugmentInfo_Struct), app->size); DumpPacket(app); return; } AugmentInfo_Struct* AugInfo = (AugmentInfo_Struct*)app->pBuffer; const EQ::ItemData * item = database.GetItem(AugInfo->itemid); if (item) { strn0cpy(AugInfo->augment_info, item->Name, 64); AugInfo->itemid = 0; QueuePacket(app); } } void Client::Handle_OP_AugmentItem(const EQApplicationPacket *app) { if (app->size != sizeof(AugmentItem_Struct)) { LogError("Invalid size for AugmentItem_Struct: Expected: [{}], Got: [{}]", sizeof(AugmentItem_Struct), app->size); return; } AugmentItem_Struct* in_augment = (AugmentItem_Struct*)app->pBuffer; bool deleteItems = false; if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { if ((in_augment->container_slot < EQ::invslot::EQUIPMENT_BEGIN || in_augment->container_slot > EQ::invslot::GENERAL_END) && (in_augment->container_slot < EQ::invbag::GENERAL_BAGS_BEGIN || in_augment->container_slot > EQ::invbag::GENERAL_BAGS_END)) { Message(Chat::Red, "The server does not allow augmentation actions from this slot."); auto cursor_item = m_inv[EQ::invslot::slotCursor]; auto augmented_item = m_inv[in_augment->container_slot]; SendItemPacket(EQ::invslot::slotCursor, cursor_item, ItemPacketCharInventory); // this may crash clients on certain slots SendItemPacket(in_augment->container_slot, augmented_item, ItemPacketCharInventory); return; } EQ::ItemInstance *itemOneToPush = nullptr, *itemTwoToPush = nullptr; //Log(Logs::DebugLevel::Moderate, Logs::Debug, "cslot: [{}] aslot: [{}] cidx: [{}] aidx: [{}] act: [{}] dest: [{}]", // in_augment->container_slot, in_augment->augment_slot, in_augment->container_index, in_augment->augment_index, 
in_augment->augment_action, in_augment->dest_inst_id); EQ::ItemInstance *tobe_auged = nullptr, *old_aug = nullptr, *new_aug = nullptr, *aug = nullptr, *solvent = nullptr; EQ::InventoryProfile& user_inv = GetInv(); uint16 item_slot = in_augment->container_slot; uint16 solvent_slot = in_augment->augment_slot; uint8 mat = EQ::InventoryProfile::CalcMaterialFromSlot(item_slot); // for when player is augging a piece of equipment while they're wearing it if (item_slot == INVALID_INDEX || solvent_slot == INVALID_INDEX) { Message(Chat::Red, "Error: Invalid Aug Index."); return; } tobe_auged = user_inv.GetItem(item_slot); solvent = user_inv.GetItem(solvent_slot); if (!tobe_auged) { Message(Chat::Red, "Error: Invalid item passed for augmenting."); return; } if ((in_augment->augment_action == 1) || (in_augment->augment_action == 2)) { // Check for valid distiller if safely removing / swapping an augmentation if (!solvent) { old_aug = tobe_auged->GetAugment(in_augment->augment_index); if (!old_aug || old_aug->GetItem()->AugDistiller != 0) { LogError("Player tried to safely remove an augment without a distiller"); Message(Chat::Red, "Error: Missing an augmentation distiller for safely removing this augment."); return; } } else if (solvent->GetItem()->ItemType == EQ::item::ItemTypeAugmentationDistiller) { old_aug = tobe_auged->GetAugment(in_augment->augment_index); if (!old_aug) { LogError("Player tried to safely remove a nonexistent augment"); Message(Chat::Red, "Error: No augment found in slot %i for safely removing.", in_augment->augment_index); return; } else if (solvent->GetItem()->ID != old_aug->GetItem()->AugDistiller) { LogError("Player tried to safely remove an augment with the wrong distiller (item [{}] vs expected [{}])", solvent->GetItem()->ID, old_aug->GetItem()->AugDistiller); Message(Chat::Red, "Error: Wrong augmentation distiller for safely removing this augment."); return; } } else if (solvent->GetItem()->ItemType != EQ::item::ItemTypePerfectedAugmentationDistiller) { LogError("Player tried to safely remove an augment with a non-distiller item"); Message(Chat::Red, "Error: Invalid augmentation distiller for safely removing this augment."); return; } } switch (in_augment->augment_action) { case 0: // Adding an augment case 2: // Swapping augment new_aug = user_inv.GetItem(EQ::invslot::slotCursor); if (!new_aug) // Shouldn't get the OP code without the augment on the user's cursor, but maybe it's h4x. 
{ LogError("AugmentItem OpCode with 'Insert' or 'Swap' action received, but no augment on client's cursor"); Message(Chat::Red, "Error: No augment found on cursor for inserting."); return; } else { if (((tobe_auged->IsAugmentSlotAvailable(new_aug->GetAugmentType(), in_augment->augment_index)) != -1) && (tobe_auged->AvailableWearSlot(new_aug->GetItem()->Slots))) { old_aug = tobe_auged->RemoveAugment(in_augment->augment_index); if (old_aug) { // An old augment was removed in order to be replaced with the new one (augment_action 2) CalcBonuses(); std::vector<EQ::Any> args; args.push_back(old_aug); parse->EventItem(EVENT_UNAUGMENT_ITEM, this, tobe_auged, nullptr, "", in_augment->augment_index, &args); args.assign(1, tobe_auged); args.push_back(false); parse->EventItem(EVENT_AUGMENT_REMOVE, this, old_aug, nullptr, "", in_augment->augment_index, &args); } tobe_auged->PutAugment(in_augment->augment_index, *new_aug); tobe_auged->UpdateOrnamentationInfo(); aug = tobe_auged->GetAugment(in_augment->augment_index); if (aug) { std::vector<EQ::Any> args; args.push_back(aug); parse->EventItem(EVENT_AUGMENT_ITEM, this, tobe_auged, nullptr, "", in_augment->augment_index, &args); args.assign(1, tobe_auged); parse->EventItem(EVENT_AUGMENT_INSERT, this, aug, nullptr, "", in_augment->augment_index, &args); } else { Message(Chat::Red, "Error: Could not properly insert augmentation into augment slot %i. Aborting.", in_augment->augment_index); return; } itemOneToPush = tobe_auged->Clone(); if (old_aug) { itemTwoToPush = old_aug->Clone(); } // Must push items after the items in inventory are deleted - necessary due to lore items... if (itemOneToPush) { DeleteItemInInventory(item_slot, 0, true); DeleteItemInInventory(EQ::invslot::slotCursor, new_aug->IsStackable() ? 1 : 0, true); if (solvent) { // Consume the augment distiller DeleteItemInInventory(solvent_slot, solvent->IsStackable() ? 1 : 0, true); } if (itemTwoToPush) { // This is a swap. Return the old aug to the player's cursor. if (!PutItemInInventory(EQ::invslot::slotCursor, *itemTwoToPush, true)) { LogError("Problem returning old augment to player's cursor after augmentation swap"); Message(Chat::Yellow, "Error: Failed to retrieve old augment after augmentation swap!"); } } if (PutItemInInventory(item_slot, *itemOneToPush, true)) { // Successfully added an augment to the item CalcBonuses(); if (mat != EQ::textures::materialInvalid) { SendWearChange(mat); // Visible item augged while equipped. Send WC in case ornamentation changed. } } else { Message(Chat::Red, "Error: No available slot for end result. Please free up the augment slot."); } } else { Message(Chat::Red, "Error in cloning item for augment. Aborted."); } } else { Message(Chat::Red, "Error: No available slot for augment in that item."); } } break; case 1: // Removing augment safely (distiller) aug = tobe_auged->GetAugment(in_augment->augment_index); if (aug) { std::vector<EQ::Any> args; args.push_back(aug); parse->EventItem(EVENT_UNAUGMENT_ITEM, this, tobe_auged, nullptr, "", in_augment->augment_index, &args); args.assign(1, tobe_auged); args.push_back(false); parse->EventItem(EVENT_AUGMENT_REMOVE, this, aug, nullptr, "", in_augment->augment_index, &args); } else { Message(Chat::Red, "Error: Could not find augmentation to remove at index %i. 
Aborting.", in_augment->augment_index); return; } old_aug = tobe_auged->RemoveAugment(in_augment->augment_index); tobe_auged->UpdateOrnamentationInfo(); itemOneToPush = tobe_auged->Clone(); if (old_aug) itemTwoToPush = old_aug->Clone(); if (itemOneToPush && itemTwoToPush) { // Consume the augment distiller if (solvent) DeleteItemInInventory(solvent_slot, solvent->IsStackable() ? 1 : 0, true); // Remove the augmented item DeleteItemInInventory(item_slot, 0, true); // Replace it with the unaugmented item if (!PutItemInInventory(item_slot, *itemOneToPush, true)) { LogError("Problem returning equipment item to player's inventory after safe augment removal"); Message(Chat::Yellow, "Error: Failed to return item after de-augmentation!"); } CalcBonuses(); if (mat != EQ::textures::materialInvalid) { SendWearChange(mat); // Visible item augged while equipped. Send WC in case ornamentation changed. } // Drop the removed augment on the player's cursor if (!PutItemInInventory(EQ::invslot::slotCursor, *itemTwoToPush, true)) { LogError("Problem returning augment to player's cursor after safe removal"); Message(Chat::Yellow, "Error: Failed to return augment after removal from item!"); return; } } break; case 3: // Destroying augment (formerly done in birdbath/sealer with a solvent) // RoF client does not require an augmentation solvent for destroying an augmentation in an item. // Augments can be destroyed with a right click -> Destroy at any time. aug = tobe_auged->GetAugment(in_augment->augment_index); if (aug) { std::vector<EQ::Any> args; args.push_back(aug); parse->EventItem(EVENT_UNAUGMENT_ITEM, this, tobe_auged, nullptr, "", in_augment->augment_index, &args); args.assign(1, tobe_auged); args.push_back(true); parse->EventItem(EVENT_AUGMENT_REMOVE, this, aug, nullptr, "", in_augment->augment_index, &args); } else { Message(Chat::Red, "Error: Could not find augmentation to remove at index %i. 
Aborting."); return; } tobe_auged->DeleteAugment(in_augment->augment_index); tobe_auged->UpdateOrnamentationInfo(); itemOneToPush = tobe_auged->Clone(); if (itemOneToPush) { DeleteItemInInventory(item_slot, 0, true); if (!PutItemInInventory(item_slot, *itemOneToPush, true)) { LogError("Problem returning equipment item to player's inventory after augment deletion"); Message(Chat::Yellow, "Error: Failed to return item after destroying augment!"); } } CalcBonuses(); if (mat != EQ::textures::materialInvalid) { SendWearChange(mat); } break; default: // Unknown LogInventory("Unrecognized augmentation action - cslot: [{}] aslot: [{}] cidx: [{}] aidx: [{}] act: [{}] dest: [{}]", in_augment->container_slot, in_augment->augment_slot, in_augment->container_index, in_augment->augment_index, in_augment->augment_action, in_augment->dest_inst_id); break; } } else { // Delegate to tradeskill object to perform combine Object::HandleAugmentation(this, in_augment, m_tradeskill_object); } return; } void Client::Handle_OP_AutoAttack(const EQApplicationPacket *app) { if (app->size != 4) { LogError("OP size error: OP_AutoAttack expected:4 got:[{}]", app->size); return; } if (app->pBuffer[0] == 0) { auto_attack = false; if (IsAIControlled()) { return; } attack_timer.Disable(); ranged_timer.Disable(); attack_dw_timer.Disable(); m_AutoAttackPosition = glm::vec4(); m_AutoAttackTargetLocation = glm::vec3(); aa_los_them_mob = nullptr; } else if (app->pBuffer[0] == 1) { auto_attack = true; auto_fire = false; if (IsAIControlled()) { return; } SetAttackTimer(); if (GetTarget()) { aa_los_them_mob = GetTarget(); m_AutoAttackPosition = GetPosition(); m_AutoAttackTargetLocation = glm::vec3(aa_los_them_mob->GetPosition()); los_status = CheckLosFN(aa_los_them_mob); los_status_facing = IsFacingMob(aa_los_them_mob); } else { m_AutoAttackPosition = GetPosition(); m_AutoAttackTargetLocation = glm::vec3(); aa_los_them_mob = nullptr; los_status = false; los_status_facing = false; } } } void Client::Handle_OP_AutoAttack2(const EQApplicationPacket *app) { return; } void Client::Handle_OP_AutoFire(const EQApplicationPacket *app) { if (app->size != sizeof(bool)) { LogDebug("Size mismatch in OP_AutoFire expected [{}] got [{}]", sizeof(bool), app->size); DumpPacket(app); return; } bool *af = (bool*)app->pBuffer; auto_fire = *af; auto_attack = false; SetAttackTimer(); } void Client::Handle_OP_Bandolier(const EQApplicationPacket *app) { // Although there are three different structs for OP_Bandolier, they are all the same size. 
// if (app->size != sizeof(BandolierCreate_Struct)) { LogDebug("Size mismatch in OP_Bandolier expected [{}] got [{}]", sizeof(BandolierCreate_Struct), app->size); DumpPacket(app); return; } BandolierCreate_Struct *bs = (BandolierCreate_Struct*)app->pBuffer; switch (bs->Action) { case bandolierCreate: CreateBandolier(app); break; case bandolierRemove: RemoveBandolier(app); break; case bandolierSet: SetBandolier(app); break; default: LogDebug("Unknown Bandolier action [{}]", bs->Action); break; } } void Client::Handle_OP_BankerChange(const EQApplicationPacket *app) { if (app->size != sizeof(BankerChange_Struct) && app->size != 4) //Titanium only sends 4 Bytes for this { LogDebug("Size mismatch in OP_BankerChange expected [{}] got [{}]", sizeof(BankerChange_Struct), app->size); DumpPacket(app); return; } uint32 distance = 0; NPC *banker = entity_list.GetClosestBanker(this, distance); if (!banker || distance > USE_NPC_RANGE2) { auto hacked_string = fmt::format( "Player tried to make use of a banker(money) but {} is non-existant or too far away ({} units).", banker ? banker->GetName() : "UNKNOWN NPC", distance); database.SetMQDetectionFlag(AccountName(), GetName(), hacked_string, zone->GetShortName()); return; } auto outapp = new EQApplicationPacket(OP_BankerChange, nullptr, sizeof(BankerChange_Struct)); BankerChange_Struct *bc = (BankerChange_Struct *)outapp->pBuffer; if (m_pp.platinum < 0) m_pp.platinum = 0; if (m_pp.gold < 0) m_pp.gold = 0; if (m_pp.silver < 0) m_pp.silver = 0; if (m_pp.copper < 0) m_pp.copper = 0; if (m_pp.platinum_bank < 0) m_pp.platinum_bank = 0; if (m_pp.gold_bank < 0) m_pp.gold_bank = 0; if (m_pp.silver_bank < 0) m_pp.silver_bank = 0; if (m_pp.copper_bank < 0) m_pp.copper_bank = 0; uint64 cp = static_cast<uint64>(m_pp.copper) + (static_cast<uint64>(m_pp.silver) * 10) + (static_cast<uint64>(m_pp.gold) * 100) + (static_cast<uint64>(m_pp.platinum) * 1000); m_pp.copper = cp % 10; cp /= 10; m_pp.silver = cp % 10; cp /= 10; m_pp.gold = cp % 10; cp /= 10; m_pp.platinum = cp; cp = static_cast<uint64>(m_pp.copper_bank) + (static_cast<uint64>(m_pp.silver_bank) * 10) + (static_cast<uint64>(m_pp.gold_bank) * 100) + (static_cast<uint64>(m_pp.platinum_bank) * 1000); m_pp.copper_bank = cp % 10; cp /= 10; m_pp.silver_bank = cp % 10; cp /= 10; m_pp.gold_bank = cp % 10; cp /= 10; m_pp.platinum_bank = cp; bc->copper = m_pp.copper; bc->silver = m_pp.silver; bc->gold = m_pp.gold; bc->platinum = m_pp.platinum; bc->copper_bank = m_pp.copper_bank; bc->silver_bank = m_pp.silver_bank; bc->gold_bank = m_pp.gold_bank; bc->platinum_bank = m_pp.platinum_bank; FastQueuePacket(&outapp); return; } void Client::Handle_OP_Barter(const EQApplicationPacket *app) { if (app->size < 4) { LogDebug("OP_Barter packet below minimum expected size. The packet was [{}] bytes", app->size); DumpPacket(app); return; } char* Buf = (char *)app->pBuffer; // The first 4 bytes of the packet determine the action. A lot of Barter packets require the // packet the client sent, sent back to it as an acknowledgement. 
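// Dispatch on the 4-byte action code decoded from the start of the packet.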
// uint32 Action = VARSTRUCT_DECODE_TYPE(uint32, Buf); switch (Action) { case Barter_BuyerSearch: { BuyerItemSearch(app); break; } case Barter_SellerSearch: { BarterSearchRequest_Struct *bsr = (BarterSearchRequest_Struct*)app->pBuffer; SendBuyerResults(bsr->SearchString, bsr->SearchID); break; } case Barter_BuyerModeOn: { if (!Trader) { ToggleBuyerMode(true); } else { Buf = (char *)app->pBuffer; VARSTRUCT_ENCODE_TYPE(uint32, Buf, Barter_BuyerModeOff); Message(Chat::Red, "You cannot be a Trader and Buyer at the same time."); } QueuePacket(app); break; } case Barter_BuyerModeOff: { QueuePacket(app); ToggleBuyerMode(false); break; } case Barter_BuyerItemUpdate: { UpdateBuyLine(app); break; } case Barter_BuyerItemRemove: { BuyerRemoveItem_Struct* bris = (BuyerRemoveItem_Struct*)app->pBuffer; database.RemoveBuyLine(CharacterID(), bris->BuySlot); QueuePacket(app); break; } case Barter_SellItem: { SellToBuyer(app); break; } case Barter_BuyerInspectBegin: { ShowBuyLines(app); break; } case Barter_BuyerInspectEnd: { BuyerInspectRequest_Struct* bir = (BuyerInspectRequest_Struct*)app->pBuffer; Client *Buyer = entity_list.GetClientByID(bir->BuyerID); if (Buyer) Buyer->WithCustomer(0); break; } case Barter_BarterItemInspect: { BarterItemSearchLinkRequest_Struct* bislr = (BarterItemSearchLinkRequest_Struct*)app->pBuffer; const EQ::ItemData* item = database.GetItem(bislr->ItemID); if (!item) Message(Chat::Red, "Error: This item does not exist!"); else { EQ::ItemInstance* inst = database.CreateItem(item); if (inst) { SendItemPacket(0, inst, ItemPacketViewLink); safe_delete(inst); } } break; } case Barter_Welcome: { SendBazaarWelcome(); break; } case Barter_WelcomeMessageUpdate: { BuyerWelcomeMessageUpdate_Struct* bwmu = (BuyerWelcomeMessageUpdate_Struct*)app->pBuffer; SetBuyerWelcomeMessage(bwmu->WelcomeMessage); break; } case Barter_BuyerItemInspect: { BuyerItemSearchLinkRequest_Struct* bislr = (BuyerItemSearchLinkRequest_Struct*)app->pBuffer; const EQ::ItemData* item = database.GetItem(bislr->ItemID); if (!item) Message(Chat::Red, "Error: This item does not exist!"); else { EQ::ItemInstance* inst = database.CreateItem(item); if (inst) { SendItemPacket(0, inst, ItemPacketViewLink); safe_delete(inst); } } break; } case Barter_Unknown23: { // Sent by SoD client for no discernible reason. 
break; } default: Message(Chat::Red, "Unrecognised Barter action."); LogTrading("Unrecognised Barter Action [{}]", Action); } } void Client::Handle_OP_BazaarInspect(const EQApplicationPacket *app) { if (app->size != sizeof(BazaarInspect_Struct)) { LogError("Invalid size for BazaarInspect_Struct: Expected [{}], Got [{}]", sizeof(BazaarInspect_Struct), app->size); return; } BazaarInspect_Struct* bis = (BazaarInspect_Struct*)app->pBuffer; const EQ::ItemData* item = database.GetItem(bis->ItemID); if (!item) { Message(Chat::Red, "Error: This item does not exist!"); return; } EQ::ItemInstance* inst = database.CreateItem(item); if (inst) { SendItemPacket(0, inst, ItemPacketViewLink); safe_delete(inst); } return; } void Client::Handle_OP_BazaarSearch(const EQApplicationPacket *app) { if (app->size == sizeof(BazaarSearch_Struct)) { BazaarSearch_Struct* bss = (BazaarSearch_Struct*)app->pBuffer; this->SendBazaarResults(bss->TraderID, bss->Class_, bss->Race, bss->ItemStat, bss->Slot, bss->Type, bss->Name, bss->MinPrice * 1000, bss->MaxPrice * 1000); } else if (app->size == sizeof(BazaarWelcome_Struct)) { BazaarWelcome_Struct* bws = (BazaarWelcome_Struct*)app->pBuffer; if (bws->Beginning.Action == BazaarWelcome) SendBazaarWelcome(); } else if (app->size == sizeof(NewBazaarInspect_Struct)) { NewBazaarInspect_Struct *nbis = (NewBazaarInspect_Struct*)app->pBuffer; Client *c = entity_list.GetClientByName(nbis->Name); if (c) { EQ::ItemInstance* inst = c->FindTraderItemBySerialNumber(nbis->SerialNumber); if (inst) SendItemPacket(0, inst, ItemPacketViewLink); } return; } else { LogTrading("Malformed BazaarSearch_Struct packet received, ignoring"); LogError("Malformed BazaarSearch_Struct packet received, ignoring\n"); } return; } void Client::Handle_OP_Begging(const EQApplicationPacket *app) { if (!p_timers.Expired(&database, pTimerBeggingPickPocket, false)) { Message(Chat::Red, "Ability recovery time not yet met."); auto outapp = new EQApplicationPacket(OP_Begging, sizeof(BeggingResponse_Struct)); BeggingResponse_Struct *brs = (BeggingResponse_Struct*)outapp->pBuffer; brs->Result = 0; FastQueuePacket(&outapp); return; } if (!HasSkill(EQ::skills::SkillBegging) || !GetTarget()) return; if (GetTarget()->GetClass() == LDON_TREASURE) return; p_timers.Start(pTimerBeggingPickPocket, 8); auto outapp = new EQApplicationPacket(OP_Begging, sizeof(BeggingResponse_Struct)); BeggingResponse_Struct *brs = (BeggingResponse_Struct*)outapp->pBuffer; brs->Result = 0; // Default, Fail. if (GetTarget() == this) { FastQueuePacket(&outapp); return; } int RandomChance = zone->random.Int(0, 100); int ChanceToAttack = 0; if (GetLevel() > GetTarget()->GetLevel()) ChanceToAttack = zone->random.Int(0, 15); else ChanceToAttack = zone->random.Int(((this->GetTarget()->GetLevel() - this->GetLevel()) * 10) - 5, ((this->GetTarget()->GetLevel() - this->GetLevel()) * 10)); if (ChanceToAttack < 0) ChanceToAttack = -ChanceToAttack; if (RandomChance < ChanceToAttack) { GetTarget()->Attack(this); QueuePacket(outapp); safe_delete(outapp); return; } uint16 CurrentSkill = GetSkill(EQ::skills::SkillBegging); float ChanceToBeg = ((float)(CurrentSkill / 700.0f) + 0.15f) * 100; if (RandomChance < ChanceToBeg) { brs->Amount = zone->random.Int(1, 10); // This needs some work to determine how much money they can beg, based on skill level etc.
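// Below 50 skill the payout is copper; at 50 or higher it is silver (the rolled amount is added as Amount * 10 copper).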
if (CurrentSkill < 50) { brs->Result = 4; // Copper AddMoneyToPP(brs->Amount, false); } else { brs->Result = 3; // Silver AddMoneyToPP(brs->Amount * 10, false); } } QueuePacket(outapp); safe_delete(outapp); CheckIncreaseSkill(EQ::skills::SkillBegging, nullptr, -10); } void Client::Handle_OP_Bind_Wound(const EQApplicationPacket *app) { if (app->size != sizeof(BindWound_Struct)) { LogError("Size mismatch for Bind wound packet"); DumpPacket(app); } BindWound_Struct* bind_in = (BindWound_Struct*)app->pBuffer; Mob* bindmob = entity_list.GetMob(bind_in->to); if (!bindmob) { LogError("Bindwound on non-existent mob from [{}]", this->GetName()); } else { LogDebug("BindWound in: to:\'[{}]\' from=\'[{}]\'", bindmob->GetName(), GetName()); BindWound(bindmob, true); } return; } void Client::Handle_OP_BlockedBuffs(const EQApplicationPacket *app) { if (!RuleB(Spells, EnableBlockedBuffs)) return; if (app->size != sizeof(BlockedBuffs_Struct)) { LogDebug("Size mismatch in OP_BlockedBuffs expected [{}] got [{}]", sizeof(BlockedBuffs_Struct), app->size); DumpPacket(app); return; } std::set<uint32>::iterator Iterator; BlockedBuffs_Struct *bbs = (BlockedBuffs_Struct*)app->pBuffer; std::set<uint32> *BlockedBuffs = bbs->Pet ? &PetBlockedBuffs : &PlayerBlockedBuffs; if (bbs->Initialise == 1) { BlockedBuffs->clear(); for (unsigned int i = 0; i < BLOCKED_BUFF_COUNT; ++i) { if ((IsValidSpell(bbs->SpellID[i])) && IsBeneficialSpell(bbs->SpellID[i]) && !spells[bbs->SpellID[i]].no_block) { if (BlockedBuffs->find(bbs->SpellID[i]) == BlockedBuffs->end()) BlockedBuffs->insert(bbs->SpellID[i]); } } auto outapp = new EQApplicationPacket(OP_BlockedBuffs, sizeof(BlockedBuffs_Struct)); BlockedBuffs_Struct *obbs = (BlockedBuffs_Struct*)outapp->pBuffer; for (unsigned int i = 0; i < BLOCKED_BUFF_COUNT; ++i) obbs->SpellID[i] = -1; obbs->Pet = bbs->Pet; obbs->Initialise = 1; obbs->Flags = 0x54; obbs->Count = BlockedBuffs->size(); unsigned int Element = 0; Iterator = BlockedBuffs->begin(); while (Iterator != BlockedBuffs->end()) { obbs->SpellID[Element++] = (*Iterator); ++Iterator; } FastQueuePacket(&outapp); return; } if ((bbs->Initialise == 0) && (bbs->Count > 0)) { auto outapp = new EQApplicationPacket(OP_BlockedBuffs, sizeof(BlockedBuffs_Struct)); BlockedBuffs_Struct *obbs = (BlockedBuffs_Struct*)outapp->pBuffer; for (unsigned int i = 0; i < BLOCKED_BUFF_COUNT; ++i) obbs->SpellID[i] = -1; obbs->Pet = bbs->Pet; obbs->Initialise = 0; obbs->Flags = 0x54; for (unsigned int i = 0; i < BLOCKED_BUFF_COUNT; ++i) { if (!IsValidSpell(bbs->SpellID[i]) || !IsBeneficialSpell(bbs->SpellID[i]) || spells[bbs->SpellID[i]].no_block) continue; if ((BlockedBuffs->size() < BLOCKED_BUFF_COUNT) && (BlockedBuffs->find(bbs->SpellID[i]) == BlockedBuffs->end())) BlockedBuffs->insert(bbs->SpellID[i]); } obbs->Count = BlockedBuffs->size(); Iterator = BlockedBuffs->begin(); unsigned int Element = 0; while (Iterator != BlockedBuffs->end()) { obbs->SpellID[Element++] = (*Iterator); ++Iterator; } FastQueuePacket(&outapp); } } void Client::Handle_OP_BoardBoat(const EQApplicationPacket *app) { // this sends unclean mob name, so capped at 64 // a_boat006 if (app->size <= 5 || app->size > 64) { LogError("Size mismatch in OP_BoardBoat.
Expected greater than 5 less than 64, got [{}]", app->size); DumpPacket(app); return; } char boatname[64]; memcpy(boatname, app->pBuffer, app->size); boatname[63] = '\0'; Mob* boat = entity_list.GetMob(boatname); if (!boat || !boat->IsControllableBoat()) { return; } controlling_boat_id = boat->GetID(); // set the client's BoatID to show that it's on this boat Message(0, "Board boat: %s", boatname); return; } void Client::Handle_OP_Buff(const EQApplicationPacket *app) { if (app->size != sizeof(SpellBuffPacket_Struct)) { LogError("Size mismatch in OP_Buff. expected [{}] got [{}]", sizeof(SpellBuffPacket_Struct), app->size); DumpPacket(app); return; } SpellBuffPacket_Struct* sbf = (SpellBuffPacket_Struct*)app->pBuffer; uint32 spid = sbf->buff.spellid; LogSpells("Client requested that buff with spell id [{}] be canceled", spid); //something about IsDetrimentalSpell() crashes this portion of code.. //tbh we shouldn't use it anyway since this is a simple red vs blue buff check and //isdetrimentalspell() is much more complex if (spid == 0xFFFF || (IsValidSpell(spid) && (spells[spid].goodEffect == 0))) QueuePacket(app); else BuffFadeBySpellID(spid); return; } void Client::Handle_OP_BuffRemoveRequest(const EQApplicationPacket *app) { // In SoD, this is used for clicking off Pet Buffs only. In Underfoot, it is used both for Client and Pets // The payload contains buffslot and EntityID only, so we must check if the EntityID is ours or our pets. // VERIFY_PACKET_LENGTH(OP_BuffRemoveRequest, app, BuffRemoveRequest_Struct); BuffRemoveRequest_Struct *brrs = (BuffRemoveRequest_Struct*)app->pBuffer; Mob *m = nullptr; if (brrs->EntityID == GetID()) { m = this; } else if (brrs->EntityID == GetPetID()) { m = GetPet(); } #ifdef BOTS else { Mob* bot_test = entity_list.GetMob(brrs->EntityID); if (bot_test && bot_test->IsBot() && bot_test->GetOwner() == this) m = bot_test; } #endif if (!m) return; if (brrs->SlotID > (uint32)m->GetMaxTotalSlots()) return; uint16 SpellID = m->GetSpellIDFromSlot(brrs->SlotID); if (SpellID && IsBeneficialSpell(SpellID) && !spells[SpellID].no_remove) m->BuffFadeBySlot(brrs->SlotID, true); } void Client::Handle_OP_Bug(const EQApplicationPacket *app) { if (!RuleB(Bugs, ReportingSystemActive)) { Message(0, "Bug reporting is disabled on this server."); return; } if (app->size != sizeof(BugReport_Struct)) { printf("Wrong size of BugReport_Struct got %d expected %zu!\n", app->size, sizeof(BugReport_Struct)); } else { BugReport_Struct* bug_report = (BugReport_Struct*)app->pBuffer; if (RuleB(Bugs, UseOldReportingMethod)) database.RegisterBug(bug_report); else database.RegisterBug(this, bug_report); } return; } void Client::Handle_OP_Camp(const EQApplicationPacket *app) { #ifdef BOTS // This block is necessary to clean up any bot objects owned by a Client Bot::BotOrderCampAll(this); // Evidently, this is bad under certain conditions and causes crashes... 
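// Resolve whether the clicked-off buff belongs to this client, its pet, or (when bots are compiled in) a bot this client owns; only beneficial buffs not flagged no_remove are faded.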
// Group and Raid code really needs to be overhauled to account for non-client types (mercs and bots) //auto group = GetGroup(); //if (group && group->GroupCount() < 2) // group->DisbandGroup(); #endif if (IsLFP()) worldserver.StopLFP(CharacterID()); if (GetGM()) { OnDisconnect(true); return; } camp_timer.Start(29000, true); return; } void Client::Handle_OP_CancelTask(const EQApplicationPacket *app) { if (app->size != sizeof(CancelTask_Struct)) { LogDebug("Size mismatch in OP_CancelTask expected [{}] got [{}]", sizeof(CancelTask_Struct), app->size); DumpPacket(app); return; } CancelTask_Struct *cts = (CancelTask_Struct*)app->pBuffer; if (RuleB(TaskSystem, EnableTaskSystem) && task_state) task_state->CancelTask(this, cts->SequenceNumber, static_cast<TaskType>(cts->type)); } void Client::Handle_OP_CancelTrade(const EQApplicationPacket *app) { if (app->size != sizeof(CancelTrade_Struct)) { LogError("Wrong size: OP_CancelTrade, size=[{}], expected [{}]", app->size, sizeof(CancelTrade_Struct)); return; } Mob* with = trade->With(); if (with && with->IsClient()) { CancelTrade_Struct* msg = (CancelTrade_Struct*)app->pBuffer; // Forward cancel packet to other client msg->fromid = with->GetID(); //msg->action = 1; with->CastToClient()->QueuePacket(app); // Put trade items/cash back into inventory FinishTrade(this); trade->Reset(); } else if (with) { CancelTrade_Struct* msg = (CancelTrade_Struct*)app->pBuffer; msg->fromid = with->GetID(); QueuePacket(app); FinishTrade(this); trade->Reset(); } EQApplicationPacket end_trade1(OP_FinishWindow, 0); QueuePacket(&end_trade1); EQApplicationPacket end_trade2(OP_FinishWindow2, 0); QueuePacket(&end_trade2); return; } void Client::Handle_OP_CastSpell(const EQApplicationPacket *app) { using EQ::spells::CastingSlot; if (app->size != sizeof(CastSpell_Struct)) { std::cout << "Wrong size: OP_CastSpell, size=" << app->size << ", expected " << sizeof(CastSpell_Struct) << std::endl; return; } if (IsAIControlled()) { this->MessageString(Chat::Red, NOT_IN_CONTROL); //Message(Chat::Red, "You cant cast right now, you arent in control of yourself!"); return; } // Hack for broken RoF2 which allows casting after a zoned IVU/IVA if (invisible_undead || invisible_animals) { BuffFadeByEffect(SE_InvisVsAnimals); BuffFadeByEffect(SE_InvisVsUndead); BuffFadeByEffect(SE_InvisVsUndead2); BuffFadeByEffect(SE_Invisibility); // Included per JJ for completeness - client handles this one atm } CastSpell_Struct* castspell = (CastSpell_Struct*)app->pBuffer; m_TargetRing = glm::vec3(castspell->x_pos, castspell->y_pos, castspell->z_pos); LogSpells("OP CastSpell: slot [{}] spell [{}] target [{}] inv [{}]", castspell->slot, castspell->spell_id, castspell->target_id, (unsigned long)castspell->inventoryslot); CastingSlot slot = static_cast<CastingSlot>(castspell->slot); /* Memorized Spell */ if (m_pp.mem_spells[castspell->slot] && m_pp.mem_spells[castspell->slot] == castspell->spell_id) { uint16 spell_to_cast = 0; if (castspell->slot < EQ::spells::SPELL_GEM_COUNT) { spell_to_cast = m_pp.mem_spells[castspell->slot]; if (spell_to_cast != castspell->spell_id) { InterruptSpell(castspell->spell_id); //CHEATER!!! 
return; } } else if (castspell->slot >= EQ::spells::SPELL_GEM_COUNT) { InterruptSpell(); return; } CastSpell(spell_to_cast, castspell->target_id, slot); } /* Spell Slot or Potion Belt Slot */ else if (slot == CastingSlot::Item || slot == CastingSlot::PotionBelt) // ITEM or POTION cast { if (m_inv.SupportsClickCasting(castspell->inventoryslot) || slot == CastingSlot::PotionBelt) // sanity check { // packet field types will be reviewed as packet transistions occur const EQ::ItemInstance* inst = m_inv[castspell->inventoryslot]; //slot values are int16, need to check packet on this field //bool cancast = true; if (inst && inst->IsClassCommon()) { const EQ::ItemData* item = inst->GetItem(); if (item->Click.Effect != (uint32)castspell->spell_id) { database.SetMQDetectionFlag(account_name, name, "OP_CastSpell with item, tried to cast a different spell.", zone->GetShortName()); InterruptSpell(castspell->spell_id); //CHEATER!! return; } if ((item->Click.Type == EQ::item::ItemEffectClick) || (item->Click.Type == EQ::item::ItemEffectExpendable) || (item->Click.Type == EQ::item::ItemEffectEquipClick) || (item->Click.Type == EQ::item::ItemEffectClick2)) { if (item->Click.Level2 > 0) { if (GetLevel() >= item->Click.Level2) { EQ::ItemInstance* p_inst = (EQ::ItemInstance*)inst; int i = parse->EventItem(EVENT_ITEM_CLICK_CAST, this, p_inst, nullptr, "", castspell->inventoryslot); if (i == 0) { CastSpell(item->Click.Effect, castspell->target_id, slot, item->CastTime, 0, 0, castspell->inventoryslot); } else { InterruptSpell(castspell->spell_id); return; } } else { database.SetMQDetectionFlag(account_name, name, "OP_CastSpell with item, did not meet req level.", zone->GetShortName()); Message(0, "Error: level not high enough.", castspell->inventoryslot); InterruptSpell(castspell->spell_id); } } else { EQ::ItemInstance* p_inst = (EQ::ItemInstance*)inst; int i = parse->EventItem(EVENT_ITEM_CLICK_CAST, this, p_inst, nullptr, "", castspell->inventoryslot); if (i == 0) { CastSpell(item->Click.Effect, castspell->target_id, slot, item->CastTime, 0, 0, castspell->inventoryslot); } else { InterruptSpell(castspell->spell_id); return; } } } else { Message(0, "Error: unknown item->Click.Type (0x%02x)", item->Click.Type); } } else { Message(0, "Error: item not found in inventory slot #%i", castspell->inventoryslot); InterruptSpell(castspell->spell_id); } } else { Message(0, "Error: castspell->inventoryslot >= %i (0x%04x)", EQ::invslot::slotCursor, castspell->inventoryslot); InterruptSpell(castspell->spell_id); } } /* Discipline -- older clients use the same slot as items, but we translate to it's own */ else if (slot == CastingSlot::Discipline) { if (!UseDiscipline(castspell->spell_id, castspell->target_id)) { LogSpells("Unknown ability being used by [{}], spell being cast is: [{}]\n", GetName(), castspell->spell_id); InterruptSpell(castspell->spell_id); return; } } /* ABILITY cast (LoH and Harm Touch) */ else if (slot == CastingSlot::Ability) { uint16 spell_to_cast = 0; if (castspell->spell_id == SPELL_LAY_ON_HANDS && GetClass() == PALADIN) { if (!p_timers.Expired(&database, pTimerLayHands)) { Message(Chat::Red, "Ability recovery time not yet met."); InterruptSpell(castspell->spell_id); return; } spell_to_cast = SPELL_LAY_ON_HANDS; p_timers.Start(pTimerLayHands, LayOnHandsReuseTime); } else if ((castspell->spell_id == SPELL_HARM_TOUCH || castspell->spell_id == SPELL_HARM_TOUCH2) && GetClass() == SHADOWKNIGHT) { if (!p_timers.Expired(&database, pTimerHarmTouch)) { Message(Chat::Red, "Ability recovery time not yet met."); 
InterruptSpell(castspell->spell_id); return; } // determine which version of HT we are casting based on level if (GetLevel() < 40) spell_to_cast = SPELL_HARM_TOUCH; else spell_to_cast = SPELL_HARM_TOUCH2; p_timers.Start(pTimerHarmTouch, HarmTouchReuseTime); } if (spell_to_cast > 0) // if we've matched LoH or HT, cast now CastSpell(spell_to_cast, castspell->target_id, slot); } return; } void Client::Handle_OP_ChannelMessage(const EQApplicationPacket *app) { ChannelMessage_Struct* cm = (ChannelMessage_Struct*)app->pBuffer; if (app->size < sizeof(ChannelMessage_Struct)) { std::cout << "Wrong size " << app->size << ", should be " << sizeof(ChannelMessage_Struct) << "+ on 0x" << std::hex << std::setfill('0') << std::setw(4) << app->GetOpcode() << std::dec << std::endl; return; } if (IsAIControlled() && !GetGM()) { Message(Chat::Red, "You try to speak but cant move your mouth!"); return; } uint8 skill_in_language = 100; if (cm->language < MAX_PP_LANGUAGE) { skill_in_language = m_pp.languages[cm->language]; } ChannelMessageReceived(cm->chan_num, cm->language, skill_in_language, cm->message, cm->targetname); return; } void Client::Handle_OP_ClearBlockedBuffs(const EQApplicationPacket *app) { if (!RuleB(Spells, EnableBlockedBuffs)) return; if (app->size != 1) { LogDebug("Size mismatch in OP_ClearBlockedBuffs expected 1 got [{}]", app->size); DumpPacket(app); return; } bool Pet = app->pBuffer[0]; if (Pet) PetBlockedBuffs.clear(); else PlayerBlockedBuffs.clear(); QueuePacket(app); } void Client::Handle_OP_ClearNPCMarks(const EQApplicationPacket *app) { if (app->size != 0) { LogDebug("Size mismatch in OP_ClearNPCMarks expected 0 got [{}]", app->size); DumpPacket(app); return; } Group *g = GetGroup(); if (g) g->ClearAllNPCMarks(); } void Client::Handle_OP_ClearSurname(const EQApplicationPacket *app) { ChangeLastName(""); } void Client::Handle_OP_ClickDoor(const EQApplicationPacket *app) { if (app->size != sizeof(ClickDoor_Struct)) { LogError("Wrong size: OP_ClickDoor, size=[{}], expected [{}]", app->size, sizeof(ClickDoor_Struct)); return; } ClickDoor_Struct* cd = (ClickDoor_Struct*)app->pBuffer; Doors* currentdoor = entity_list.FindDoor(cd->doorid); if (!currentdoor) { Message(0, "Unable to find door, please notify a GM (DoorID: %i).", cd->doorid); return; } // set door selected if (IsDevToolsEnabled()) { SetDoorToolEntityId(currentdoor->GetEntityID()); DoorManipulation::CommandHeader(this); Message( Chat::White, fmt::format( "Door ({}) [{}]", currentdoor->GetEntityID(), EQ::SayLinkEngine::GenerateQuestSaylink("#door edit", false, "#door edit") ).c_str() ); } char buf[20]; snprintf(buf, 19, "%u", cd->doorid); buf[19] = '\0'; std::vector<EQ::Any> args; args.push_back(currentdoor); parse->EventPlayer(EVENT_CLICK_DOOR, this, buf, 0, &args); currentdoor->HandleClick(this, 0); return; } void Client::Handle_OP_ClickObject(const EQApplicationPacket *app) { if (app->size != sizeof(ClickObject_Struct)) { LogError("Invalid size on ClickObject_Struct: Expected [{}], Got [{}]", sizeof(ClickObject_Struct), app->size); return; } ClickObject_Struct* click_object = (ClickObject_Struct*)app->pBuffer; Entity* entity = entity_list.GetID(click_object->drop_id); if (entity && entity->IsObject()) { Object* object = entity->CastToObject(); object->HandleClick(this, click_object); std::vector<EQ::Any> args; args.push_back(object); char buf[10]; snprintf(buf, 9, "%u", click_object->drop_id); buf[9] = '\0'; parse->EventPlayer(EVENT_CLICK_OBJECT, this, buf, GetID(), &args); } // Observed in RoF after OP_ClickObjectAction: 
//EQApplicationPacket end_trade2(OP_FinishWindow2, 0); //QueuePacket(&end_trade2); return; } void Client::Handle_OP_ClickObjectAction(const EQApplicationPacket *app) { if (app->size == 0) { // RoF sends this packet 0 sized when switching from auto-combine to experiment windows. // Not completely sure if 0 sized is for this or for closing objects as commented out below EQApplicationPacket end_trade1(OP_FinishWindow, 0); QueuePacket(&end_trade1); EQApplicationPacket end_trade2(OP_FinishWindow2, 0); QueuePacket(&end_trade2); // RoF sends a 0 sized packet for closing objects if (GetTradeskillObject() && ClientVersion() >= EQ::versions::ClientVersion::RoF) GetTradeskillObject()->CastToObject()->Close(); return; } else { if (app->size != sizeof(ClickObjectAction_Struct)) { LogError("Invalid size on OP_ClickObjectAction: Expected [{}], Got [{}]", sizeof(ClickObjectAction_Struct), app->size); return; } ClickObjectAction_Struct* oos = (ClickObjectAction_Struct*)app->pBuffer; Entity* entity = entity_list.GetEntityObject(oos->drop_id); if (entity && entity->IsObject()) { Object* object = entity->CastToObject(); if (oos->open == 0) { object->Close(); } else { LogError("Unsupported action [{}] in OP_ClickObjectAction", oos->open); } } else { LogError("Invalid object [{}] in OP_ClickObjectAction", oos->drop_id); } } SetTradeskillObject(nullptr); EQApplicationPacket end_trade1(OP_FinishWindow, 0); QueuePacket(&end_trade1); EQApplicationPacket end_trade2(OP_FinishWindow2, 0); QueuePacket(&end_trade2); return; } void Client::Handle_OP_ClientError(const EQApplicationPacket *app) { ClientError_Struct* error = (ClientError_Struct*)app->pBuffer; LogError("Client error: [{}]", error->character_name); LogError("Error message:[{}]", error->message); return; } void Client::Handle_OP_ClientTimeStamp(const EQApplicationPacket *app) { return; } void Client::Handle_OP_ClientUpdate(const EQApplicationPacket *app) { if (IsAIControlled()) return; if (dead) return; /* Invalid size check */ if (app->size != sizeof(PlayerPositionUpdateClient_Struct) && app->size != (sizeof(PlayerPositionUpdateClient_Struct) + 1) ) { LogError("OP size error: OP_ClientUpdate expected:[{}] got:[{}]", sizeof(PlayerPositionUpdateClient_Struct), app->size); return; } PlayerPositionUpdateClient_Struct *ppu = (PlayerPositionUpdateClient_Struct *) app->pBuffer; /* Non PC handling like boats and eye of zomm */ if (ppu->spawn_id && ppu->spawn_id != GetID()) { Mob *cmob = entity_list.GetMob(ppu->spawn_id); if (!cmob) { return; } if (cmob->IsControllableBoat()) { // Controllable boats auto boat_delta = glm::vec4(ppu->delta_x, ppu->delta_y, ppu->delta_z, EQ10toFloat(ppu->delta_heading)); cmob->SetDelta(boat_delta); auto outapp = new EQApplicationPacket(OP_ClientUpdate, sizeof(PlayerPositionUpdateServer_Struct)); PlayerPositionUpdateServer_Struct *ppus = (PlayerPositionUpdateServer_Struct *) outapp->pBuffer; cmob->MakeSpawnUpdate(ppus); entity_list.QueueCloseClients(cmob, outapp, true, 300, this, false); safe_delete(outapp); /* Update the boat's position on the server, without sending an update */ cmob->GMMove(ppu->x_pos, ppu->y_pos, ppu->z_pos, EQ12toFloat(ppu->heading), false); return; } else { // Eye of Zomm needs code here to track position of the eye on server // so that other clients see it. 
I could add a check here for eye of zomm // race, to limit this code, but this should handle any client controlled // mob that gets updates from OP_ClientUpdate if (!cmob->IsControllableBoat() && ppu->spawn_id == controlled_mob_id) { cmob->SetPosition(ppu->x_pos, ppu->y_pos, ppu->z_pos); cmob->SetHeading(EQ12toFloat(ppu->heading)); mMovementManager->SendCommandToClients(cmob, 0.0, 0.0, 0.0, 0.0, 0, ClientRangeAny, nullptr, this); cmob->CastToNPC()->SaveGuardSpot(glm::vec4(ppu->x_pos, ppu->y_pos, ppu->z_pos, EQ12toFloat(ppu->heading))); } } return; } // At this point, all that's left is a client update. // Pure boat updates, and client contolled mob updates are complete. // This can still be tricky. If ppu->vehicle_id is set, then the client // position is actually an offset from the boat he is inside. bool on_boat = (ppu->vehicle_id != 0); // From this point forward, we need to use a new set of variables for client // position. If the client is in a boat, we need to add the boat pos and // the client offset together. float cx = ppu->x_pos; float cy = ppu->y_pos; float cz = ppu->z_pos; float new_heading = EQ12toFloat(ppu->heading); if (on_boat) { Mob *boat = entity_list.GetMob(ppu->vehicle_id); if (boat == 0) { LogError("Can't find boat for client position offset."); } else { if (boat->turning) return; // Calculate angle from boat heading to EQ heading double theta = std::fmod(((boat->GetHeading() * 360.0) / 512.0),360.0); double thetar = (theta * M_PI) / 180.0; // Boat cx is inverted (positive to left) // Boat cy is normal (positive toward heading) double cosine = std::cos(thetar); double sine = std::sin(thetar); double normalizedx, normalizedy; normalizedx = cx * cosine - -cy * sine; normalizedy = -cx * sine + cy * cosine; cx = boat->GetX() + normalizedx; cy = boat->GetY() + normalizedy; cz += boat->GetZ(); new_heading += boat->GetHeading(); } } cheat_manager.MovementCheck(glm::vec3(cx, cy, cz)); if (IsDraggingCorpse()) DragCorpses(); /* Check to see if PPU should trigger an update to the rewind position. */ float rewind_x_diff = 0; float rewind_y_diff = 0; rewind_x_diff = cx - m_RewindLocation.x; rewind_x_diff *= rewind_x_diff; rewind_y_diff = cy - m_RewindLocation.y; rewind_y_diff *= rewind_y_diff; /* We only need to store updated values if the player has moved. If the player has moved more than units for x or y, then we'll store his pre-PPU x and y for /rewind, in case he gets stuck. */ if ((rewind_x_diff > 750) || (rewind_y_diff > 750)) m_RewindLocation = glm::vec3(m_Position); /* If the PPU was a large jump, such as a cross zone gate or Call of Hero, just update rewind coordinates to the new ppu coordinates. This will prevent exploitation. 
*/ if ((rewind_x_diff > 5000) || (rewind_y_diff > 5000)) m_RewindLocation = glm::vec3(cx, cy, cz); if (proximity_timer.Check()) { entity_list.ProcessMove(this, glm::vec3(cx, cy, cz)); if (RuleB(TaskSystem, EnableTaskSystem) && RuleB(TaskSystem, EnableTaskProximity)) ProcessTaskProximities(cx, cy, cz); m_Proximity = glm::vec3(cx, cy, cz); } /* Update internal state */ m_Delta = glm::vec4(ppu->delta_x, ppu->delta_y, ppu->delta_z, EQ10toFloat(ppu->delta_heading)); if (IsTracking() && ((m_Position.x != cx) || (m_Position.y != cy))) { if (zone->random.Real(0, 100) < 70)//should be good CheckIncreaseSkill(EQ::skills::SkillTracking, nullptr, -20); } /* Break Hide if moving without sneaking and set rewind timer if moved */ if (cy != m_Position.y || cx != m_Position.x) { if ((hidden || improved_hidden) && !sneaking) { hidden = false; improved_hidden = false; if (!invisible) { auto outapp = new EQApplicationPacket(OP_SpawnAppearance, sizeof(SpawnAppearance_Struct)); SpawnAppearance_Struct *sa_out = (SpawnAppearance_Struct *) outapp->pBuffer; sa_out->spawn_id = GetID(); sa_out->type = 0x03; sa_out->parameter = 0; entity_list.QueueClients(this, outapp, true); safe_delete(outapp); } } rewind_timer.Start(30000, true); } is_client_moving = !(cy == m_Position.y && cx == m_Position.x); /** * Client aggro scanning */ const uint16 client_scan_npc_aggro_timer_idle = RuleI(Aggro, ClientAggroCheckIdleInterval); const uint16 client_scan_npc_aggro_timer_moving = RuleI(Aggro, ClientAggroCheckMovingInterval); LogAggroDetail( "ClientUpdate [{}] {}moving, scan timer [{}]", GetCleanName(), is_client_moving ? "" : "NOT ", client_scan_npc_aggro_timer.GetRemainingTime() ); if (is_client_moving) { if (client_scan_npc_aggro_timer.GetRemainingTime() > client_scan_npc_aggro_timer_moving) { LogAggroDetail("Client [{}] Restarting with moving timer", GetCleanName()); client_scan_npc_aggro_timer.Disable(); client_scan_npc_aggro_timer.Start(client_scan_npc_aggro_timer_moving); client_scan_npc_aggro_timer.Trigger(); } } else if (client_scan_npc_aggro_timer.GetDuration() == client_scan_npc_aggro_timer_moving) { LogAggroDetail("Client [{}] Restarting with idle timer", GetCleanName()); client_scan_npc_aggro_timer.Disable(); client_scan_npc_aggro_timer.Start(client_scan_npc_aggro_timer_idle); } /** * Client mob close list cache scan timer */ const uint16 client_mob_close_scan_timer_moving = 6000; const uint16 client_mob_close_scan_timer_idle = 60000; LogAIScanCloseDetail( "Client [{}] {}moving, scan timer [{}]", GetCleanName(), is_client_moving ? 
"" : "NOT ", mob_close_scan_timer.GetRemainingTime() ); if (is_client_moving) { if (mob_close_scan_timer.GetRemainingTime() > client_mob_close_scan_timer_moving) { LogAIScanCloseDetail("Client [{}] Restarting with moving timer", GetCleanName()); mob_close_scan_timer.Disable(); mob_close_scan_timer.Start(client_mob_close_scan_timer_moving); mob_close_scan_timer.Trigger(); } } else if (mob_close_scan_timer.GetDuration() == client_mob_close_scan_timer_moving) { LogAIScanCloseDetail("Client [{}] Restarting with idle timer", GetCleanName()); mob_close_scan_timer.Disable(); mob_close_scan_timer.Start(client_mob_close_scan_timer_idle); } /** * On a normal basis we limit mob movement updates based on distance * This ensures we send a periodic full zone update to a client that has started moving after 5 or so minutes * * For very large zones we will also force a full update based on distance * * We ignore a small distance around us so that we don't interrupt already pathing deltas as those npcs will appear * to full stop when they are actually still pathing */ float distance_moved = DistanceNoZ(GetLastPositionBeforeBulkUpdate(), GetPosition()); bool moved_far_enough_before_bulk_update = distance_moved >= zone->GetNpcPositionUpdateDistance(); bool is_ready_to_update = ( client_zone_wide_full_position_update_timer.Check() || moved_far_enough_before_bulk_update ); if (is_client_moving && is_ready_to_update) { LogDebug("[[{}]] Client Zone Wide Position Update NPCs", GetCleanName()); auto &mob_movement_manager = MobMovementManager::Get(); auto &mob_list = entity_list.GetMobList(); for (auto &it : mob_list) { Mob *entity = it.second; if (!entity->IsNPC()) { continue; } int animation_speed = 0; if (entity->IsMoving()) { if (entity->IsRunning()) { animation_speed = (entity->IsFeared() ? 
entity->GetFearSpeed() : entity->GetRunspeed()); } else { animation_speed = entity->GetWalkspeed(); } } mob_movement_manager.SendCommandToClients(entity, 0.0, 0.0, 0.0, 0.0, animation_speed, ClientRangeAny, this); } SetLastPositionBeforeBulkUpdate(GetPosition()); } int32 new_animation = ppu->animation; /* Update internal server position from what the client has sent */ m_Position.x = cx; m_Position.y = cy; m_Position.z = cz; /* Visual Debugging */ if (RuleB(Character, OPClientUpdateVisualDebug)) { LogDebug("ClientUpdate: ppu x: [{}] y: [{}] z: [{}] h: [{}]", cx, cy, cz, new_heading); this->SendAppearanceEffect(78, 0, 0, 0, 0); this->SendAppearanceEffect(41, 0, 0, 0, 0); } /* Only feed real time updates when client is moving */ if (is_client_moving || new_heading != m_Position.w || new_animation != animation) { animation = ppu->animation; m_Position.w = new_heading; /* Broadcast update to other clients */ auto outapp = new EQApplicationPacket(OP_ClientUpdate, sizeof(PlayerPositionUpdateServer_Struct)); PlayerPositionUpdateServer_Struct *position_update = (PlayerPositionUpdateServer_Struct *) outapp->pBuffer; MakeSpawnUpdate(position_update); if (gm_hide_me) { entity_list.QueueClientsStatus(this, outapp, true, Admin(), 255); } else { entity_list.QueueCloseClients(this, outapp, true, RuleI(Range, ClientPositionUpdates), nullptr, true); } /* Always send position updates to group - send when beyond normal ClientPositionUpdate range */ Group *group = this->GetGroup(); Raid *raid = this->GetRaid(); if (raid) { raid->QueueClients(this, outapp, true, true, (RuleI(Range, ClientPositionUpdates) * -1)); } else if (group) { group->QueueClients(this, outapp, true, true, (RuleI(Range, ClientPositionUpdates) * -1)); } safe_delete(outapp); } if (zone->watermap) { if (zone->watermap->InLiquid(glm::vec3(m_Position))) { CheckIncreaseSkill(EQ::skills::SkillSwimming, nullptr, -17); // Dismount horses when entering water if (GetHorseId() && RuleB(Character, DismountWater)) { SetHorseId(0); BuffFadeByEffect(SE_SummonHorse); } } CheckRegionTypeChanges(); } CheckVirtualZoneLines(); } void Client::Handle_OP_CombatAbility(const EQApplicationPacket *app) { if (app->size != sizeof(CombatAbility_Struct)) { std::cout << "Wrong size on OP_CombatAbility. 
Got: " << app->size << ", Expected: " << sizeof(CombatAbility_Struct) << std::endl; return; } auto ca_atk = (CombatAbility_Struct *)app->pBuffer; OPCombatAbility(ca_atk); return; } void Client::Handle_OP_ConfirmDelete(const EQApplicationPacket* app) { return; } void Client::Handle_OP_Consent(const EQApplicationPacket *app) { if (app->size<64) { Consent_Struct* c = (Consent_Struct*)app->pBuffer; ConsentCorpses(c->name, false); } } void Client::Handle_OP_ConsentDeny(const EQApplicationPacket *app) { if (app->size<64) { Consent_Struct* c = (Consent_Struct*)app->pBuffer; ConsentCorpses(c->name, true); } } void Client::Handle_OP_Consider(const EQApplicationPacket *app) { if (app->size != sizeof(Consider_Struct)) { LogDebug("Size mismatch in Consider expected [{}] got [{}]", sizeof(Consider_Struct), app->size); return; } Consider_Struct* conin = (Consider_Struct*)app->pBuffer; Mob* tmob = entity_list.GetMob(conin->targetid); if (tmob == 0) return; if (parse->EventPlayer(EVENT_CONSIDER, this, fmt::format("{}", conin->targetid), 0) == 1) { return; } if (tmob->GetClass() == LDON_TREASURE) { Message(Chat::Yellow, "%s", tmob->GetCleanName()); return; } auto outapp = new EQApplicationPacket(OP_Consider, sizeof(Consider_Struct)); Consider_Struct* con = (Consider_Struct*)outapp->pBuffer; con->playerid = GetID(); con->targetid = conin->targetid; if (tmob->IsNPC()) con->faction = GetFactionLevel(character_id, tmob->GetNPCTypeID(), GetFactionRace(), class_, deity, (tmob->IsNPC()) ? tmob->CastToNPC()->GetPrimaryFaction() : 0, tmob); // Dec. 20, 2001; TODO: Send the players proper deity else con->faction = 1; con->level = GetLevelCon(tmob->GetLevel()); if (ClientVersion() <= EQ::versions::ClientVersion::Titanium) { if (con->level == CON_GRAY) { con->level = CON_GREEN; } if (con->level == CON_WHITE) { con->level = CON_WHITE_TITANIUM; } } if (zone->IsPVPZone()) { if (!tmob->IsNPC()) con->pvpcon = tmob->CastToClient()->GetPVP(); } // If we're feigned show NPC as indifferent if (tmob->IsNPC()) { if (GetFeigned()) con->faction = FACTION_INDIFFERENT; } if (!(con->faction == FACTION_SCOWLS)) { if (tmob->IsNPC()) { if (tmob->CastToNPC()->IsOnHatelist(this)) con->faction = FACTION_THREATENLY; } } if (con->faction == FACTION_APPREHENSIVE) { con->faction = FACTION_SCOWLS; } else if (con->faction == FACTION_DUBIOUS) { con->faction = FACTION_THREATENLY; } else if (con->faction == FACTION_SCOWLS) { con->faction = FACTION_APPREHENSIVE; } else if (con->faction == FACTION_THREATENLY) { con->faction = FACTION_DUBIOUS; } mod_consider(tmob, con); QueuePacket(outapp); // only wanted to check raid target once // and need con to still be around so, do it here! 
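// Raid targets get a con-colored "army to defeat" warning; Titanium and earlier clients remap the gray color code to green since gray was remapped above as well.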
if (tmob->IsRaidTarget()) { uint32 color = 0; switch (con->level) { case CON_GREEN: color = 2; break; case CON_LIGHTBLUE: color = 10; break; case CON_BLUE: color = 4; break; case CON_WHITE_TITANIUM: case CON_WHITE: color = 10; break; case CON_YELLOW: color = 15; break; case CON_RED: color = 13; break; case CON_GRAY: color = 6; break; } if (ClientVersion() <= EQ::versions::ClientVersion::Titanium) { if (color == 6) { color = 2; } } SendColoredText(color, std::string("This creature would take an army to defeat!")); } // this could be done better, but this is only called when you con so w/e // Shroud of Stealth has a special message if (improved_hidden && (!tmob->see_improved_hide && (tmob->see_invis || tmob->see_hide))) MessageString(Chat::NPCQuestSay, SOS_KEEPS_HIDDEN); // we are trying to hide but they can see us else if ((invisible || invisible_undead || hidden || invisible_animals) && !IsInvisible(tmob)) MessageString(Chat::NPCQuestSay, SUSPECT_SEES_YOU); safe_delete(outapp); return; } void Client::Handle_OP_ConsiderCorpse(const EQApplicationPacket *app) { if (app->size != sizeof(Consider_Struct)) { LogDebug("Size mismatch in Consider corpse expected [{}] got [{}]", sizeof(Consider_Struct), app->size); return; } Consider_Struct* conin = (Consider_Struct*)app->pBuffer; Corpse* tcorpse = entity_list.GetCorpseByID(conin->targetid); if (tcorpse && tcorpse->IsNPCCorpse()) { if (parse->EventPlayer(EVENT_CONSIDER_CORPSE, this, fmt::format("{}", conin->targetid), 0) == 1) { return; } uint32 min; uint32 sec; uint32 ttime; if ((ttime = tcorpse->GetDecayTime()) != 0) { sec = (ttime / 1000) % 60; // Total seconds min = (ttime / 60000) % 60; // Total seconds / 60 drop .00 char val1[20] = { 0 }; char val2[20] = { 0 }; MessageString(Chat::NPCQuestSay, CORPSE_DECAY1, ConvertArray(min, val1), ConvertArray(sec, val2)); } else { MessageString(Chat::NPCQuestSay, CORPSE_DECAY_NOW); } } else if (tcorpse && tcorpse->IsPlayerCorpse()) { if (parse->EventPlayer(EVENT_CONSIDER_CORPSE, this, fmt::format("{}", conin->targetid), 0) == 1) { return; } uint32 day, hour, min, sec, ttime; if ((ttime = tcorpse->GetDecayTime()) != 0) { sec = (ttime / 1000) % 60; // Total seconds min = (ttime / 60000) % 60; // Total seconds hour = (ttime / 3600000) % 24; // Total hours day = ttime / 86400000; // Total Days if (day) Message(0, "This corpse will decay in %i days, %i hours, %i minutes and %i seconds.", day, hour, min, sec); else if (hour) Message(0, "This corpse will decay in %i hours, %i minutes and %i seconds.", hour, min, sec); else Message(0, "This corpse will decay in %i minutes and %i seconds.", min, sec); Message(0, "This corpse %s be resurrected.", tcorpse->IsRezzed() ? "cannot" : "can"); } else { MessageString(Chat::NPCQuestSay, CORPSE_DECAY_NOW); } } } void Client::Handle_OP_Consume(const EQApplicationPacket *app) { if (app->size != sizeof(Consume_Struct)) { LogError("OP size error: OP_Consume expected:[{}] got:[{}]", sizeof(Consume_Struct), app->size); return; } Consume_Struct* pcs = (Consume_Struct*)app->pBuffer; if (pcs->type == 0x01) { if (m_pp.hunger_level > 6000) { EQApplicationPacket *outapp = nullptr; outapp = new EQApplicationPacket(OP_Stamina, sizeof(Stamina_Struct)); Stamina_Struct* sta = (Stamina_Struct*)outapp->pBuffer; sta->food = m_pp.hunger_level > 6000 ? 6000 : m_pp.hunger_level; sta->water = m_pp.thirst_level > 6000 ? 
6000 : m_pp.thirst_level; QueuePacket(outapp); safe_delete(outapp); return; } } else if (pcs->type == 0x02) { if (m_pp.thirst_level > 6000) { EQApplicationPacket *outapp = nullptr; outapp = new EQApplicationPacket(OP_Stamina, sizeof(Stamina_Struct)); Stamina_Struct* sta = (Stamina_Struct*)outapp->pBuffer; sta->food = m_pp.hunger_level > 6000 ? 6000 : m_pp.hunger_level; sta->water = m_pp.thirst_level > 6000 ? 6000 : m_pp.thirst_level; QueuePacket(outapp); safe_delete(outapp); return; } } EQ::ItemInstance *myitem = GetInv().GetItem(pcs->slot); if (myitem == nullptr) { LogError("Consuming from empty slot [{}]", pcs->slot); return; } const EQ::ItemData* eat_item = myitem->GetItem(); if (pcs->type == 0x01) { Consume(eat_item, EQ::item::ItemTypeFood, pcs->slot, (pcs->auto_consumed == 0xffffffff)); } else if (pcs->type == 0x02) { Consume(eat_item, EQ::item::ItemTypeDrink, pcs->slot, (pcs->auto_consumed == 0xffffffff)); } else { LogError("OP_Consume: unknown type, type:[{}]", (int)pcs->type); return; } if (m_pp.hunger_level > 50000) m_pp.hunger_level = 50000; if (m_pp.thirst_level > 50000) m_pp.thirst_level = 50000; EQApplicationPacket *outapp = nullptr; outapp = new EQApplicationPacket(OP_Stamina, sizeof(Stamina_Struct)); Stamina_Struct* sta = (Stamina_Struct*)outapp->pBuffer; sta->food = m_pp.hunger_level > 6000 ? 6000 : m_pp.hunger_level; sta->water = m_pp.thirst_level > 6000 ? 6000 : m_pp.thirst_level; QueuePacket(outapp); safe_delete(outapp); return; } void Client::Handle_OP_ControlBoat(const EQApplicationPacket *app) { if (app->size != sizeof(ControlBoat_Struct)) { LogError("Wrong size: OP_ControlBoat, size=[{}], expected [{}]", app->size, sizeof(ControlBoat_Struct)); return; } ControlBoat_Struct* cbs = (ControlBoat_Struct*)app->pBuffer; Mob* boat = entity_list.GetMob(cbs->boatId); if (!boat) { LogError("Player tried to take control of non-existent boat (char_id: %u, boat_eid: %u)", CharacterID(), cbs->boatId); return; // do nothing if the boat isn't valid } if (!boat->IsNPC() || !boat->IsControllableBoat()) { auto hacked_string = fmt::format("OP_Control Boat was sent against {} which is of race {}", boat->GetName(), boat->GetRace()); database.SetMQDetectionFlag(this->AccountName(), this->GetName(), hacked_string, zone->GetShortName()); return; } if (cbs->TakeControl) { // this uses the boat's target to indicate who has control of it. It has to check hate to make sure the boat isn't actually attacking anyone. 
if (!boat->GetTarget() || (boat->GetTarget() == this && boat->GetHateAmount(this) == 0)) { boat->SetTarget(this); } else { this->MessageString(Chat::Red, IN_USE); return; } } else { if (boat->GetTarget() == this) { boat->SetTarget(nullptr); } } // client responds better to a packet echo than an empty op QueuePacket(app); // have the boat signal itself, so quests can be triggered by boat use boat->CastToNPC()->SignalNPC(0); } void Client::Handle_OP_CorpseDrag(const EQApplicationPacket *app) { if (DraggedCorpses.size() >= (unsigned int)RuleI(Character, MaxDraggedCorpses)) { MessageString(Chat::Red, CORPSEDRAG_LIMIT); return; } VERIFY_PACKET_LENGTH(OP_CorpseDrag, app, CorpseDrag_Struct); CorpseDrag_Struct *cds = (CorpseDrag_Struct*)app->pBuffer; Mob* corpse = entity_list.GetMob(cds->CorpseName); if (!corpse || !corpse->IsPlayerCorpse() || corpse->CastToCorpse()->IsBeingLooted()) return; Client *c = entity_list.FindCorpseDragger(corpse->GetID()); if (c) { if (c == this) MessageString(Chat::DefaultText, CORPSEDRAG_ALREADY, corpse->GetCleanName()); else MessageString(Chat::DefaultText, CORPSEDRAG_SOMEONE_ELSE, corpse->GetCleanName()); return; } if (!corpse->CastToCorpse()->Summon(this, false, true)) return; DraggedCorpses.push_back(std::pair<std::string, uint16>(cds->CorpseName, corpse->GetID())); MessageString(Chat::DefaultText, CORPSEDRAG_BEGIN, cds->CorpseName); } void Client::Handle_OP_CorpseDrop(const EQApplicationPacket *app) { if (app->size == 1) { MessageString(Chat::DefaultText, CORPSEDRAG_STOPALL); ClearDraggedCorpses(); return; } for (auto Iterator = DraggedCorpses.begin(); Iterator != DraggedCorpses.end(); ++Iterator) { if (!strcasecmp(Iterator->first.c_str(), (const char *)app->pBuffer)) { MessageString(Chat::DefaultText, CORPSEDRAG_STOP); Iterator = DraggedCorpses.erase(Iterator); return; } } } void Client::Handle_OP_CrashDump(const EQApplicationPacket *app) { return; } void Client::Handle_OP_CreateObject(const EQApplicationPacket *app) { if (LogSys.log_settings[Logs::Inventory].is_category_enabled) LogInventory("Handle_OP_CreateObject() [psize: [{}]] [{}]", app->size, DumpPacketToString(app).c_str()); DropItem(EQ::invslot::slotCursor); return; } void Client::Handle_OP_CrystalCreate(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_CrystalCreate, app, CrystalReclaim_Struct); CrystalReclaim_Struct *cr = (CrystalReclaim_Struct*)app->pBuffer; const uint32 requestQty = cr->amount; const bool isRadiant = cr->type == 4; const bool isEbon = cr->type == 5; // Check: Valid type requested. if (!isRadiant && !isEbon) { return; } // Check: Valid quantity requested. if (requestQty < 1) { return; } // Check: Valid client state to make request. // In this situation the client is either desynced or attempting an exploit. const uint32 currentQty = isRadiant ? GetRadiantCrystals() : GetEbonCrystals(); if (currentQty == 0) { return; } // Prevent the client from creating more than they have. const uint32 amount = EQ::ClampUpper(requestQty, currentQty); const uint32 itemID = isRadiant ? RuleI(Zone, RadiantCrystalItemID) : RuleI(Zone, EbonCrystalItemID); // Summon crystals for player. const bool success = SummonItem(itemID, amount); if (!success) { return; } // Deduct crystals from client and update them. 
if (isRadiant) { m_pp.currentRadCrystals -= amount; m_pp.careerRadCrystals -= amount; } else if (isEbon) { m_pp.currentEbonCrystals -= amount; m_pp.careerEbonCrystals -= amount; } SaveCurrency(); SendCrystalCounts(); } void Client::Handle_OP_CrystalReclaim(const EQApplicationPacket *app) { uint32 ebon = NukeItem(RuleI(Zone, EbonCrystalItemID), invWhereWorn | invWherePersonal | invWhereCursor); uint32 radiant = NukeItem(RuleI(Zone, RadiantCrystalItemID), invWhereWorn | invWherePersonal | invWhereCursor); if ((ebon + radiant) > 0) { AddCrystals(radiant, ebon); } } void Client::Handle_OP_Damage(const EQApplicationPacket *app) { if (app->size != sizeof(CombatDamage_Struct)) { LogError("Received invalid sized OP_Damage: got [{}], expected [{}]", app->size, sizeof(CombatDamage_Struct)); DumpPacket(app); return; } // Broadcast to other clients CombatDamage_Struct* damage = (CombatDamage_Struct*)app->pBuffer; //dont send to originator of falling damage packets entity_list.QueueClients(this, app, (damage->type == DamageTypeFalling)); return; } void Client::Handle_OP_Death(const EQApplicationPacket *app) { if (app->size != sizeof(Death_Struct)) return; Death_Struct* ds = (Death_Struct*)app->pBuffer; //I think this attack_skill value is really a value from SkillDamageTypes... if (ds->attack_skill > EQ::skills::HIGHEST_SKILL) { return; } if (GetHP() > 0) return; Mob* killer = entity_list.GetMob(ds->killer_id); Death(killer, ds->damage, ds->spell_id, (EQ::skills::SkillType)ds->attack_skill); return; } void Client::Handle_OP_DelegateAbility(const EQApplicationPacket *app) { if (app->size != sizeof(DelegateAbility_Struct)) { LogDebug("Size mismatch in OP_DelegateAbility expected [{}] got [{}]", sizeof(DelegateAbility_Struct), app->size); DumpPacket(app); return; } DelegateAbility_Struct* das = (DelegateAbility_Struct*)app->pBuffer; Group *g = GetGroup(); if (!g) return; switch (das->DelegateAbility) { case 0: { g->DelegateMainAssist(das->Name); break; } case 1: { g->DelegateMarkNPC(das->Name); break; } case 2: { g->DelegateMainTank(das->Name); break; } case 3: { g->DelegatePuller(das->Name); break; } default: break; } } void Client::Handle_OP_DeleteItem(const EQApplicationPacket *app) { if (app->size != sizeof(DeleteItem_Struct)) { std::cout << "Wrong size on OP_DeleteItem. Got: " << app->size << ", Expected: " << sizeof(DeleteItem_Struct) << std::endl; return; } DeleteItem_Struct* alc = (DeleteItem_Struct*)app->pBuffer; const EQ::ItemInstance *inst = GetInv().GetItem(alc->from_slot); if (inst && inst->GetItem()->ItemType == EQ::item::ItemTypeAlcohol) { entity_list.MessageCloseString(this, true, 50, 0, DRINKING_MESSAGE, GetName(), inst->GetItem()->Name); CheckIncreaseSkill(EQ::skills::SkillAlcoholTolerance, nullptr, 25); int16 AlcoholTolerance = GetSkill(EQ::skills::SkillAlcoholTolerance); int16 IntoxicationIncrease; if (ClientVersion() < EQ::versions::ClientVersion::SoD) IntoxicationIncrease = (200 - AlcoholTolerance) * 30 / 200 + 10; else IntoxicationIncrease = (270 - AlcoholTolerance) * 0.111111108 + 10; if (IntoxicationIncrease < 0) IntoxicationIncrease = 1; m_pp.intoxication += IntoxicationIncrease; if (m_pp.intoxication > 200) m_pp.intoxication = 200; } DeleteItemInInventory(alc->from_slot, 1); return; } void Client::Handle_OP_DeleteSpawn(const EQApplicationPacket *app) { // The client will send this with his id when he zones, maybe when he disconnects too? 
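// Acknowledge the logout, broadcast this entity's despawn to other clients, remove it from hate lists, and disconnect.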
//eqs->RemoveData(); // Flushing the queue of packet data to allow for proper zoning //just make sure this gets out auto outapp = new EQApplicationPacket(OP_LogoutReply); FastQueuePacket(&outapp); outapp = new EQApplicationPacket(OP_DeleteSpawn, sizeof(EntityId_Struct)); EntityId_Struct* eid = (EntityId_Struct*)outapp->pBuffer; eid->entity_id = GetID(); entity_list.QueueClients(this, outapp, false); safe_delete(outapp); hate_list.RemoveEntFromHateList(this->CastToMob()); Disconnect(); return; } void Client::Handle_OP_Disarm(const EQApplicationPacket *app) { if (dead || bZoning) return; if (!HasSkill(EQ::skills::SkillDisarm)) return; if (app->size != sizeof(Disarm_Struct)) { LogSkills("Size mismatch for Disarm_Struct packet"); return; } Disarm_Struct *disarm = (Disarm_Struct *)app->pBuffer; if (!p_timers.Expired(&database, pTimerCombatAbility2, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } p_timers.Start(pTimerCombatAbility2, 8); BreakInvis(); Mob* pmob = entity_list.GetMob(disarm->source); Mob* tmob = entity_list.GetMob(disarm->target); if (!pmob || !tmob) return; if (pmob->GetID() != GetID()) { // Client sent a disarm request with an originator ID not matching their own ID. auto hack_str = fmt::format("Player {} ({}) sent OP_Disarm with source ID of: {}", GetCleanName(), GetID(), pmob->GetID()); database.SetMQDetectionFlag(this->account_name, this->name, hack_str, zone->GetShortName()); return; } // No disarm on corpses if (tmob->IsCorpse()) return; // No target if (!GetTarget()) return; // Targets don't match (possible hack, but not flagging) if (GetTarget() != tmob) { return; } // Too far away if (pmob->CalculateDistance(GetTarget()->GetX(), GetTarget()->GetY(), GetTarget()->GetZ()) > 400) return; // Can't see mob //if (tmob->BehindMob(pmob)) // return; // How can we disarm someone if we are feigned. if (GetFeigned()) return; // We can't disarm someone who is feigned. if (tmob->IsClient() && tmob->CastToClient()->GetFeigned()) return; if (GetTarget() == tmob && pmob == this->CastToMob() && disarm->skill == GetSkill(EQ::skills::SkillDisarm) && IsAttackAllowed(tmob)) { int p_level = pmob->GetLevel() ? pmob->GetLevel() : 1; int t_level = tmob->GetLevel() ? 
tmob->GetLevel() : 1; // We have a disarmable target - sucess or fail, we always aggro the mob if (tmob->IsNPC()) { if (!tmob->CheckAggro(pmob)) { zone->AddAggroMob(); tmob->AddToHateList(pmob, p_level); } else { tmob->AddToHateList(pmob, p_level / 3); } } int chance = GetSkill(EQ::skills::SkillDisarm); // (1% @ 0 skill) (11% @ 200 skill) - against even con chance /= 2; chance += 10; // Modify chance based on level difference float lvl_mod = p_level / t_level; chance *= lvl_mod; if (chance > 300) chance = 300; // max chance of 30% if (tmob->IsNPC()) { tmob->CastToNPC()->Disarm(this, chance); } else if (tmob->IsClient()) { tmob->CastToClient()->Disarm(this, chance); } return; } // Trying to disarm something we can't disarm MessageString(Chat::Skills, DISARM_NO_TARGET); return; } void Client::Handle_OP_DeleteSpell(const EQApplicationPacket *app) { if (app->size != sizeof(DeleteSpell_Struct)) return; EQApplicationPacket* outapp = app->Copy(); DeleteSpell_Struct* dss = (DeleteSpell_Struct*)outapp->pBuffer; if (dss->spell_slot < 0 || dss->spell_slot >= EQ::spells::DynamicLookup(ClientVersion(), GetGM())->SpellbookSize) return; if (m_pp.spell_book[dss->spell_slot] != SPELLBOOK_UNKNOWN) { m_pp.spell_book[dss->spell_slot] = SPELLBOOK_UNKNOWN; database.DeleteCharacterSpell(this->CharacterID(), m_pp.spell_book[dss->spell_slot], dss->spell_slot); dss->success = 1; } else dss->success = 0; FastQueuePacket(&outapp); return; } void Client::Handle_OP_DisarmTraps(const EQApplicationPacket *app) { if (!HasSkill(EQ::skills::SkillDisarmTraps)) return; if (!p_timers.Expired(&database, pTimerDisarmTraps, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } int reuse = DisarmTrapsReuseTime - GetSkillReuseTime(EQ::skills::SkillDisarmTraps); if (reuse < 1) reuse = 1; p_timers.Start(pTimerDisarmTraps, reuse - 1); uint8 success = SKILLUP_FAILURE; float curdist = 0; Trap* trap = entity_list.FindNearbyTrap(this, 250, curdist, true); if (trap && trap->detected) { float max_radius = (trap->radius * 2) * (trap->radius * 2); // radius is used to trigger trap, so disarm radius should be a bit bigger. Log(Logs::General, Logs::Traps, "%s is attempting to disarm trap %d. 
Curdist is %0.2f maxdist is %0.2f", GetName(), trap->trap_id, curdist, max_radius); if (curdist <= max_radius) { int uskill = GetSkill(EQ::skills::SkillDisarmTraps); if ((zone->random.Int(0, 49) + uskill) >= (zone->random.Int(0, 49) + trap->skill)) { success = SKILLUP_SUCCESS; MessageString(Chat::Skills, DISARMED_TRAP); trap->disarmed = true; Log(Logs::General, Logs::Traps, "Trap %d is disarmed.", trap->trap_id); trap->UpdateTrap(); } else { MessageString(Chat::Skills, FAIL_DISARM_DETECTED_TRAP); if (zone->random.Int(0, 99) < 25) { trap->Trigger(this); } } CheckIncreaseSkill(EQ::skills::SkillDisarmTraps, nullptr); return; } else { MessageString(Chat::Skills, TRAP_TOO_FAR); } } else { MessageString(Chat::Skills, LDON_SENSE_TRAP2); } return; } void Client::Handle_OP_DoGroupLeadershipAbility(const EQApplicationPacket *app) { if (app->size != sizeof(DoGroupLeadershipAbility_Struct)) { LogDebug("Size mismatch in OP_DoGroupLeadershipAbility expected [{}] got [{}]", sizeof(DoGroupLeadershipAbility_Struct), app->size); DumpPacket(app); return; } DoGroupLeadershipAbility_Struct* dglas = (DoGroupLeadershipAbility_Struct*)app->pBuffer; switch (dglas->Ability) { case GroupLeadershipAbility_MarkNPC: { if (GetTarget()) { Group* g = GetGroup(); if (g) g->MarkNPC(GetTarget(), dglas->Parameter); } break; } case groupAAInspectBuffs: { Mob *Target = GetTarget(); if (!Target || !Target->IsClient()) return; if (IsRaidGrouped()) { Raid *raid = GetRaid(); if (!raid) return; uint32 group_id = raid->GetGroup(this); if (group_id > 11 || raid->GroupCount(group_id) < 3) return; Target->CastToClient()->InspectBuffs(this, raid->GetLeadershipAA(groupAAInspectBuffs, group_id)); return; } Group *g = GetGroup(); if (!g || (g->GroupCount() < 3)) return; Target->CastToClient()->InspectBuffs(this, g->GetLeadershipAA(groupAAInspectBuffs)); break; } default: LogDebug("Got unhandled OP_DoGroupLeadershipAbility Ability: [{}] Parameter: [{}]", dglas->Ability, dglas->Parameter); break; } } void Client::Handle_OP_DuelResponse(const EQApplicationPacket *app) { if (app->size != sizeof(DuelResponse_Struct)) return; DuelResponse_Struct* ds = (DuelResponse_Struct*)app->pBuffer; Entity* entity = entity_list.GetID(ds->target_id); Entity* initiator = entity_list.GetID(ds->entity_id); if (!entity->IsClient() || !initiator->IsClient()) return; entity->CastToClient()->SetDuelTarget(0); entity->CastToClient()->SetDueling(false); initiator->CastToClient()->SetDuelTarget(0); initiator->CastToClient()->SetDueling(false); if (GetID() == initiator->GetID()) entity->CastToClient()->MessageString(Chat::NPCQuestSay, DUEL_DECLINE, initiator->GetName()); else initiator->CastToClient()->MessageString(Chat::NPCQuestSay, DUEL_DECLINE, entity->GetName()); return; } void Client::Handle_OP_DuelResponse2(const EQApplicationPacket *app) { if (app->size != sizeof(Duel_Struct)) return; Duel_Struct* ds = (Duel_Struct*)app->pBuffer; Entity* entity = entity_list.GetID(ds->duel_target); Entity* initiator = entity_list.GetID(ds->duel_initiator); if (entity && initiator && entity == this && initiator->IsClient()) { auto outapp = new EQApplicationPacket(OP_RequestDuel, sizeof(Duel_Struct)); Duel_Struct* ds2 = (Duel_Struct*)outapp->pBuffer; ds2->duel_initiator = entity->GetID(); ds2->duel_target = entity->GetID(); initiator->CastToClient()->QueuePacket(outapp); outapp->SetOpcode(OP_DuelResponse2); ds2->duel_initiator = initiator->GetID(); initiator->CastToClient()->QueuePacket(outapp); QueuePacket(outapp); SetDueling(true); initiator->CastToClient()->SetDueling(true); 
SetDuelTarget(ds->duel_initiator); safe_delete(outapp); if (IsCasting()) InterruptSpell(); if (initiator->CastToClient()->IsCasting()) initiator->CastToClient()->InterruptSpell(); } return; } void Client::Handle_OP_DumpName(const EQApplicationPacket *app) { return; } void Client::Handle_OP_Dye(const EQApplicationPacket *app) { if (app->size != sizeof(EQ::TintProfile)) printf("Wrong size of DyeStruct, Got: %i, Expected: %zu\n", app->size, sizeof(EQ::TintProfile)); else { EQ::TintProfile* dye = (EQ::TintProfile*)app->pBuffer; DyeArmor(dye); } return; } void Client::Handle_OP_DzAddPlayer(const EQApplicationPacket *app) { auto expedition = GetExpedition(); if (expedition) { auto dzcmd = reinterpret_cast<ExpeditionCommand_Struct*>(app->pBuffer); expedition->DzAddPlayer(this, dzcmd->name); } else { // the only /dz command that sends an error message if no active expedition Message(Chat::System, DZ_YOU_NOT_ASSIGNED); } } void Client::Handle_OP_DzChooseZoneReply(const EQApplicationPacket *app) { auto dzmsg = reinterpret_cast<DynamicZoneChooseZoneReply_Struct*>(app->pBuffer); LogDynamicZones("Character [{}] chose DynamicZone [{}]:[{}] type: [{}] with system id: [{}]", CharacterID(), dzmsg->dz_zone_id, dzmsg->dz_instance_id, dzmsg->dz_type, dzmsg->unknown_id2); if (!dzmsg->dz_instance_id || !database.VerifyInstanceAlive(dzmsg->dz_instance_id, CharacterID())) { // live just no-ops this without a message LogDynamicZones("Character [{}] chose invalid DynamicZone [{}]:[{}] or is no longer a member", CharacterID(), dzmsg->dz_zone_id, dzmsg->dz_instance_id); return; } auto client_dzs = GetDynamicZones(); auto it = std::find_if(client_dzs.begin(), client_dzs.end(), [&](const DynamicZone* dz) { return dz->IsSameDz(dzmsg->dz_zone_id, dzmsg->dz_instance_id); }); if (it != client_dzs.end()) { DynamicZoneLocation loc = (*it)->GetZoneInLocation(); ZoneMode zone_mode = (*it)->HasZoneInLocation() ? 
ZoneMode::ZoneSolicited : ZoneMode::ZoneToSafeCoords; MovePC(dzmsg->dz_zone_id, dzmsg->dz_instance_id, loc.x, loc.y, loc.z, loc.heading, 0, zone_mode); } } void Client::Handle_OP_DzExpeditionInviteResponse(const EQApplicationPacket *app) { auto expedition = Expedition::FindCachedExpeditionByID(m_pending_expedition_invite.expedition_id); std::string swap_remove_name = m_pending_expedition_invite.swap_remove_name; m_pending_expedition_invite = { 0 }; // clear before re-validating if (expedition) { auto dzmsg = reinterpret_cast<ExpeditionInviteResponse_Struct*>(app->pBuffer); expedition->DzInviteResponse(this, dzmsg->accepted, swap_remove_name); } } void Client::Handle_OP_DzListTimers(const EQApplicationPacket *app) { DzListTimers(); } void Client::Handle_OP_DzMakeLeader(const EQApplicationPacket *app) { auto expedition = GetExpedition(); if (expedition) { auto dzcmd = reinterpret_cast<ExpeditionCommand_Struct*>(app->pBuffer); expedition->DzMakeLeader(this, dzcmd->name); } } void Client::Handle_OP_DzPlayerList(const EQApplicationPacket *app) { auto expedition = GetExpedition(); if (expedition) { expedition->DzPlayerList(this); } } void Client::Handle_OP_DzRemovePlayer(const EQApplicationPacket *app) { auto expedition = GetExpedition(); if (expedition) { auto dzcmd = reinterpret_cast<ExpeditionCommand_Struct*>(app->pBuffer); expedition->DzRemovePlayer(this, dzcmd->name); } } void Client::Handle_OP_DzSwapPlayer(const EQApplicationPacket *app) { auto expedition = GetExpedition(); if (expedition) { auto dzcmd = reinterpret_cast<ExpeditionCommandSwap_Struct*>(app->pBuffer); expedition->DzSwapPlayer(this, dzcmd->rem_player_name, dzcmd->add_player_name); } } void Client::Handle_OP_DzQuit(const EQApplicationPacket *app) { auto expedition = GetExpedition(); if (expedition) { expedition->DzQuit(this); } } void Client::Handle_OP_Emote(const EQApplicationPacket *app) { if (app->size != sizeof(Emote_Struct)) { LogError("Received invalid sized OP_Emote: got [{}], expected [{}]", app->size, sizeof(Emote_Struct)); DumpPacket(app); return; } // Calculate new packet dimensions Emote_Struct* in = (Emote_Struct*)app->pBuffer; in->message[1023] = '\0'; const char* name = GetName(); uint32 len_name = strlen(name); uint32 len_msg = strlen(in->message); // crash protection -- cheater if (len_msg > 512) { in->message[512] = '\0'; len_msg = 512; } uint32 len_packet = sizeof(in->type) + len_name + len_msg + 1; // Construct outgoing packet auto outapp = new EQApplicationPacket(OP_Emote, len_packet); Emote_Struct* out = (Emote_Struct*)outapp->pBuffer; out->type = in->type; memcpy(out->message, name, len_name); memcpy(&out->message[len_name], in->message, len_msg); /* if (target && target->IsClient()) { entity_list.QueueCloseClients(this, outapp, false, 100, target); cptr = outapp->pBuffer + 2; // not sure if live does this or not. thought it was a nice feature, but would take a lot to // clean up grammatical and other errors. Maybe with a regex parser... 
replacestr((char *)cptr, target->GetName(), "you"); replacestr((char *)cptr, " he", " you"); replacestr((char *)cptr, " she", " you"); replacestr((char *)cptr, " him", " you"); replacestr((char *)cptr, " her", " you"); target->CastToClient()->QueuePacket(outapp); } else */ entity_list.QueueCloseClients(this, outapp, true, RuleI(Range, Emote), 0, true, FilterSocials); safe_delete(outapp); return; } void Client::Handle_OP_EndLootRequest(const EQApplicationPacket *app) { if (app->size != sizeof(uint32)) { std::cout << "Wrong size: OP_EndLootRequest, size=" << app->size << ", expected " << sizeof(uint32) << std::endl; return; } SetLooting(0); Entity* entity = entity_list.GetID(*((uint16*)app->pBuffer)); if (entity == 0) { Message(Chat::Red, "Error: OP_EndLootRequest: Corpse not found (ent = 0)"); if (ClientVersion() >= EQ::versions::ClientVersion::SoD) Corpse::SendEndLootErrorPacket(this); else Corpse::SendLootReqErrorPacket(this); return; } else if (!entity->IsCorpse()) { Message(Chat::Red, "Error: OP_EndLootRequest: Corpse not found (!entity->IsCorpse())"); Corpse::SendLootReqErrorPacket(this); return; } else { entity->CastToCorpse()->EndLoot(this, app); } return; } void Client::Handle_OP_EnvDamage(const EQApplicationPacket *app) { if (!ClientFinishedLoading()) { SetHP(GetHP() - 1); return; } if (app->size != sizeof(EnvDamage2_Struct)) { LogError("Received invalid sized OP_EnvDamage: got [{}], expected [{}]", app->size, sizeof(EnvDamage2_Struct)); DumpPacket(app); return; } EnvDamage2_Struct* ed = (EnvDamage2_Struct*)app->pBuffer; int damage = ed->damage; if (ed->dmgtype == 252) { int mod = spellbonuses.ReduceFallDamage + itembonuses.ReduceFallDamage + aabonuses.ReduceFallDamage; damage -= damage * mod / 100; } if (damage < 0) damage = 31337; if (admin >= minStatusToAvoidFalling && GetGM()) { Message(Chat::Red, "Your GM status protects you from %i points of type %i environmental damage.", ed->damage, ed->dmgtype); SetHP(GetHP() - 1);//needed or else the client wont acknowledge return; } else if (GetInvul()) { Message(Chat::Red, "Your invuln status protects you from %i points of type %i environmental damage.", ed->damage, ed->dmgtype); SetHP(GetHP() - 1);//needed or else the client wont acknowledge return; } else if (zone->GetZoneID() == 183 || zone->GetZoneID() == 184) { // Hard coded tutorial and load zones for no fall damage return; } else { SetHP(GetHP() - (damage * RuleR(Character, EnvironmentDamageMulipliter))); /* EVENT_ENVIRONMENTAL_DAMAGE */ int final_damage = (damage * RuleR(Character, EnvironmentDamageMulipliter)); char buf[24]; snprintf(buf, 23, "%u %u %i", ed->damage, ed->dmgtype, final_damage); parse->EventPlayer(EVENT_ENVIRONMENTAL_DAMAGE, this, buf, 0); } if (GetHP() <= 0) { mod_client_death_env(); Death(0, 32000, SPELL_UNKNOWN, EQ::skills::SkillHandtoHand); } SendHPUpdate(); return; } void Client::Handle_OP_FaceChange(const EQApplicationPacket *app) { if (app->size != sizeof(FaceChange_Struct)) { LogError("Invalid size for OP_FaceChange: Expected: [{}], Got: [{}]", sizeof(FaceChange_Struct), app->size); return; } // Notify other clients in zone entity_list.QueueClients(this, app, false); FaceChange_Struct* fc = (FaceChange_Struct*)app->pBuffer; m_pp.haircolor = fc->haircolor; m_pp.beardcolor = fc->beardcolor; m_pp.eyecolor1 = fc->eyecolor1; m_pp.eyecolor2 = fc->eyecolor2; m_pp.hairstyle = fc->hairstyle; m_pp.face = fc->face; m_pp.beard = fc->beard; m_pp.drakkin_heritage = fc->drakkin_heritage; m_pp.drakkin_tattoo = fc->drakkin_tattoo; m_pp.drakkin_details = 
fc->drakkin_details; Save(); MessageString(Chat::Red, FACE_ACCEPTED); //Message(Chat::Red, "Facial features updated."); return; } void Client::Handle_OP_FeignDeath(const EQApplicationPacket *app) { if (GetClass() != MONK) return; if (!p_timers.Expired(&database, pTimerFeignDeath, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } int reuse = FeignDeathReuseTime; reuse -= GetSkillReuseTime(EQ::skills::SkillFeignDeath); if (reuse < 1) reuse = 1; p_timers.Start(pTimerFeignDeath, reuse - 1); //BreakInvis(); uint16 primfeign = GetSkill(EQ::skills::SkillFeignDeath); uint16 secfeign = GetSkill(EQ::skills::SkillFeignDeath); if (primfeign > 100) { primfeign = 100; secfeign = secfeign - 100; secfeign = secfeign / 2; } else secfeign = 0; uint16 totalfeign = primfeign + secfeign; if (zone->random.Real(0, 160) > totalfeign) { SetFeigned(false); entity_list.MessageCloseString(this, false, 200, 10, STRING_FEIGNFAILED, GetName()); } else { SetFeigned(true); } CheckIncreaseSkill(EQ::skills::SkillFeignDeath, nullptr, 5); return; } void Client::Handle_OP_FindPersonRequest(const EQApplicationPacket *app) { if (app->size != sizeof(FindPersonRequest_Struct)) printf("Error in FindPersonRequest_Struct. Expected size of: %zu, but got: %i\n", sizeof(FindPersonRequest_Struct), app->size); else { FindPersonRequest_Struct* t = (FindPersonRequest_Struct*)app->pBuffer; std::vector<FindPerson_Point> points; Mob* target = entity_list.GetMob(t->npc_id); if (target == nullptr) { //empty length packet == not found. EQApplicationPacket outapp(OP_FindPersonReply, 0); QueuePacket(&outapp); return; } if (!RuleB(Pathing, Find) && RuleB(Bazaar, EnableWarpToTrader) && target->IsClient() && (target->CastToClient()->Trader || target->CastToClient()->Buyer)) { Message(Chat::Yellow, "Moving you to Trader %s", target->GetName()); MovePC(zone->GetZoneID(), zone->GetInstanceID(), target->GetX(), target->GetY(), target->GetZ(), 0.0f); } if (!RuleB(Pathing, Find) || !zone->pathing) { //fill in the path array... // points.clear(); FindPerson_Point a; FindPerson_Point b; a.x = GetX(); a.y = GetY(); a.z = GetZ(); b.x = target->GetX(); b.y = target->GetY(); b.z = target->GetZ(); points.push_back(a); points.push_back(b); } else { glm::vec3 Start(GetX(), GetY(), GetZ() + (GetSize() < 6.0 ? 6 : GetSize()) * HEAD_POSITION); glm::vec3 End(target->GetX(), target->GetY(), target->GetZ() + (target->GetSize() < 6.0 ? 6 : target->GetSize()) * HEAD_POSITION); bool partial = false; bool stuck = false; auto pathlist = zone->pathing->FindRoute(Start, End, partial, stuck); if (pathlist.empty() || partial) { EQApplicationPacket outapp(OP_FindPersonReply, 0); QueuePacket(&outapp); return; } // Live appears to send the points in this order: // Final destination. // Current Position. // rest of the points. 
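// The reply below is assembled in that order: the final destination node is pushed first, then our current position, then each intermediate pathing node in sequence, stopping early at the first teleporter node; if no teleporter was encountered, the target's exact position is appended last.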
FindPerson_Point p; int PointNumber = 0; bool LeadsToTeleporter = false; auto v = pathlist.back(); p.x = v.pos.x; p.y = v.pos.y; p.z = v.pos.z; points.push_back(p); p.x = GetX(); p.y = GetY(); p.z = GetZ(); points.push_back(p); for (auto Iterator = pathlist.begin(); Iterator != pathlist.end(); ++Iterator) { if ((*Iterator).teleport) // Teleporter { LeadsToTeleporter = true; break; } glm::vec3 v = (*Iterator).pos; p.x = v.x; p.y = v.y; p.z = v.z; points.push_back(p); ++PointNumber; } if (!LeadsToTeleporter) { p.x = target->GetX(); p.y = target->GetY(); p.z = target->GetZ(); points.push_back(p); } } SendPathPacket(points); } } void Client::Handle_OP_Fishing(const EQApplicationPacket *app) { if (!p_timers.Expired(&database, pTimerFishing, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } if (CanFish()) { parse->EventPlayer(EVENT_FISH_START, this, "", 0); //these will trigger GoFish() after a delay if we're able to actually fish, and if not, we won't stop the client from trying again immediately (although we may need to tell it to repop the button) p_timers.Start(pTimerFishing, FishingReuseTime - 1); fishing_timer.Start(); } return; // Changes made based on Bobs work on foraging. Now can set items in the forage database table to // forage for. } void Client::Handle_OP_Forage(const EQApplicationPacket *app) { if (!p_timers.Expired(&database, pTimerForaging, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } p_timers.Start(pTimerForaging, ForagingReuseTime - 1); ForageItem(); return; } void Client::Handle_OP_FriendsWho(const EQApplicationPacket *app) { char *FriendsString = (char*)app->pBuffer; FriendsWho(FriendsString); return; } void Client::Handle_OP_GetGuildMOTD(const EQApplicationPacket *app) { LogGuilds("Received OP_GetGuildMOTD"); SendGuildMOTD(true); if (IsInAGuild()) { SendGuildURL(); SendGuildChannel(); } } void Client::Handle_OP_GetGuildsList(const EQApplicationPacket *app) { LogGuilds("Received OP_GetGuildsList"); SendGuildList(); } void Client::Handle_OP_GMBecomeNPC(const EQApplicationPacket *app) { if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/becomenpc"); return; } if (app->size != sizeof(BecomeNPC_Struct)) { LogError("Wrong size: OP_GMBecomeNPC, size=[{}], expected [{}]", app->size, sizeof(BecomeNPC_Struct)); return; } //entity_list.QueueClients(this, app, false); BecomeNPC_Struct* bnpc = (BecomeNPC_Struct*)app->pBuffer; Mob* cli = (Mob*)entity_list.GetMob(bnpc->id); if (cli == 0) return; if (cli->IsClient()) cli->CastToClient()->QueuePacket(app); cli->SendAppearancePacket(AT_NPCName, 1, true); cli->CastToClient()->SetBecomeNPC(true); cli->CastToClient()->SetBecomeNPCLevel(bnpc->maxlevel); cli->MessageString(Chat::White, TOGGLE_OFF); cli->CastToClient()->tellsoff = true; //TODO: Make this toggle a BecomeNPC flag so that it gets updated when people zone in as well; Make combat work with this. 
return; } void Client::Handle_OP_GMDelCorpse(const EQApplicationPacket *app) { if (app->size != sizeof(GMDelCorpse_Struct)) return; if (this->Admin() < commandEditPlayerCorpses) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/delcorpse"); return; } GMDelCorpse_Struct* dc = (GMDelCorpse_Struct *)app->pBuffer; Mob* corpse = entity_list.GetMob(dc->corpsename); if (corpse == 0) { return; } if (corpse->IsCorpse() != true) { return; } corpse->CastToCorpse()->Delete(); std::cout << name << " deleted corpse " << dc->corpsename << std::endl; Message(Chat::Red, "Corpse %s deleted.", dc->corpsename); return; } void Client::Handle_OP_GMEmoteZone(const EQApplicationPacket *app) { if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/emote"); return; } if (app->size != sizeof(GMEmoteZone_Struct)) { LogError("Wrong size: OP_GMEmoteZone, size=[{}], expected [{}]", app->size, sizeof(GMEmoteZone_Struct)); return; } GMEmoteZone_Struct* gmez = (GMEmoteZone_Struct*)app->pBuffer; char* newmessage = nullptr; if (strstr(gmez->text, "^") == 0) entity_list.Message(0, 15, gmez->text); else { for (newmessage = strtok((char*)gmez->text, "^"); newmessage != nullptr; newmessage = strtok(nullptr, "^")) entity_list.Message(0, 15, newmessage); } return; } void Client::Handle_OP_GMEndTraining(const EQApplicationPacket *app) { if (app->size != sizeof(GMTrainEnd_Struct)) { LogDebug("Size mismatch in OP_GMEndTraining expected [{}] got [{}]", sizeof(GMTrainEnd_Struct), app->size); DumpPacket(app); return; } OPGMEndTraining(app); return; } void Client::Handle_OP_GMFind(const EQApplicationPacket *app) { if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/find"); return; } if (app->size != sizeof(GMSummon_Struct)) { LogError("Wrong size: OP_GMFind, size=[{}], expected [{}]", app->size, sizeof(GMSummon_Struct)); return; } //Break down incoming GMSummon_Struct* request = (GMSummon_Struct*)app->pBuffer; //Create a new outgoing auto outapp = new EQApplicationPacket(OP_GMFind, sizeof(GMSummon_Struct)); GMSummon_Struct* foundplayer = (GMSummon_Struct*)outapp->pBuffer; //Copy the constants strcpy(foundplayer->charname, request->charname); strcpy(foundplayer->gmname, request->gmname); //Check if the NPC exits intrazone... Mob* gt = entity_list.GetMob(request->charname); if (gt != 0) { foundplayer->success = 1; foundplayer->x = (int32)gt->GetX(); foundplayer->y = (int32)gt->GetY(); foundplayer->z = (int32)gt->GetZ(); foundplayer->zoneID = zone->GetZoneID(); } //Send the packet... FastQueuePacket(&outapp); return; } void Client::Handle_OP_GMGoto(const EQApplicationPacket *app) { if (app->size != sizeof(GMSummon_Struct)) { std::cout << "Wrong size on OP_GMGoto. 
Got: " << app->size << ", Expected: " << sizeof(GMSummon_Struct) << std::endl; return; } if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/goto"); return; } GMSummon_Struct* gmg = (GMSummon_Struct*)app->pBuffer; Mob* gt = entity_list.GetMob(gmg->charname); if (gt != nullptr) { this->MovePC(zone->GetZoneID(), zone->GetInstanceID(), gt->GetX(), gt->GetY(), gt->GetZ(), gt->GetHeading()); } else if (!worldserver.Connected()) Message(0, "Error: World server disconnected."); else { auto pack = new ServerPacket(ServerOP_GMGoto, sizeof(ServerGMGoto_Struct)); memset(pack->pBuffer, 0, pack->size); ServerGMGoto_Struct* wsgmg = (ServerGMGoto_Struct*)pack->pBuffer; strcpy(wsgmg->myname, this->GetName()); strcpy(wsgmg->gotoname, gmg->charname); wsgmg->admin = admin; worldserver.SendPacket(pack); safe_delete(pack); } return; } void Client::Handle_OP_GMHideMe(const EQApplicationPacket *app) { if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/hideme"); return; } if (app->size != sizeof(SpawnAppearance_Struct)) { LogError("Wrong size: OP_GMHideMe, size=[{}], expected [{}]", app->size, sizeof(SpawnAppearance_Struct)); return; } SpawnAppearance_Struct* sa = (SpawnAppearance_Struct*)app->pBuffer; Message(Chat::Red, "#: %i, %i", sa->type, sa->parameter); SetHideMe(!sa->parameter); return; } void Client::Handle_OP_GMKick(const EQApplicationPacket *app) { if (app->size != sizeof(GMKick_Struct)) return; if (this->Admin() < minStatusToKick) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/kick"); return; } GMKick_Struct* gmk = (GMKick_Struct *)app->pBuffer; Client* client = entity_list.GetClientByName(gmk->name); if (client == 0) { if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { auto pack = new ServerPacket(ServerOP_KickPlayer, sizeof(ServerKickPlayer_Struct)); ServerKickPlayer_Struct* skp = (ServerKickPlayer_Struct*)pack->pBuffer; strcpy(skp->adminname, gmk->gmname); strcpy(skp->name, gmk->name); skp->adminrank = this->Admin(); worldserver.SendPacket(pack); safe_delete(pack); } } else { entity_list.QueueClients(this, app); //client->Kick(); } return; } void Client::Handle_OP_GMKill(const EQApplicationPacket *app) { if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/kill"); return; } if (app->size != sizeof(GMKill_Struct)) { LogError("Wrong size: OP_GMKill, size=[{}], expected [{}]", app->size, sizeof(GMKill_Struct)); return; } GMKill_Struct* gmk = (GMKill_Struct *)app->pBuffer; Mob* obj = entity_list.GetMob(gmk->name); Client* client = entity_list.GetClientByName(gmk->name); if (obj != 0) { if (client != 0) { entity_list.QueueClients(this, app); } else { obj->Kill(); } } else { if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { auto pack = new ServerPacket(ServerOP_KillPlayer, sizeof(ServerKillPlayer_Struct)); ServerKillPlayer_Struct* skp = (ServerKillPlayer_Struct*)pack->pBuffer; strcpy(skp->gmname, gmk->gmname); strcpy(skp->target, gmk->name); skp->admin = this->Admin(); worldserver.SendPacket(pack); safe_delete(pack); } } return; } void Client::Handle_OP_GMLastName(const EQApplicationPacket *app) { if 
(app->size != sizeof(GMLastName_Struct)) { std::cout << "Wrong size on OP_GMLastName. Got: " << app->size << ", Expected: " << sizeof(GMLastName_Struct) << std::endl; return; } GMLastName_Struct* gmln = (GMLastName_Struct*)app->pBuffer; if (strlen(gmln->lastname) >= 64) { Message(Chat::Red, "/LastName: New last name too long. (max=63)"); } else { Client* client = entity_list.GetClientByName(gmln->name); if (client == 0) { Message(Chat::Red, "/LastName: %s not found", gmln->name); } else { if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(client->account_name, client->name, "/lastname"); return; } else client->ChangeLastName(gmln->lastname); } gmln->unknown[0] = 1; gmln->unknown[1] = 1; gmln->unknown[2] = 1; gmln->unknown[3] = 1; entity_list.QueueClients(this, app, false); } return; } void Client::Handle_OP_GMNameChange(const EQApplicationPacket *app) { if (app->size != sizeof(GMName_Struct)) { LogError("Wrong size: OP_GMNameChange, size=[{}], expected [{}]", app->size, sizeof(GMName_Struct)); return; } const GMName_Struct* gmn = (const GMName_Struct *)app->pBuffer; if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/name"); return; } Client* client = entity_list.GetClientByName(gmn->oldname); LogInfo("GM([{}]) changeing players name. Old:[{}] New:[{}]", GetName(), gmn->oldname, gmn->newname); bool usedname = database.CheckUsedName((const char*)gmn->newname); if (client == 0) { Message(Chat::Red, "%s not found for name change. Operation failed!", gmn->oldname); return; } if ((strlen(gmn->newname) > 63) || (strlen(gmn->newname) == 0)) { Message(Chat::Red, "Invalid number of characters in new name (%s).", gmn->newname); return; } if (!usedname) { Message(Chat::Red, "%s is already in use. Operation failed!", gmn->newname); return; } database.UpdateName(gmn->oldname, gmn->newname); strcpy(client->name, gmn->newname); client->Save(); if (gmn->badname == 1) { database.AddToNameFilter(gmn->oldname); } EQApplicationPacket* outapp = app->Copy(); GMName_Struct* gmn2 = (GMName_Struct*)outapp->pBuffer; gmn2->unknown[0] = 1; gmn2->unknown[1] = 1; gmn2->unknown[2] = 1; entity_list.QueueClients(this, outapp, false); safe_delete(outapp); UpdateWho(); return; } void Client::Handle_OP_GMSearchCorpse(const EQApplicationPacket *app) { // Could make this into a rule, although there is a hard limit since we are using a popup, of 4096 bytes that can // be displayed in the window, including all the HTML formatting tags. 
// const int maxResults = 10; if (app->size < sizeof(GMSearchCorpse_Struct)) { LogDebug("OP_GMSearchCorpse size lower than expected: got [{}] expected at least [{}]", app->size, sizeof(GMSearchCorpse_Struct)); DumpPacket(app); return; } GMSearchCorpse_Struct *gmscs = (GMSearchCorpse_Struct *)app->pBuffer; gmscs->Name[63] = '\0'; auto escSearchString = new char[129]; database.DoEscapeString(escSearchString, gmscs->Name, strlen(gmscs->Name)); std::string query = StringFormat("SELECT charname, zone_id, x, y, z, time_of_death, is_rezzed, is_buried " "FROM character_corpses WHERE charname LIKE '%%%s%%' ORDER BY charname LIMIT %i", escSearchString, maxResults); safe_delete_array(escSearchString); auto results = database.QueryDatabase(query); if (!results.Success()) { return; } if (results.RowCount() == 0) return; if (results.RowCount() == maxResults) Message(Chat::Red, "Your search found too many results; some are not displayed."); else Message(Chat::Yellow, "There are %i corpse(s) that match the search string '%s'.", results.RowCount(), gmscs->Name); char charName[64], time_of_death[20]; std::string popupText = "<table><tr><td>Name</td><td>Zone</td><td>X</td><td>Y</td><td>Z</td><td>Date</td><td>" "Rezzed</td><td>Buried</td></tr><tr><td>&nbsp;</td><td></td><td></td><td></td><td></td><td>" "</td><td></td><td></td></tr>"; for (auto row = results.begin(); row != results.end(); ++row) { strn0cpy(charName, row[0], sizeof(charName)); uint32 ZoneID = atoi(row[1]); float CorpseX = atof(row[2]); float CorpseY = atof(row[3]); float CorpseZ = atof(row[4]); strn0cpy(time_of_death, row[5], sizeof(time_of_death)); bool corpseRezzed = atoi(row[6]); bool corpseBuried = atoi(row[7]); popupText += StringFormat("<tr><td>%s</td><td>%s</td><td>%8.0f</td><td>%8.0f</td><td>%8.0f</td><td>%s</td><td>%s</td><td>%s</td></tr>", charName, StaticGetZoneName(ZoneID), CorpseX, CorpseY, CorpseZ, time_of_death, corpseRezzed ? "Yes" : "No", corpseBuried ? "Yes" : "No"); if (popupText.size() > 4000) { Message(Chat::Red, "Unable to display all the results."); break; } } popupText += "</table>"; SendPopupToClient("Corpses", popupText.c_str()); } void Client::Handle_OP_GMServers(const EQApplicationPacket *app) { if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { auto pack = new ServerPacket(ServerOP_ZoneStatus, strlen(this->GetName()) + 2); memset(pack->pBuffer, (uint8)admin, 1); strcpy((char *)&pack->pBuffer[1], this->GetName()); worldserver.SendPacket(pack); safe_delete(pack); } return; } void Client::Handle_OP_GMSummon(const EQApplicationPacket *app) { if (app->size != sizeof(GMSummon_Struct)) { std::cout << "Wrong size on OP_GMSummon. Got: " << app->size << ", Expected: " << sizeof(GMSummon_Struct) << std::endl; return; } OPGMSummon(app); return; } void Client::Handle_OP_GMToggle(const EQApplicationPacket *app) { if (app->size != sizeof(GMToggle_Struct)) { std::cout << "Wrong size on OP_GMToggle.
Got: " << app->size << ", Expected: " << sizeof(GMToggle_Struct) << std::endl; return; } if (this->Admin() < minStatusToUseGMCommands) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/toggle"); return; } GMToggle_Struct *ts = (GMToggle_Struct *)app->pBuffer; if (ts->toggle == 0) { this->MessageString(Chat::White, TOGGLE_OFF); //Message(0, "Turning tells OFF"); tellsoff = true; } else if (ts->toggle == 1) { //Message(0, "Turning tells ON"); this->MessageString(Chat::White, TOGGLE_ON); tellsoff = false; } else { Message(0, "Unkown value in /toggle packet"); } UpdateWho(); return; } void Client::Handle_OP_GMTraining(const EQApplicationPacket *app) { if (app->size != sizeof(GMTrainee_Struct)) { LogDebug("Size mismatch in OP_GMTraining expected [{}] got [{}]", sizeof(GMTrainee_Struct), app->size); DumpPacket(app); return; } OPGMTraining(app); return; } void Client::Handle_OP_GMTrainSkill(const EQApplicationPacket *app) { if (app->size != sizeof(GMSkillChange_Struct)) { LogDebug("Size mismatch in OP_GMTrainSkill expected [{}] got [{}]", sizeof(GMSkillChange_Struct), app->size); DumpPacket(app); return; } OPGMTrainSkill(app); return; } void Client::Handle_OP_GMZoneRequest(const EQApplicationPacket *app) { if (app->size != sizeof(GMZoneRequest_Struct)) { std::cout << "Wrong size on OP_GMZoneRequest. Got: " << app->size << ", Expected: " << sizeof(GMZoneRequest_Struct) << std::endl; return; } if (this->Admin() < minStatusToBeGM) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/zone"); return; } GMZoneRequest_Struct* gmzr = (GMZoneRequest_Struct*)app->pBuffer; float target_x = -1, target_y = -1, target_z = -1, target_heading; int16 min_status = 0; uint8 min_level = 0; char target_zone[32]; uint16 zone_id = gmzr->zone_id; if (gmzr->zone_id == 0) zone_id = zonesummon_id; const char* zone_short_name = ZoneName(zone_id); if (zone_short_name == nullptr) target_zone[0] = 0; else strcpy(target_zone, zone_short_name); // this both loads the safe points and does a sanity check on zone name if (!content_db.GetSafePoints( target_zone, 0, &target_x, &target_y, &target_z, &target_heading, &min_status, &min_level )) { target_zone[0] = 0; } auto outapp = new EQApplicationPacket(OP_GMZoneRequest, sizeof(GMZoneRequest_Struct)); GMZoneRequest_Struct* gmzr2 = (GMZoneRequest_Struct*)outapp->pBuffer; strcpy(gmzr2->charname, this->GetName()); gmzr2->zone_id = gmzr->zone_id; gmzr2->x = target_x; gmzr2->y = target_y; gmzr2->z = target_z; gmzr2->heading = target_heading; // Next line stolen from ZoneChange as well... - This gives us a nicer message than the normal "zone is down" message... if (target_zone[0] != 0 && admin >= min_status && GetLevel() >= min_level) gmzr2->success = 1; else { std::cout << "GetZoneSafeCoords failed. 
zoneid = " << gmzr->zone_id << "; czone = " << zone->GetZoneID() << std::endl; gmzr2->success = 0; } QueuePacket(outapp); safe_delete(outapp); return; } void Client::Handle_OP_GMZoneRequest2(const EQApplicationPacket *app) { if (this->Admin() < minStatusToBeGM) { Message(Chat::Red, "Your account has been reported for hacking."); database.SetHackerFlag(this->account_name, this->name, "/zone"); return; } if (app->size < sizeof(uint32)) { LogError("OP size error: OP_GMZoneRequest2 expected:[{}] got:[{}]", sizeof(uint32), app->size); return; } uint32 zonereq = *((uint32 *)app->pBuffer); GoToSafeCoords(zonereq, 0); return; } void Client::Handle_OP_GroupAcknowledge(const EQApplicationPacket *app) { return; } void Client::Handle_OP_GroupCancelInvite(const EQApplicationPacket *app) { if (app->size != sizeof(GroupCancel_Struct)) { LogError("Invalid size for OP_GroupCancelInvite: Expected: [{}], Got: [{}]", sizeof(GroupCancel_Struct), app->size); return; } GroupCancel_Struct* gf = (GroupCancel_Struct*)app->pBuffer; Mob* inviter = entity_list.GetClientByName(gf->name1); if (inviter != nullptr) { if (inviter->IsClient()) inviter->CastToClient()->QueuePacket(app); } else { auto pack = new ServerPacket(ServerOP_GroupCancelInvite, sizeof(GroupCancel_Struct)); memcpy(pack->pBuffer, gf, sizeof(GroupCancel_Struct)); worldserver.SendPacket(pack); safe_delete(pack); } if (!GetMerc()) { database.SetGroupID(GetName(), 0, CharacterID(), false); } return; } void Client::Handle_OP_GroupDelete(const EQApplicationPacket *app) { //should check for leader, only they should be able to do this.. Group* group = GetGroup(); if (group) group->DisbandGroup(); if (LFP) UpdateLFP(); return; } void Client::Handle_OP_GroupDisband(const EQApplicationPacket *app) { if (app->size != sizeof(GroupGeneric_Struct)) { LogError("Invalid size for GroupGeneric_Struct: Expected: [{}], Got: [{}]", sizeof(GroupGeneric_Struct), app->size); return; } LogDebug("Member Disband Request from [{}]\n", GetName()); GroupGeneric_Struct* gd = (GroupGeneric_Struct*)app->pBuffer; Raid *raid = entity_list.GetRaidByClient(this); if (raid) { Mob* memberToDisband = nullptr; if (!raid->IsGroupLeader(GetName())) memberToDisband = this; else memberToDisband = GetTarget(); if (!memberToDisband) memberToDisband = entity_list.GetMob(gd->name2); if (!memberToDisband) memberToDisband = this; if (!memberToDisband->IsClient()) return; //we have a raid.. see if we're in a raid group uint32 grp = raid->GetGroup(memberToDisband->GetName()); bool wasGrpLdr = raid->members[raid->GetPlayerIndex(memberToDisband->GetName())].IsGroupLeader; if (grp < 12) { if (wasGrpLdr) { raid->SetGroupLeader(memberToDisband->GetName(), false); for (int x = 0; x < MAX_RAID_MEMBERS; x++) { if (raid->members[x].GroupNumber == grp) { if (strlen(raid->members[x].membername) > 0 && strcmp(raid->members[x].membername, memberToDisband->GetName()) != 0) { raid->SetGroupLeader(raid->members[x].membername); break; } } } } raid->MoveMember(memberToDisband->GetName(), 0xFFFFFFFF); raid->GroupUpdate(grp); //break //raid->SendRaidGroupRemove(memberToDisband->GetName(), grp); //raid->SendGroupUpdate(memberToDisband->CastToClient()); raid->SendGroupDisband(memberToDisband->CastToClient()); } //we're done return; } Group* group = GetGroup(); if (!group) return; #ifdef BOTS // this block is necessary to allow more control over controlling how bots are zoned or camped. 
if (Bot::GroupHasBot(group)) { if (group->IsLeader(this)) { if ((GetTarget() == 0 || GetTarget() == this) || (group->GroupCount() < 3)) { Bot::ProcessBotGroupDisband(this, std::string()); } else { Mob* tempMember = entity_list.GetMob(gd->name1); //Name1 is the target you are disbanding if (tempMember && tempMember->IsBot()) { tempMember->CastToBot()->RemoveBotFromGroup(tempMember->CastToBot(), group); if (LFP) { // If we are looking for players, update to show we are on our own now. UpdateLFP(); } return; //No need to continue from here we were removing a bot from party } } } } group = GetGroup(); if (!group) //We must recheck this here.. incase the final bot disbanded the party..otherwise we crash return; #endif Mob* memberToDisband = GetTarget(); if (!memberToDisband) memberToDisband = entity_list.GetMob(gd->name2); if (memberToDisband) { auto group2 = memberToDisband->GetGroup(); if (group2 != group) // they're not in our group! memberToDisband = this; } if (group->GroupCount() < 3) { group->DisbandGroup(); if (GetMerc()) GetMerc()->Suspend(); } else if (group->IsLeader(this) && GetTarget() == nullptr) { if (group->GroupCount() > 2 && GetMerc() && !GetMerc()->IsSuspended()) { group->DisbandGroup(); GetMerc()->MercJoinClientGroup(); } else { group->DisbandGroup(); if (GetMerc()) GetMerc()->Suspend(); } } else if (group->IsLeader(this) && (GetTarget() == this || memberToDisband == this)) { LeaveGroup(); if (GetMerc() && !GetMerc()->IsSuspended()) { GetMerc()->MercJoinClientGroup(); } } else { if (memberToDisband) { if (group->IsLeader(this)) { // the group leader can kick other members out of the group... if (memberToDisband->IsClient()) { group->DelMember(memberToDisband, false); Client* memberClient = memberToDisband->CastToClient(); Merc* memberMerc = memberToDisband->CastToClient()->GetMerc(); if (memberClient && memberMerc) { memberMerc->MercJoinClientGroup(); } } else if (memberToDisband->IsMerc()) { memberToDisband->CastToMerc()->Suspend(); } } else { // ...but other members can only remove themselves group->DelMember(this, false); if (GetMerc() && !GetMerc()->IsSuspended()) { GetMerc()->MercJoinClientGroup(); } } } else { LogError("Failed to remove player from group. Unable to find player named [{}] in player group", gd->name2); } } if (LFP) { // If we are looking for players, update to show we are on our own now. UpdateLFP(); } return; } void Client::Handle_OP_GroupFollow(const EQApplicationPacket *app) { Handle_OP_GroupFollow2(app); } void Client::Handle_OP_GroupFollow2(const EQApplicationPacket *app) { if (app->size != sizeof(GroupGeneric_Struct)) { LogError("Invalid size for OP_GroupFollow: Expected: [{}], Got: [{}]", sizeof(GroupGeneric_Struct), app->size); return; } if (LFP) { // If we were looking for players to start our own group, but we accept an invitation to another // group, turn LFP off. 
database.SetLFP(CharacterID(), false); worldserver.StopLFP(CharacterID()); } GroupGeneric_Struct* gf = (GroupGeneric_Struct*)app->pBuffer; Mob* inviter = entity_list.GetClientByName(gf->name1); // Inviter and Invitee are in the same zone if (inviter != nullptr && inviter->IsClient()) { if (GroupFollow(inviter->CastToClient())) { strn0cpy(gf->name1, inviter->GetName(), sizeof(gf->name1)); strn0cpy(gf->name2, GetName(), sizeof(gf->name2)); inviter->CastToClient()->QueuePacket(app);//notify inviter the client accepted } } else if (inviter == nullptr) { // Inviter is in another zone - Remove merc from group now if any LeaveGroup(); auto pack = new ServerPacket(ServerOP_GroupFollow, sizeof(ServerGroupFollow_Struct)); ServerGroupFollow_Struct *sgfs = (ServerGroupFollow_Struct *)pack->pBuffer; sgfs->CharacterID = CharacterID(); strn0cpy(sgfs->gf.name1, gf->name1, sizeof(sgfs->gf.name1)); strn0cpy(sgfs->gf.name2, gf->name2, sizeof(sgfs->gf.name2)); worldserver.SendPacket(pack); safe_delete(pack); } } void Client::Handle_OP_GroupInvite(const EQApplicationPacket *app) { //this seems to be the initial invite to form a group Handle_OP_GroupInvite2(app); } void Client::Handle_OP_GroupInvite2(const EQApplicationPacket *app) { if (app->size != sizeof(GroupInvite_Struct)) { LogError("Invalid size for OP_GroupInvite: Expected: [{}], Got: [{}]", sizeof(GroupInvite_Struct), app->size); return; } GroupInvite_Struct* gis = (GroupInvite_Struct*)app->pBuffer; Mob *Invitee = entity_list.GetMob(gis->invitee_name); if (Invitee == this) { MessageString(Chat::LightGray, GROUP_INVITEE_SELF); return; } if (Invitee) { if (Invitee->IsClient()) { if (Invitee->CastToClient()->MercOnlyOrNoGroup() && !Invitee->IsRaidGrouped()) { if (app->GetOpcode() == OP_GroupInvite2) { //Make a new packet using all the same information but make sure it's a fixed GroupInvite opcode so we //Don't have to deal with GroupFollow2 crap. 
auto outapp = new EQApplicationPacket(OP_GroupInvite, sizeof(GroupInvite_Struct)); memcpy(outapp->pBuffer, app->pBuffer, outapp->size); Invitee->CastToClient()->QueuePacket(outapp); safe_delete(outapp); return; } else { //The correct opcode, no reason to bother wasting time reconstructing the packet Invitee->CastToClient()->QueuePacket(app); } } } #ifdef BOTS else if (Invitee->IsBot()) { Bot::ProcessBotGroupInvite(this, std::string(Invitee->GetName())); } #endif } else { auto pack = new ServerPacket(ServerOP_GroupInvite, sizeof(GroupInvite_Struct)); memcpy(pack->pBuffer, gis, sizeof(GroupInvite_Struct)); worldserver.SendPacket(pack); safe_delete(pack); } return; } void Client::Handle_OP_GroupMakeLeader(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_GroupMakeLeader, app, GroupMakeLeader_Struct); GroupMakeLeader_Struct *gmls = (GroupMakeLeader_Struct *)app->pBuffer; Mob* NewLeader = entity_list.GetClientByName(gmls->NewLeader); Group* g = GetGroup(); if (NewLeader && g) { if (g->IsLeader(this)) g->ChangeLeader(NewLeader); else { LogDebug("Group /makeleader request originated from non-leader member: [{}]", GetName()); DumpPacket(app); } } } void Client::Handle_OP_GroupMentor(const EQApplicationPacket *app) { if (app->size != sizeof(GroupMentor_Struct)) { LogError("Wrong size: OP_GroupMentor, size=[{}], expected [{}]", app->size, sizeof(GroupMentor_Struct)); DumpPacket(app); return; } GroupMentor_Struct *gms = (GroupMentor_Struct *)app->pBuffer; gms->name[63] = '\0'; if (IsRaidGrouped()) { Raid *raid = GetRaid(); if (!raid) return; uint32 group_id = raid->GetGroup(this); if (group_id > 11) return; if (strlen(gms->name)) raid->SetGroupMentor(group_id, gms->percent, gms->name); else raid->ClearGroupMentor(group_id); return; } Group *group = GetGroup(); if (!group) return; if (strlen(gms->name)) group->SetGroupMentor(gms->percent, gms->name); else group->ClearGroupMentor(); return; } void Client::Handle_OP_GroupRoles(const EQApplicationPacket *app) { if (app->size != sizeof(GroupRole_Struct)) { LogError("Wrong size: OP_GroupRoles, size=[{}], expected [{}]", app->size, sizeof(GroupRole_Struct)); DumpPacket(app); return; } GroupRole_Struct *grs = (GroupRole_Struct*)app->pBuffer; Group *g = GetGroup(); if (!g) return; switch (grs->RoleNumber) { case 1: //Main Tank { if (grs->Toggle) g->DelegateMainTank(grs->Name1, grs->Toggle); else g->UnDelegateMainTank(grs->Name1, grs->Toggle); break; } case 2: //Main Assist { if (grs->Toggle) g->DelegateMainAssist(grs->Name1, grs->Toggle); else g->UnDelegateMainAssist(grs->Name1, grs->Toggle); break; } case 3: //Puller { if (grs->Toggle) g->DelegatePuller(grs->Name1, grs->Toggle); else g->UnDelegatePuller(grs->Name1, grs->Toggle); break; } default: break; } } void Client::Handle_OP_GroupUpdate(const EQApplicationPacket *app) { if (app->size != sizeof(GroupUpdate_Struct)) { LogDebug("Size mismatch on OP_GroupUpdate: got [{}] expected [{}]", app->size, sizeof(GroupUpdate_Struct)); DumpPacket(app); return; } GroupUpdate_Struct* gu = (GroupUpdate_Struct*)app->pBuffer; switch (gu->action) { case groupActMakeLeader: { Mob* newleader = entity_list.GetClientByName(gu->membername[0]); Group* group = this->GetGroup(); if (newleader && group) { // the client only sends this if it's the group leader, but check anyway if (group->IsLeader(this)) group->ChangeLeader(newleader); else { LogDebug("Group /makeleader request originated from non-leader member: [{}]", GetName()); DumpPacket(app); } } break; } default: { LogDebug("Received unhandled OP_GroupUpdate requesting 
action [{}]", gu->action); DumpPacket(app); return; } } } void Client::Handle_OP_GuildBank(const EQApplicationPacket *app) { if (!GuildBanks) return; if ((int)zone->GetZoneID() != RuleI(World, GuildBankZoneID)) { Message(Chat::Red, "The Guild Bank is not available in this zone."); return; } if (app->size < sizeof(uint32)) { LogError("Wrong size: OP_GuildBank, size=[{}], expected [{}]", app->size, sizeof(uint32)); DumpPacket(app); return; } char *Buffer = (char *)app->pBuffer; uint32 Action = VARSTRUCT_DECODE_TYPE(uint32, Buffer); uint32 sentAction = Action; if (!IsInAGuild()) { Message(Chat::Red, "You must be in a Guild to use the Guild Bank."); if (Action == GuildBankDeposit) GuildBankDepositAck(true, sentAction); else GuildBankAck(); return; } if (!IsGuildBanker()) { if ((Action != GuildBankDeposit) && (Action != GuildBankViewItem) && (Action != GuildBankWithdraw)) { LogError("Suspected hacking attempt on guild bank from [{}]", GetName()); GuildBankAck(); return; } } switch (Action) { case GuildBankPromote: { if (GuildBanks->IsAreaFull(GuildID(), GuildBankMainArea)) { MessageString(Chat::Red, GUILD_BANK_FULL); GuildBankDepositAck(true, sentAction); return; } GuildBankPromote_Struct *gbps = (GuildBankPromote_Struct*)app->pBuffer; int Slot = GuildBanks->Promote(GuildID(), gbps->Slot); if (Slot >= 0) { EQ::ItemInstance* inst = GuildBanks->GetItem(GuildID(), GuildBankMainArea, Slot, 1); if (inst) { MessageString(Chat::LightGray, GUILD_BANK_TRANSFERRED, inst->GetItem()->Name); safe_delete(inst); } } else Message(Chat::Red, "Unexpected error while moving item into Guild Bank."); GuildBankAck(); break; } case GuildBankViewItem: { GuildBankViewItem_Struct *gbvis = (GuildBankViewItem_Struct*)app->pBuffer; EQ::ItemInstance* inst = GuildBanks->GetItem(GuildID(), gbvis->Area, gbvis->SlotID, 1); if (!inst) break; SendItemPacket(0, inst, ItemPacketViewLink); safe_delete(inst); break; } case GuildBankDeposit: // Deposit Item { if (GuildBanks->IsAreaFull(GuildID(), GuildBankDepositArea)) { MessageString(Chat::Red, GUILD_BANK_FULL); GuildBankDepositAck(true, sentAction); return; } EQ::ItemInstance *CursorItemInst = GetInv().GetItem(EQ::invslot::slotCursor); bool Allowed = true; if (!CursorItemInst) { Message(Chat::Red, "No Item on the cursor."); GuildBankDepositAck(true, sentAction); return; } const EQ::ItemData* CursorItem = CursorItemInst->GetItem(); if (!CursorItem->NoDrop || CursorItemInst->IsAttuned()) { Allowed = false; } else if (CursorItemInst->IsNoneEmptyContainer()) { Allowed = false; } else if (CursorItemInst->IsAugmented()) { Allowed = false; } else if (CursorItem->NoRent == 0) { Allowed = false; } else if (CursorItem->LoreFlag && GuildBanks->HasItem(GuildID(), CursorItem->ID)) { Allowed = false; } if (!Allowed) { MessageString(Chat::Red, GUILD_BANK_CANNOT_DEPOSIT); GuildBankDepositAck(true, sentAction); return; } if (GuildBanks->AddItem(GuildID(), GuildBankDepositArea, CursorItem->ID, CursorItemInst->GetCharges(), GetName(), GuildBankBankerOnly, "")) { GuildBankDepositAck(false, sentAction); DeleteItemInInventory(EQ::invslot::slotCursor, 0, false); } break; } case GuildBankPermissions: { GuildBankPermissions_Struct *gbps = (GuildBankPermissions_Struct*)app->pBuffer; if (gbps->Permissions == 1) GuildBanks->SetPermissions(GuildID(), gbps->SlotID, gbps->Permissions, gbps->MemberName); else GuildBanks->SetPermissions(GuildID(), gbps->SlotID, gbps->Permissions, ""); GuildBankAck(); break; } case GuildBankWithdraw: { if (GetInv()[EQ::invslot::slotCursor]) { MessageString(Chat::Red, 
GUILD_BANK_EMPTY_HANDS); GuildBankAck(); break; } GuildBankWithdrawItem_Struct *gbwis = (GuildBankWithdrawItem_Struct*)app->pBuffer; EQ::ItemInstance* inst = GuildBanks->GetItem(GuildID(), gbwis->Area, gbwis->SlotID, gbwis->Quantity); if (!inst) { GuildBankAck(); break; } if (!IsGuildBanker() && !GuildBanks->AllowedToWithdraw(GuildID(), gbwis->Area, gbwis->SlotID, GetName())) { LogError("Suspected attempted hack on the guild bank from [{}]", GetName()); GuildBankAck(); safe_delete(inst); break; } if (CheckLoreConflict(inst->GetItem())) { MessageString(Chat::Red, DUP_LORE); GuildBankAck(); safe_delete(inst); break; } if (gbwis->Quantity > 0) { PushItemOnCursor(*inst); SendItemPacket(EQ::invslot::slotCursor, inst, ItemPacketLimbo); GuildBanks->DeleteItem(GuildID(), gbwis->Area, gbwis->SlotID, gbwis->Quantity); } else { Message(0, "Unable to withdraw 0 quantity of %s", inst->GetItem()->Name); } safe_delete(inst); GuildBankAck(); break; } case GuildBankSplitStacks: { if (GuildBanks->IsAreaFull(GuildID(), GuildBankMainArea)) MessageString(Chat::Red, GUILD_BANK_FULL); else { GuildBankWithdrawItem_Struct *gbwis = (GuildBankWithdrawItem_Struct*)app->pBuffer; GuildBanks->SplitStack(GuildID(), gbwis->SlotID, gbwis->Quantity); } GuildBankAck(); break; } case GuildBankMergeStacks: { GuildBankWithdrawItem_Struct *gbwis = (GuildBankWithdrawItem_Struct*)app->pBuffer; GuildBanks->MergeStacks(GuildID(), gbwis->SlotID); GuildBankAck(); break; } default: { Message(Chat::Red, "Unexpected GuildBank action."); LogError("Received unexpected guild bank action code [{}] from [{}]", Action, GetName()); } } } void Client::Handle_OP_GuildCreate(const EQApplicationPacket *app) { if (IsInAGuild()) { Message(Chat::Red, "You are already in a guild!"); return; } if (!RuleB(Guild, PlayerCreationAllowed)) { Message(Chat::Red, "This feature is disabled on this server. Contact a GM or post on your server message boards to create a guild."); return; } if ((Admin() < RuleI(Guild, PlayerCreationRequiredStatus)) || (GetLevel() < RuleI(Guild, PlayerCreationRequiredLevel)) || (database.GetTotalTimeEntitledOnAccount(AccountID()) < (unsigned int)RuleI(Guild, PlayerCreationRequiredTime))) { Message(Chat::Red, "Your status, level or time playing on this account are insufficient to use this feature."); return; } // The Underfoot client Guild Creation window will only allow a guild name of <= around 30 characters, but the packet is 64 bytes. Sanity check the // name anway. // char *GuildName = (char *)app->pBuffer; #ifdef DARWIN #if __DARWIN_C_LEVEL < 200809L if (strlen(GuildName) > 60) #else if (strnlen(GuildName, 64) > 60) #endif // __DARWIN_C_LEVEL #else if (strnlen(GuildName, 64) > 60) #endif // DARWIN { Message(Chat::Red, "Guild name too long."); return; } for (unsigned int i = 0; i < strlen(GuildName); ++i) { if (!isalpha(GuildName[i]) && (GuildName[i] != ' ')) { Message(Chat::Red, "Invalid character in Guild name."); return; } } int32 GuildCount = guild_mgr.DoesAccountContainAGuildLeader(AccountID()); if (GuildCount >= RuleI(Guild, PlayerCreationLimit)) { Message(Chat::Red, "You cannot create this guild because this account may only be leader of %i guilds.", RuleI(Guild, PlayerCreationLimit)); return; } if (guild_mgr.GetGuildIDByName(GuildName) != GUILD_NONE) { MessageString(Chat::Red, GUILD_NAME_IN_USE); return; } uint32 NewGuildID = guild_mgr.CreateGuild(GuildName, CharacterID()); LogGuilds("[{}]: Creating guild [{}] with leader [{}] via UF+ GUI. 
It was given id [{}]", GetName(), GuildName, CharacterID(), (unsigned long)NewGuildID); if (NewGuildID == GUILD_NONE) Message(Chat::Red, "Guild creation failed."); else { if (!guild_mgr.SetGuild(CharacterID(), NewGuildID, GUILD_LEADER)) Message(Chat::Red, "Unable to set guild leader's guild in the database. Contact a GM."); else { Message(Chat::Yellow, "You are now the leader of %s", GuildName); if (zone->GetZoneID() == RuleI(World, GuildBankZoneID) && GuildBanks) GuildBanks->SendGuildBank(this); SendGuildRanks(); } } } void Client::Handle_OP_GuildDelete(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildDelete"); if (!IsInAGuild() || !guild_mgr.IsGuildLeader(GuildID(), CharacterID())) Message(0, "You are not a guild leader or not in a guild."); else { LogGuilds("Deleting guild [{}] ([{}])", guild_mgr.GetGuildName(GuildID()), GuildID()); if (!guild_mgr.DeleteGuild(GuildID())) Message(0, "Guild delete failed."); else { Message(0, "Guild successfully deleted."); } } } void Client::Handle_OP_GuildDemote(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildDemote"); if (app->size != sizeof(GuildDemoteStruct)) { LogGuilds("Error: app size of [{}] != size of GuildDemoteStruct of [{}]\n", app->size, sizeof(GuildDemoteStruct)); return; } if (!IsInAGuild()) Message(0, "Error: You aren't in a guild!"); else if (!guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_DEMOTE)) Message(0, "You don't have permission to demote."); else if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { GuildDemoteStruct* demote = (GuildDemoteStruct*)app->pBuffer; CharGuildInfo gci; if (!guild_mgr.GetCharInfo(demote->target, gci)) { Message(0, "Unable to find '%s'", demote->target); return; } if (gci.guild_id != GuildID()) { Message(0, "You aren't in the same guild, what do you think you are doing?"); return; } if (gci.rank < 1) { Message(0, "%s cannot be demoted any further!", demote->target); return; } uint8 rank = gci.rank - 1; LogGuilds("Demoting [{}] ([{}]) from rank [{}] ([{}]) to [{}] ([{}]) in [{}] ([{}])", demote->target, gci.char_id, guild_mgr.GetRankName(GuildID(), gci.rank), gci.rank, guild_mgr.GetRankName(GuildID(), rank), rank, guild_mgr.GetGuildName(GuildID()), GuildID()); if (!guild_mgr.SetGuildRank(gci.char_id, rank)) { Message(Chat::Red, "Error while setting rank %d on '%s'.", rank, demote->target); return; } Message(0, "Successfully demoted %s to rank %d", demote->target, rank); } // SendGuildMembers(GuildID(), true); return; } void Client::Handle_OP_GuildInvite(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildInvite"); if (app->size != sizeof(GuildCommand_Struct)) { std::cout << "Wrong size: OP_GuildInvite, size=" << app->size << ", expected " << sizeof(GuildCommand_Struct) << std::endl; return; } GuildCommand_Struct* gc = (GuildCommand_Struct*)app->pBuffer; if (!IsInAGuild()) Message(0, "Error: You are not in a guild!"); else if (gc->officer > GUILD_MAX_RANK) Message(Chat::Red, "Invalid rank."); else if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { //ok, the invite opcode is also used for changing rank. Mob* invitee = entity_list.GetMob(gc->othername); if (!invitee) { Message(Chat::Red, "Prospective guild member %s must be in zone to perform guild operations on them.", gc->othername); return; } if (invitee->IsClient()) { Client* client = invitee->CastToClient(); //ok, figure out what they are trying to do.
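// Same-guild targets: a requested rank below the member's current rank is a demotion (needs GUILD_DEMOTE and is applied immediately), a higher rank is a promotion (needs GUILD_PROMOTE and is confirmed by the member via the forwarded invite packet), and an equal rank is rejected. Guildless targets fall through to the normal invite path below.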
if (client->GuildID() == GuildID()) { //they are already in this guild, must be a promotion or demotion if (gc->officer < client->GuildRank()) { //demotion if (!guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_DEMOTE)) { Message(Chat::Red, "You don't have permission to demote."); return; } //we could send this to the member and prompt them to see if they want to //be demoted (I guess), but I don't see a point in that. LogGuilds("[{}] ([{}]) is demoting [{}] ([{}]) to rank [{}] in guild [{}] ([{}])", GetName(), CharacterID(), client->GetName(), client->CharacterID(), gc->officer, guild_mgr.GetGuildName(GuildID()), GuildID()); if (!guild_mgr.SetGuildRank(client->CharacterID(), gc->officer)) { Message(Chat::Red, "There was an error during the demotion, DB may now be inconsistent."); return; } } else if (gc->officer > client->GuildRank()) { //promotion if (!guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_PROMOTE)) { Message(Chat::Red, "You don't have permission to promote."); return; } LogGuilds("[{}] ([{}]) is asking to promote [{}] ([{}]) to rank [{}] in guild [{}] ([{}])", GetName(), CharacterID(), client->GetName(), client->CharacterID(), gc->officer, guild_mgr.GetGuildName(GuildID()), GuildID()); //record the promotion with guild manager so we know it's valid when we get the reply guild_mgr.RecordInvite(client->CharacterID(), GuildID(), gc->officer); if (gc->guildeqid == 0) gc->guildeqid = GuildID(); LogGuilds("Sending OP_GuildInvite for promotion to [{}], length [{}]", client->GetName(), app->size); client->QueuePacket(app); } else { Message(Chat::Red, "That member is already that rank."); return; } } else if (!client->IsInAGuild()) { //they are not in this or any other guild, this is an invite // if (client->GetPendingGuildInvitation()) { Message(Chat::Red, "That person is already considering a guild invitation."); return; } if (!guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_INVITE)) { Message(Chat::Red, "You don't have permission to invite."); return; } LogGuilds("Inviting [{}] ([{}]) into guild [{}] ([{}])", client->GetName(), client->CharacterID(), guild_mgr.GetGuildName(GuildID()), GuildID()); //record the invite with guild manager so we know it's valid when we get the reply guild_mgr.RecordInvite(client->CharacterID(), GuildID(), gc->officer); if (gc->guildeqid == 0) gc->guildeqid = GuildID(); // Convert Membership Level between RoF and previous clients.
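// Presumably pre-RoF clients use 0 for the base member rank while RoF+ clients use 8, so the rank value is translated below when the inviter and invitee are on opposite sides of that client split.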
if (client->ClientVersion() < EQ::versions::ClientVersion::RoF && ClientVersion() >= EQ::versions::ClientVersion::RoF) { gc->officer = 0; } if (client->ClientVersion() >= EQ::versions::ClientVersion::RoF && ClientVersion() < EQ::versions::ClientVersion::RoF) { gc->officer = 8; } LogGuilds("Sending OP_GuildInvite for invite to [{}], length [{}]", client->GetName(), app->size); client->SetPendingGuildInvitation(true); client->QueuePacket(app); } else { //they are in some other guild Message(Chat::Red, "Player is in a guild."); return; } } #ifdef BOTS else if (invitee->IsBot()) { // The guild system is too tightly coupled with the character_data table so we have to avoid using much of the system Bot::ProcessGuildInvite(this, invitee->CastToBot()); return; } #endif } } void Client::Handle_OP_GuildInviteAccept(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildInviteAccept"); SetPendingGuildInvitation(false); if (app->size != sizeof(GuildInviteAccept_Struct)) { std::cout << "Wrong size: OP_GuildInviteAccept, size=" << app->size << ", expected " << sizeof(GuildJoin_Struct) << std::endl; return; } GuildInviteAccept_Struct* gj = (GuildInviteAccept_Struct*)app->pBuffer; uint32 guildrank = gj->response; if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { if (gj->response > 9) { //dont care if the check fails (since we dont know the rank), just want to clear the entry. guild_mgr.VerifyAndClearInvite(CharacterID(), gj->guildeqid, gj->response); worldserver.SendEmoteMessage(gj->inviter, 0, 0, "%s has declined to join the guild.", this->GetName()); return; } } if (gj->response == 5 || gj->response == 4) { //dont care if the check fails (since we dont know the rank), just want to clear the entry. guild_mgr.VerifyAndClearInvite(CharacterID(), gj->guildeqid, gj->response); worldserver.SendEmoteMessage(gj->inviter, 0, 0, "%s has declined to join the guild.", this->GetName()); return; } //uint32 tmpeq = gj->guildeqid; if (IsInAGuild() && gj->response == GuildRank()) Message(0, "Error: You're already in a guild!"); else if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { LogGuilds("Guild Invite Accept: guild [{}], response [{}], inviter [{}], person [{}]", gj->guildeqid, gj->response, gj->inviter, gj->newmember); //ok, the invite is also used for changing rank as well. Mob* inviter = entity_list.GetMob(gj->inviter); if (inviter && inviter->IsClient()) { Client* client = inviter->CastToClient(); // Convert Membership Level between RoF and previous clients. if (client->ClientVersion() < EQ::versions::ClientVersion::RoF && ClientVersion() >= EQ::versions::ClientVersion::RoF) { guildrank = 0; } if (client->ClientVersion() >= EQ::versions::ClientVersion::RoF && ClientVersion() < EQ::versions::ClientVersion::RoF) { guildrank = 8; } } //we dont really care a lot about what this packet means, as long as //it has been authorized with the guild manager if (!guild_mgr.VerifyAndClearInvite(CharacterID(), gj->guildeqid, guildrank)) { worldserver.SendEmoteMessage(gj->inviter, 0, 0, "%s has sent an invalid response to your invite!", GetName()); Message(Chat::Red, "Invalid invite response packet!"); return; } if (gj->guildeqid == GuildID()) { //only need to change rank. 
LogGuilds("Changing guild rank of [{}] ([{}]) to rank [{}] in guild [{}] ([{}])", GetName(), CharacterID(), gj->response, guild_mgr.GetGuildName(GuildID()), GuildID()); if (!guild_mgr.SetGuildRank(CharacterID(), gj->response)) { Message(Chat::Red, "There was an error during the rank change, DB may now be inconsistent."); return; } } else { LogGuilds("Adding [{}] ([{}]) to guild [{}] ([{}]) at rank [{}]", GetName(), CharacterID(), guild_mgr.GetGuildName(gj->guildeqid), gj->guildeqid, gj->response); //change guild and rank guildrank = gj->response; if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { if (gj->response == 8) { guildrank = 0; } } if (!guild_mgr.SetGuild(CharacterID(), gj->guildeqid, guildrank)) { Message(Chat::Red, "There was an error during the invite, DB may now be inconsistent."); return; } if (zone->GetZoneID() == RuleI(World, GuildBankZoneID) && GuildBanks) GuildBanks->SendGuildBank(this); } } } void Client::Handle_OP_GuildLeader(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildLeader"); if (app->size < 2) { LogGuilds("Invalid length [{}] on OP_GuildLeader", app->size); return; } app->pBuffer[app->size - 1] = 0; GuildMakeLeader* gml = (GuildMakeLeader*)app->pBuffer; if (!IsInAGuild()) Message(0, "Error: You arent in a guild!"); else if (GuildRank() != GUILD_LEADER) Message(0, "Error: You arent the guild leader!"); else if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { //NOTE: we could do cross-zone lookups here... Client* newleader = entity_list.GetClientByName(gml->target); if (newleader) { LogGuilds("Transfering leadership of [{}] ([{}]) to [{}] ([{}])", guild_mgr.GetGuildName(GuildID()), GuildID(), newleader->GetName(), newleader->CharacterID()); if (guild_mgr.SetGuildLeader(GuildID(), newleader->CharacterID())) { Message(0, "Successfully Transfered Leadership to %s.", gml->target); newleader->Message(Chat::Yellow, "%s has transfered the guild leadership into your hands.", GetName()); } else Message(0, "Could not change leadership at this time."); } else Message(0, "Failed to change leader, could not find target."); } // SendGuildMembers(GuildID(), true); return; } void Client::Handle_OP_GuildManageBanker(const EQApplicationPacket *app) { LogGuilds("Got OP_GuildManageBanker of len [{}]", app->size); if (app->size != sizeof(GuildManageBanker_Struct)) { LogGuilds("Error: app size of [{}] != size of OP_GuildManageBanker of [{}]\n", app->size, sizeof(GuildManageBanker_Struct)); return; } GuildManageBanker_Struct* gmb = (GuildManageBanker_Struct*)app->pBuffer; if (!IsInAGuild()) { Message(Chat::Red, "Your not in a guild!"); return; } CharGuildInfo gci; if (!guild_mgr.GetCharInfo(gmb->member, gci)) { Message(0, "Unable to find '%s'", gmb->member); return; } bool IsCurrentlyABanker = guild_mgr.GetBankerFlag(gci.char_id); bool IsCurrentlyAnAlt = guild_mgr.GetAltFlag(gci.char_id); bool NewBankerStatus = gmb->enabled & 0x01; bool NewAltStatus = gmb->enabled & 0x02; if ((IsCurrentlyABanker != NewBankerStatus) && !guild_mgr.IsGuildLeader(GuildID(), CharacterID())) { Message(Chat::Red, "Only the guild leader can assign guild bankers!"); return; } if (IsCurrentlyAnAlt != NewAltStatus) { bool IsAllowed = !strncasecmp(GetName(), gmb->member, strlen(GetName())) || (GuildRank() >= GUILD_OFFICER); if (!IsAllowed) { Message(Chat::Red, "You are not allowed to change the alt status of %s", gmb->member); return; } } if (gci.guild_id != GuildID()) { Message(0, "You aren't in the same guild, what do you think you are doing?"); return; } if 
(IsCurrentlyABanker != NewBankerStatus) { if (!guild_mgr.SetBankerFlag(gci.char_id, NewBankerStatus)) { Message(Chat::Red, "Error setting guild banker flag."); return; } if (NewBankerStatus) Message(0, "%s has been made a guild banker.", gmb->member); else Message(0, "%s is no longer a guild banker.", gmb->member); } if (IsCurrentlyAnAlt != NewAltStatus) { if (!guild_mgr.SetAltFlag(gci.char_id, NewAltStatus)) { Message(Chat::Red, "Error setting guild alt flag."); return; } if (NewAltStatus) Message(0, "%s has been marked as an alt.", gmb->member); else Message(0, "%s is no longer marked as an alt.", gmb->member); } } void Client::Handle_OP_GuildPeace(const EQApplicationPacket *app) { LogGuilds("Got OP_GuildPeace of len [{}]", app->size); return; } void Client::Handle_OP_GuildPromote(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildPromote"); if (app->size != sizeof(GuildPromoteStruct)) { LogGuilds("Error: app size of [{}] != size of GuildPromoteStruct of [{}]\n", app->size, sizeof(GuildPromoteStruct)); return; } if (!IsInAGuild()) Message(0, "Error: You aren't in a guild!"); else if (!guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_PROMOTE)) Message(0, "You don't have permission to promote."); else if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { GuildPromoteStruct* promote = (GuildPromoteStruct*)app->pBuffer; CharGuildInfo gci; if (!guild_mgr.GetCharInfo(promote->target, gci)) { Message(0, "Unable to find '%s'", promote->target); return; } if (gci.guild_id != GuildID()) { Message(0, "You aren't in the same guild, what do you think you are doing?"); return; } uint8 rank = gci.rank + 1; if (rank > GUILD_OFFICER) { Message(0, "You cannot promote someone to be guild leader. You must use /guildleader."); return; } LogGuilds("Promoting [{}] ([{}]) from rank [{}] ([{}]) to [{}] ([{}]) in [{}] ([{}])", promote->target, gci.char_id, guild_mgr.GetRankName(GuildID(), gci.rank), gci.rank, guild_mgr.GetRankName(GuildID(), rank), rank, guild_mgr.GetGuildName(GuildID()), GuildID()); if (!guild_mgr.SetGuildRank(gci.char_id, rank)) { Message(Chat::Red, "Error while setting rank %d on '%s'.", rank, promote->target); return; } Message(0, "Successfully promoted %s to rank %d", promote->target, rank); } return; } void Client::Handle_OP_GuildPublicNote(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildPublicNote"); if (app->size < sizeof(GuildUpdate_PublicNote)) { // client calls for a motd on login even if they aren't in a guild printf("Error: app size of %i < size of OP_GuildPublicNote of %zu\n", app->size, sizeof(GuildUpdate_PublicNote)); return; } GuildUpdate_PublicNote* gpn = (GuildUpdate_PublicNote*)app->pBuffer; CharGuildInfo gci; if (!guild_mgr.GetCharInfo(gpn->target, gci)) { Message(0, "Unable to find '%s'", gpn->target); return; } if (gci.guild_id != GuildID()) { Message(0, "You aren't in the same guild, what do you think you are doing?"); return; } LogGuilds("Setting public note on [{}] ([{}]) in guild [{}] ([{}]) to: [{}]", gpn->target, gci.char_id, guild_mgr.GetGuildName(GuildID()), GuildID(), gpn->note); if (!guild_mgr.SetPublicNote(gci.char_id, gpn->note)) { Message(Chat::Red, "Failed to set public note on %s", gpn->target); } else { Message(0, "Successfully changed public note on %s", gpn->target); } // SendGuildMembers(GuildID(), true); return; } void Client::Handle_OP_GuildRemove(const EQApplicationPacket *app) { LogGuilds("Received OP_GuildRemove"); if (app->size != sizeof(GuildCommand_Struct)) { std::cout << "Wrong size:
OP_GuildRemove, size=" << app->size << ", expected " << sizeof(GuildCommand_Struct) << std::endl; return; } GuildCommand_Struct* gc = (GuildCommand_Struct*)app->pBuffer; if (!IsInAGuild()) Message(0, "Error: You arent in a guild!"); // we can always remove ourself, otherwise, our rank needs remove permissions else if (strcasecmp(gc->othername, GetName()) != 0 && !guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_REMOVE)) Message(0, "You dont have permission to remove guild members."); else if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { #ifdef BOTS if (Bot::ProcessGuildRemoval(this, gc->othername)) return; #endif uint32 char_id; Client* client = entity_list.GetClientByName(gc->othername); if (client) { if (!client->IsInGuild(GuildID())) { Message(0, "You aren't in the same guild, what do you think you are doing?"); return; } char_id = client->CharacterID(); LogGuilds("Removing [{}] ([{}]) from guild [{}] ([{}])", client->GetName(), client->CharacterID(), guild_mgr.GetGuildName(GuildID()), GuildID()); } else { CharGuildInfo gci; if (!guild_mgr.GetCharInfo(gc->othername, gci)) { Message(0, "Unable to find '%s'", gc->othername); return; } if (gci.guild_id != GuildID()) { Message(0, "You aren't in the same guild, what do you think you are doing?"); return; } char_id = gci.char_id; LogGuilds("Removing remote/offline [{}] ([{}]) into guild [{}] ([{}])", gci.char_name.c_str(), gci.char_id, guild_mgr.GetGuildName(GuildID()), GuildID()); } if (!guild_mgr.SetGuild(char_id, GUILD_NONE, 0)) { auto outapp = new EQApplicationPacket(OP_GuildManageRemove, sizeof(GuildManageRemove_Struct)); GuildManageRemove_Struct* gm = (GuildManageRemove_Struct*)outapp->pBuffer; gm->guildeqid = GuildID(); strcpy(gm->member, gc->othername); Message(0, "%s successfully removed from your guild.", gc->othername); entity_list.QueueClientsGuild(this, outapp, false, GuildID()); safe_delete(outapp); } else Message(0, "Unable to remove %s from your guild.", gc->othername); } // SendGuildMembers(GuildID(), true); return; } void Client::Handle_OP_GuildStatus(const EQApplicationPacket *app) { if (app->size != sizeof(GuildStatus_Struct)) { LogDebug("Size mismatch in OP_GuildStatus expected [{}] got [{}]", sizeof(GuildStatus_Struct), app->size); DumpPacket(app); return; } GuildStatus_Struct *gss = (GuildStatus_Struct*)app->pBuffer; Client *c = entity_list.GetClientByName(gss->Name); if (!c) { MessageString(Chat::LightGray, TARGET_PLAYER_FOR_GUILD_STATUS); return; } uint32 TargetGuildID = c->GuildID(); if (TargetGuildID == GUILD_NONE) { MessageString(Chat::LightGray, NOT_IN_A_GUILD, c->GetName()); return; } const char *GuildName = guild_mgr.GetGuildName(TargetGuildID); if (!GuildName) return; bool IsLeader = guild_mgr.CheckPermission(TargetGuildID, c->GuildRank(), GUILD_PROMOTE); bool IsOfficer = guild_mgr.CheckPermission(TargetGuildID, c->GuildRank(), GUILD_INVITE); if ((TargetGuildID == GuildID()) && (c != this)) { if (IsLeader) MessageString(Chat::LightGray, LEADER_OF_YOUR_GUILD, c->GetName()); else if (IsOfficer) MessageString(Chat::LightGray, OFFICER_OF_YOUR_GUILD, c->GetName()); else MessageString(Chat::LightGray, MEMBER_OF_YOUR_GUILD, c->GetName()); return; } if (IsLeader) MessageString(Chat::LightGray, LEADER_OF_X_GUILD, c->GetName(), GuildName); else if (IsOfficer) MessageString(Chat::LightGray, OFFICER_OF_X_GUILD, c->GetName(), GuildName); else MessageString(Chat::LightGray, MEMBER_OF_X_GUILD, c->GetName(), GuildName); } void Client::Handle_OP_GuildUpdateURLAndChannel(const 
EQApplicationPacket *app) { if (app->size != sizeof(GuildUpdateURLAndChannel_Struct)) { LogDebug("Size mismatch in OP_GuildUpdateURLAndChannel expected [{}] got [{}]", sizeof(GuildUpdateURLAndChannel_Struct), app->size); DumpPacket(app); return; } GuildUpdateURLAndChannel_Struct *guuacs = (GuildUpdateURLAndChannel_Struct*)app->pBuffer; if (!IsInAGuild()) return; if (!guild_mgr.IsGuildLeader(GuildID(), CharacterID())) { Message(Chat::Red, "Only the guild leader can change the Channel or URL.!"); return; } if (guuacs->Action == 0) guild_mgr.SetGuildURL(GuildID(), guuacs->Text); else guild_mgr.SetGuildChannel(GuildID(), guuacs->Text); } void Client::Handle_OP_GuildWar(const EQApplicationPacket *app) { LogGuilds("Got OP_GuildWar of len [{}]", app->size); return; } void Client::Handle_OP_Heartbeat(const EQApplicationPacket *app) { return; } void Client::Handle_OP_Hide(const EQApplicationPacket *app) { // newer client respond to OP_CancelSneakHide with OP_Hide with a size of 4 and 0 data if (app->size == 4) { auto data = app->ReadUInt32(0); if (data) LogDebug("Got OP_Hide with unexpected data [{}]", data); return; } if (!HasSkill(EQ::skills::SkillHide) && GetSkill(EQ::skills::SkillHide) == 0) { //Can not be able to train hide but still have it from racial though return; //You cannot hide if you do not have hide } if (!p_timers.Expired(&database, pTimerHide, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } int reuse = HideReuseTime - GetSkillReuseTime(EQ::skills::SkillHide); if (reuse < 1) reuse = 1; p_timers.Start(pTimerHide, reuse - 1); float hidechance = ((GetSkill(EQ::skills::SkillHide) / 250.0f) + .25) * 100; float random = zone->random.Real(0, 100); CheckIncreaseSkill(EQ::skills::SkillHide, nullptr, 5); if (random < hidechance) { auto outapp = new EQApplicationPacket(OP_SpawnAppearance, sizeof(SpawnAppearance_Struct)); SpawnAppearance_Struct* sa_out = (SpawnAppearance_Struct*)outapp->pBuffer; sa_out->spawn_id = GetID(); sa_out->type = 0x03; sa_out->parameter = 1; entity_list.QueueClients(this, outapp, true); safe_delete(outapp); if (spellbonuses.ShroudofStealth || aabonuses.ShroudofStealth || itembonuses.ShroudofStealth) { improved_hidden = true; hidden = true; } else hidden = true; tmHidden = Timer::GetCurrentTime(); } if (GetClass() == ROGUE) { auto outapp = new EQApplicationPacket(OP_SimpleMessage, sizeof(SimpleMessage_Struct)); SimpleMessage_Struct *msg = (SimpleMessage_Struct *)outapp->pBuffer; msg->color = 0x010E; Mob *evadetar = GetTarget(); if (!auto_attack && (evadetar && evadetar->CheckAggro(this) && evadetar->IsNPC())) { if (zone->random.Int(0, 260) < (int)GetSkill(EQ::skills::SkillHide)) { msg->string_id = EVADE_SUCCESS; RogueEvade(evadetar); } else { msg->string_id = EVADE_FAIL; } } else { if (hidden) { msg->string_id = HIDE_SUCCESS; } else { msg->string_id = HIDE_FAIL; } } FastQueuePacket(&outapp); } return; } void Client::Handle_OP_HideCorpse(const EQApplicationPacket *app) { // New OPCode for SOD+ as /hidecorpse is handled serverside now. 
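	// The packet carries the client's corpse-hide filter. HideCorpseLooted actions are ignored here;
	// otherwise the filter is applied to corpses already in view via entity_list.HideCorpses() and the
	// mode is remembered so corpses that spawn later are filtered the same way.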
// if (app->size != sizeof(HideCorpse_Struct)) { LogDebug("Size mismatch in OP_HideCorpse expected [{}] got [{}]", sizeof(HideCorpse_Struct), app->size); DumpPacket(app); return; } HideCorpse_Struct *hcs = (HideCorpse_Struct*)app->pBuffer; if (hcs->Action == HideCorpseLooted) return; if ((HideCorpseMode == HideCorpseNone) && (hcs->Action == HideCorpseNone)) return; entity_list.HideCorpses(this, HideCorpseMode, hcs->Action); HideCorpseMode = hcs->Action; } void Client::Handle_OP_Ignore(const EQApplicationPacket *app) { return; } void Client::Handle_OP_Illusion(const EQApplicationPacket *app) { if (app->size != sizeof(Illusion_Struct)) { LogError("Received invalid sized OP_Illusion: got [{}], expected [{}]", app->size, sizeof(Illusion_Struct)); DumpPacket(app); return; } if (!GetGM()) { database.SetMQDetectionFlag(this->AccountName(), this->GetName(), "OP_Illusion sent by non Game Master.", zone->GetShortName()); return; } Illusion_Struct* bnpc = (Illusion_Struct*)app->pBuffer; //these need to be implemented /* texture = bnpc->texture; helmtexture = bnpc->helmtexture; luclinface = bnpc->luclinface; */ race = bnpc->race; size = 0; entity_list.QueueClients(this, app); return; } void Client::Handle_OP_InspectAnswer(const EQApplicationPacket *app) { if (app->size != sizeof(InspectResponse_Struct)) { LogError("Wrong size: OP_InspectAnswer, size=[{}], expected [{}]", app->size, sizeof(InspectResponse_Struct)); return; } //Fills the app sent from client. EQApplicationPacket* outapp = app->Copy(); InspectResponse_Struct* insr = (InspectResponse_Struct*)outapp->pBuffer; Mob* tmp = entity_list.GetMob(insr->TargetID); const EQ::ItemData* item = nullptr; int ornamentationAugtype = RuleI(Character, OrnamentationAugmentType); for (int16 L = EQ::invslot::EQUIPMENT_BEGIN; L <= EQ::invslot::EQUIPMENT_END; L++) { const EQ::ItemInstance* inst = GetInv().GetItem(L); item = inst ? 
inst->GetItem() : nullptr; if (item) { strcpy(insr->itemnames[L], item->Name); if (inst && inst->GetOrnamentationAug(ornamentationAugtype)) { const EQ::ItemData *aug_item = inst->GetOrnamentationAug(ornamentationAugtype)->GetItem(); insr->itemicons[L] = aug_item->Icon; } else if (inst->GetOrnamentationIcon()) { insr->itemicons[L] = inst->GetOrnamentationIcon(); } else { insr->itemicons[L] = item->Icon; } } else { insr->itemicons[L] = 0xFFFFFFFF; } } InspectMessage_Struct* newmessage = (InspectMessage_Struct*)insr->text; InspectMessage_Struct& playermessage = this->GetInspectMessage(); memcpy(&playermessage, newmessage, sizeof(InspectMessage_Struct)); database.SaveCharacterInspectMessage(this->CharacterID(), &playermessage); if (tmp != 0 && tmp->IsClient()) { tmp->CastToClient()->QueuePacket(outapp); } // Send answer to requester return; } void Client::Handle_OP_InspectMessageUpdate(const EQApplicationPacket *app) { if (app->size != sizeof(InspectMessage_Struct)) { LogError("Wrong size: OP_InspectMessageUpdate, size=[{}], expected [{}]", app->size, sizeof(InspectMessage_Struct)); return; } InspectMessage_Struct* newmessage = (InspectMessage_Struct*)app->pBuffer; InspectMessage_Struct& playermessage = this->GetInspectMessage(); memcpy(&playermessage, newmessage, sizeof(InspectMessage_Struct)); database.SaveCharacterInspectMessage(this->CharacterID(), &playermessage); } void Client::Handle_OP_InspectRequest(const EQApplicationPacket *app) { if (app->size != sizeof(Inspect_Struct)) { LogError("Wrong size: OP_InspectRequest, size=[{}], expected [{}]", app->size, sizeof(Inspect_Struct)); return; } Inspect_Struct* ins = (Inspect_Struct*)app->pBuffer; Mob* tmp = entity_list.GetMob(ins->TargetID); if (tmp != 0 && tmp->IsClient()) { if (tmp->CastToClient()->ClientVersion() < EQ::versions::ClientVersion::SoF) { tmp->CastToClient()->QueuePacket(app); } // Send request to target // Inspecting an SoF or later client will make the server handle the request else { ProcessInspectRequest(tmp->CastToClient(), this); } } #ifdef BOTS if (tmp != 0 && tmp->IsBot()) { Bot::ProcessBotInspectionRequest(tmp->CastToBot(), this); } #endif return; } void Client::Handle_OP_InstillDoubt(const EQApplicationPacket *app) { //packet is empty as of 12/14/04 if (!p_timers.Expired(&database, pTimerInstillDoubt, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } p_timers.Start(pTimerInstillDoubt, InstillDoubtReuseTime - 1); InstillDoubt(GetTarget()); return; } void Client::Handle_OP_ItemLinkClick(const EQApplicationPacket *app) { if (app->size != sizeof(ItemViewRequest_Struct)) { LogError("Wrong size on OP_ItemLinkClick. Got: [{}], Expected: [{}]", app->size, sizeof(ItemViewRequest_Struct)); DumpPacket(app); return; } ItemViewRequest_Struct *ivrs = (ItemViewRequest_Struct *)app->pBuffer; // todo: verify ivrs->link_hash based on a rule, in case we don't care about people being able to sniff data // from the item DB const EQ::ItemData *item = database.GetItem(ivrs->item_id); if (!item) { if (ivrs->item_id != SAYLINK_ITEM_ID) { Message(Chat::Red, "Error: The item for the link you have clicked on does not exist!"); return; } // This new scheme will shuttle the ID in the first augment for non-silent links // and the second augment for silent. std::string response = ""; bool silentsaylink = ivrs->augments[1] > 0 ? true : false; int sayid = silentsaylink ? 
ivrs->augments[1] : ivrs->augments[0]; if (sayid > 0) { std::string query = StringFormat("SELECT `phrase` FROM saylink WHERE `id` = '%i'", sayid); auto results = database.QueryDatabase(query); if (!results.Success()) { Message(Chat::Red, "Error: The saylink (%s) was not found in the database.", response.c_str()); return; } if (results.RowCount() != 1) { Message(Chat::Red, "Error: The saylink (%s) was not found in the database.", response.c_str()); return; } auto row = results.begin(); response = row[0]; } if ((response).size() > 0) { if (!mod_saylink(response, silentsaylink)) { return; } if (GetTarget() && GetTarget()->IsNPC()) { if (silentsaylink) { parse->EventNPC(EVENT_SAY, GetTarget()->CastToNPC(), this, response.c_str(), 0); if (response[0] == '#' && parse->PlayerHasQuestSub(EVENT_COMMAND)) { parse->EventPlayer(EVENT_COMMAND, this, response.c_str(), 0); } #ifdef BOTS else if (response[0] == '^' && parse->PlayerHasQuestSub(EVENT_BOT_COMMAND)) { parse->EventPlayer(EVENT_BOT_COMMAND, this, response.c_str(), 0); } #endif else { parse->EventPlayer(EVENT_SAY, this, response.c_str(), 0); } } else { Message(Chat::LightGray, "You say, '%s'", response.c_str()); ChannelMessageReceived(8, 0, 100, response.c_str()); } return; } else { if (silentsaylink) { if (response[0] == '#' && parse->PlayerHasQuestSub(EVENT_COMMAND)) { parse->EventPlayer(EVENT_COMMAND, this, response.c_str(), 0); } #ifdef BOTS else if (response[0] == '^' && parse->PlayerHasQuestSub(EVENT_BOT_COMMAND)) { parse->EventPlayer(EVENT_BOT_COMMAND, this, response.c_str(), 0); } #endif else { parse->EventPlayer(EVENT_SAY, this, response.c_str(), 0); } } else { Message(Chat::LightGray, "You say, '%s'", response.c_str()); ChannelMessageReceived(8, 0, 100, response.c_str()); } return; } } else { Message(Chat::Red, "Error: Say Link not found or is too long."); return; } } EQ::ItemInstance *inst = database.CreateItem(item, item->MaxCharges, ivrs->augments[0], ivrs->augments[1], ivrs->augments[2], ivrs->augments[3], ivrs->augments[4], ivrs->augments[5]); if (inst) { SendItemPacket(0, inst, ItemPacketViewLink); safe_delete(inst); } return; } void Client::Handle_OP_ItemLinkResponse(const EQApplicationPacket *app) { if (app->size != sizeof(LDONItemViewRequest_Struct)) { LogError("OP size error: OP_ItemLinkResponse expected:[{}] got:[{}]", sizeof(LDONItemViewRequest_Struct), app->size); return; } LDONItemViewRequest_Struct* item = (LDONItemViewRequest_Struct*)app->pBuffer; EQ::ItemInstance* inst = database.CreateItem(item->item_id); if (inst) { SendItemPacket(0, inst, ItemPacketViewLink); safe_delete(inst); } return; } void Client::Handle_OP_ItemName(const EQApplicationPacket *app) { if (app->size != sizeof(ItemNamePacket_Struct)) { LogError("Invalid size for ItemNamePacket_Struct: Expected: [{}], Got: [{}]", sizeof(ItemNamePacket_Struct), app->size); return; } ItemNamePacket_Struct *p = (ItemNamePacket_Struct*)app->pBuffer; const EQ::ItemData *item = nullptr; if ((item = database.GetItem(p->item_id)) != nullptr) { auto outapp = new EQApplicationPacket(OP_ItemName, sizeof(ItemNamePacket_Struct)); p = (ItemNamePacket_Struct*)outapp->pBuffer; memset(p, 0, sizeof(ItemNamePacket_Struct)); strcpy(p->name, item->Name); FastQueuePacket(&outapp); } return; } void Client::Handle_OP_ItemPreview(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_ItemPreview, app, ItemPreview_Struct); ItemPreview_Struct *ips = (ItemPreview_Struct *)app->pBuffer; const EQ::ItemData* item = database.GetItem(ips->itemid); if (item) { auto outapp = new 
EQApplicationPacket(OP_ItemPreview, strlen(item->Name) + strlen(item->Lore) + strlen(item->IDFile) + 898); int spacer; for (spacer = 0; spacer < 16; spacer++) { outapp->WriteUInt8(48); } outapp->WriteUInt16(256); for (spacer = 0; spacer < 7; spacer++) { outapp->WriteUInt8(0); } for (spacer = 0; spacer < 7; spacer++) { outapp->WriteUInt8(255); } outapp->WriteUInt32(0); outapp->WriteUInt32(1); outapp->WriteUInt32(0); outapp->WriteUInt8(237); // Seems to be some kind of counter? increases by 1 for each preview that you do. outapp->WriteUInt16(2041); //F907 for (spacer = 0; spacer < 36; spacer++) { outapp->WriteUInt8(0); } for (spacer = 0; spacer < 4; spacer++) { outapp->WriteUInt8(255); } for (spacer = 0; spacer < 9; spacer++) { outapp->WriteUInt8(0); } for (spacer = 0; spacer < 5; spacer++) { outapp->WriteUInt8(255); } for (spacer = 0; spacer < 5; spacer++) { outapp->WriteUInt8(0); } outapp->WriteString(item->Name); outapp->WriteString(item->Lore); outapp->WriteUInt8(0); outapp->WriteUInt32(ips->itemid); outapp->WriteUInt32(item->Weight); outapp->WriteUInt8(item->NoRent); outapp->WriteUInt8(item->NoDrop); outapp->WriteUInt8(item->Attuneable); outapp->WriteUInt8(item->Size); outapp->WriteUInt32(item->Slots); outapp->WriteUInt32(item->Price); outapp->WriteUInt32(item->Icon); outapp->WriteUInt8(0); //Unknown? outapp->WriteUInt8(0); //Placeable flag? outapp->WriteUInt32(item->BenefitFlag); outapp->WriteUInt8(item->Tradeskills); outapp->WriteUInt8(item->CR); outapp->WriteUInt8(item->DR); outapp->WriteUInt8(item->PR); outapp->WriteUInt8(item->MR); outapp->WriteUInt8(item->FR); outapp->WriteUInt8(item->AStr); outapp->WriteUInt8(item->ASta); outapp->WriteUInt8(item->AAgi); outapp->WriteUInt8(item->ADex); outapp->WriteUInt8(item->ACha); outapp->WriteUInt8(item->AInt); outapp->WriteUInt8(item->AWis); outapp->WriteSInt32(item->HP); outapp->WriteSInt32(item->Mana); outapp->WriteSInt32(item->Endur); outapp->WriteSInt32(item->AC); outapp->WriteUInt32(item->Regen); outapp->WriteUInt32(item->ManaRegen); outapp->WriteSInt32(item->EnduranceRegen); outapp->WriteUInt32(item->Classes); outapp->WriteUInt32(item->Races); outapp->WriteUInt32(item->Deity); outapp->WriteUInt32(item->SkillModValue); outapp->WriteUInt32(0); //SkillModValue outapp->WriteUInt32(item->SkillModType); outapp->WriteUInt32(0); //SkillModExtra outapp->WriteUInt32(item->BaneDmgRace); outapp->WriteUInt32(item->BaneDmgBody); outapp->WriteUInt32(item->BaneDmgRaceAmt); outapp->WriteUInt32(item->BaneDmgAmt); outapp->WriteUInt8(item->Magic); outapp->WriteUInt32(item->CastTime_); outapp->WriteUInt32(item->ReqLevel); outapp->WriteUInt32(item->RecLevel); outapp->WriteUInt32(item->RecSkill); outapp->WriteUInt32(item->BardType); outapp->WriteUInt32(item->BardValue); outapp->WriteUInt8(item->Light); outapp->WriteUInt8(item->Delay); outapp->WriteUInt8(item->ElemDmgType); outapp->WriteUInt8(item->ElemDmgAmt); outapp->WriteUInt8(item->Range); outapp->WriteUInt32(item->Damage); outapp->WriteUInt32(item->Color); outapp->WriteUInt32(0); // Prestige outapp->WriteUInt8(item->ItemType); outapp->WriteUInt32(item->Material); outapp->WriteUInt32(0); //unknown outapp->WriteUInt32(item->EliteMaterial); outapp->WriteUInt32(item->HerosForgeModel); outapp->WriteUInt32(0); // unknown outapp->WriteUInt32(0); //This is unknown057 from lucy for (spacer = 0; spacer < 77; spacer++) { //More Item stats, but some seem to be off based on packet check outapp->WriteUInt8(0); } outapp->WriteUInt32(0xFFFFFFFF); //Unknown but always seen as FF FF FF FF outapp->WriteUInt32(0); //Unknown 
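		// What follows mirrors observed live packet captures: the six augment slot definitions, the RoF
		// sixth aug slot, the LDoN purchase fields, tribute favor values, and long runs of constant or
		// unknown bytes that the client expects in the preview payload.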
for (spacer = 0; spacer < 6; spacer++) { //Augment stuff outapp->WriteUInt32(item->AugSlotType[spacer]); outapp->WriteUInt8(item->AugSlotVisible[spacer]); outapp->WriteUInt8(item->AugSlotUnk2[spacer]); } outapp->WriteUInt32(0); //New RoF 6th Aug Slot outapp->WriteUInt8(1); //^ outapp->WriteUInt8(0); //^^ outapp->WriteUInt32(item->LDoNSold); outapp->WriteUInt32(item->LDoNTheme); outapp->WriteUInt32(item->LDoNPrice); outapp->WriteUInt32(item->LDoNSellBackRate); for (spacer = 0; spacer < 11; spacer++) { //unknowns outapp->WriteUInt8(0); } outapp->WriteUInt32(0xFFFFFFFF); //Unknown but always seen as FF FF FF FF outapp->WriteUInt16(0); //Unknown outapp->WriteUInt32(item->Favor); // Tribute for (spacer = 0; spacer < 17; spacer++) { //unknowns outapp->WriteUInt8(0); } outapp->WriteUInt32(item->GuildFavor); // Tribute outapp->WriteUInt32(0); //Unknown outapp->WriteUInt32(0xFFFFFFFF); //Unknown but always seen as FF FF FF FF for (spacer = 0; spacer < 11; spacer++) { //unknowns outapp->WriteUInt8(0); } outapp->WriteUInt8(1); for (spacer = 0; spacer < 25; spacer++) { //unknowns outapp->WriteUInt8(0); } for (spacer = 0; spacer < 304; spacer++) { //Cast stuff and whole bunch of unknowns outapp->WriteUInt8(0); } outapp->WriteUInt8(142); // Always seen not in the item structure though 8E outapp->WriteUInt32(0); //unknown outapp->WriteUInt32(1); // Always seen as 1 outapp->WriteUInt32(0); //unknown outapp->WriteUInt32(0xCDCCCC3D); // Unknown outapp->WriteUInt32(0); outapp->WriteUInt16(8256); //0x4020/8256 outapp->WriteUInt16(0); outapp->WriteUInt32(0xFFFFFFFF); //Unknown but always seen as FF FF FF FF outapp->WriteUInt16(0); outapp->WriteUInt32(0xFFFFFFFF); //Unknown but always seen as FF FF FF FF outapp->WriteUInt32(0); //unknown outapp->WriteUInt32(0); //unknown outapp->WriteUInt16(0); //unknown outapp->WriteUInt32(32831); //0x3F80 for (spacer = 0; spacer < 24; spacer++) { //whole bunch of unknowns always 0's outapp->WriteUInt8(0); } outapp->WriteUInt8(1); for (spacer = 0; spacer < 6; spacer++) { //whole bunch of unknowns always 0's outapp->WriteUInt8(0); } QueuePacket(outapp); safe_delete(outapp); } else return; } void Client::Handle_OP_ItemVerifyRequest(const EQApplicationPacket *app) { using EQ::spells::CastingSlot; if (app->size != sizeof(ItemVerifyRequest_Struct)) { LogError("OP size error: OP_ItemVerifyRequest expected:[{}] got:[{}]", sizeof(ItemVerifyRequest_Struct), app->size); return; } ItemVerifyRequest_Struct* request = (ItemVerifyRequest_Struct*)app->pBuffer; int32 slot_id; int32 target_id; int32 spell_id = 0; slot_id = request->slot; target_id = request->target; cheat_manager.ProcessItemVerifyRequest(request->slot, request->target); EQApplicationPacket *outapp = nullptr; outapp = new EQApplicationPacket(OP_ItemVerifyReply, sizeof(ItemVerifyReply_Struct)); ItemVerifyReply_Struct* reply = (ItemVerifyReply_Struct*)outapp->pBuffer; reply->slot = slot_id; reply->target = target_id; QueuePacket(outapp); safe_delete(outapp); if (IsAIControlled()) { this->MessageString(Chat::Red, NOT_IN_CONTROL); return; } if (slot_id < 0) { LogDebug("Unknown slot being used by [{}], slot being used is: [{}]", GetName(), request->slot); return; } const EQ::ItemInstance* inst = m_inv[slot_id]; if (!inst) { Message(0, "Error: item not found in inventory slot #%i", slot_id); DeleteItemInInventory(slot_id, 0, true); return; } const EQ::ItemData* item = inst->GetItem(); if (!item) { Message(0, "Error: item not found in inventory slot #%i", slot_id); DeleteItemInInventory(slot_id, 0, true); return; } spell_id = 
item->Click.Effect; if ( spell_id > 0 && ( !IsValidSpell(spell_id) || casting_spell_id || delaytimer || spellend_timer.Enabled() || IsStunned() || IsFeared() || IsMezzed() || DivineAura() || (spells[spell_id].targettype == ST_Ring) || (IsSilenced() && !IsDiscipline(spell_id)) || (IsAmnesiad() && IsDiscipline(spell_id)) || (IsDetrimentalSpell(spell_id) && !zone->CanDoCombat()) || (inst->IsScaling() && inst->GetExp() <= 0) // charms don't have spells when less than 0 ) ) { SendSpellBarEnable(spell_id); return; } // Modern clients don't require pet targeted for item clicks that are ST_Pet if (spell_id > 0 && (spells[spell_id].targettype == ST_Pet || spells[spell_id].targettype == ST_SummonedPet)) target_id = GetPetID(); LogDebug("OP ItemVerifyRequest: spell=[{}], target=[{}], inv=[{}]", spell_id, target_id, slot_id); if (m_inv.SupportsClickCasting(slot_id) || ((item->ItemType == EQ::item::ItemTypePotion || item->PotionBelt) && m_inv.SupportsPotionBeltCasting(slot_id))) // sanity check { EQ::ItemInstance* p_inst = (EQ::ItemInstance*)inst; parse->EventItem(EVENT_ITEM_CLICK, this, p_inst, nullptr, "", slot_id); inst = m_inv[slot_id]; if (!inst) { return; } int r; bool tryaug = false; EQ::ItemInstance* clickaug = nullptr; EQ::ItemData* augitem = nullptr; for (r = EQ::invaug::SOCKET_BEGIN; r <= EQ::invaug::SOCKET_END; r++) { const EQ::ItemInstance* aug_i = inst->GetAugment(r); if (!aug_i) continue; const EQ::ItemData* aug = aug_i->GetItem(); if (!aug) continue; if ((aug->Click.Type == EQ::item::ItemEffectClick) || (aug->Click.Type == EQ::item::ItemEffectExpendable) || (aug->Click.Type == EQ::item::ItemEffectEquipClick) || (aug->Click.Type == EQ::item::ItemEffectClick2)) { tryaug = true; clickaug = (EQ::ItemInstance*)aug_i; augitem = (EQ::ItemData*)aug; spell_id = aug->Click.Effect; break; } } if ((spell_id <= 0) && (item->ItemType != EQ::item::ItemTypeFood && item->ItemType != EQ::item::ItemTypeDrink && item->ItemType != EQ::item::ItemTypeAlcohol && item->ItemType != EQ::item::ItemTypeSpell)) { LogDebug("Item with no effect right clicked by [{}]", GetName()); } else if (inst->IsClassCommon()) { if (!RuleB(Skills, RequireTomeHandin) && item->ItemType == EQ::item::ItemTypeSpell && (strstr((const char*)item->Name, "Tome of ") || strstr((const char*)item->Name, "Skill: "))) { DeleteItemInInventory(slot_id, 1, true); TrainDiscipline(item->ID); } else if (item->ItemType == EQ::item::ItemTypeSpell) { if (RuleB(Spells, AllowSpellMemorizeFromItem)) { DeleteItemInInventory(slot_id, 1, true); MemorizeSpellFromItem(item->ID); } else { return; } } else if ((item->Click.Type == EQ::item::ItemEffectClick) || (item->Click.Type == EQ::item::ItemEffectExpendable) || (item->Click.Type == EQ::item::ItemEffectEquipClick) || (item->Click.Type == EQ::item::ItemEffectClick2)) { if (inst->GetCharges() == 0) { //Message(0, "This item is out of charges."); MessageString(Chat::Red, ITEM_OUT_OF_CHARGES); return; } if (GetLevel() >= item->Click.Level2) { int i = parse->EventItem(EVENT_ITEM_CLICK_CAST, this, p_inst, nullptr, "", slot_id); inst = m_inv[slot_id]; if (!inst) { return; } if (i == 0) { if (!IsCastWhileInvis(item->Click.Effect)) CommonBreakInvisible(); // client can't do this for us :( CastSpell(item->Click.Effect, target_id, CastingSlot::Item, item->CastTime, 0, 0, slot_id); } } else { MessageString(Chat::Red, ITEMS_INSUFFICIENT_LEVEL); return; } } else if (tryaug) { if (clickaug->GetCharges() == 0) { //Message(0, "This item is out of charges."); MessageString(Chat::Red, ITEM_OUT_OF_CHARGES); return; } if 
(GetLevel() >= augitem->Click.Level2) { int i = parse->EventItem(EVENT_ITEM_CLICK_CAST, this, clickaug, nullptr, "", slot_id); inst = m_inv[slot_id]; if (!inst) { return; } if (i == 0) { if (!IsCastWhileInvis(augitem->Click.Effect)) CommonBreakInvisible(); // client can't do this for us :( CastSpell(augitem->Click.Effect, target_id, CastingSlot::Item, augitem->CastTime, 0, 0, slot_id); } } else { MessageString(Chat::Red, ITEMS_INSUFFICIENT_LEVEL); return; } } else { if (ClientVersion() >= EQ::versions::ClientVersion::SoD && !inst->IsEquipable(GetBaseRace(), GetClass())) { if (item->ItemType != EQ::item::ItemTypeFood && item->ItemType != EQ::item::ItemTypeDrink && item->ItemType != EQ::item::ItemTypeAlcohol) { LogDebug("Error: unknown item->Click.Type ([{}])", item->Click.Type); } else { /* //This is food/drink - consume it if (item->ItemType == EQ::item::ItemTypeFood && m_pp.hunger_level < 5000) { Consume(item, item->ItemType, slot_id, false); } else if (item->ItemType == EQ::item::ItemTypeDrink && m_pp.thirst_level < 5000) { Consume(item, item->ItemType, slot_id, false); } else if (item->ItemType == EQ::item::ItemTypeAlcohol) { #if EQDEBUG >= 1 LogDebug("Drinking Alcohol from slot:[{}]", slot_id); #endif // This Seems to be handled in OP_DeleteItem handling //DeleteItemInInventory(slot_id, 1, false); //entity_list.MessageCloseString(this, true, 50, 0, DRINKING_MESSAGE, GetName(), item->Name); //Should add intoxication level to the PP at some point //CheckIncreaseSkill(ALCOHOL_TOLERANCE, nullptr, 25); } EQApplicationPacket *outapp2 = nullptr; outapp2 = new EQApplicationPacket(OP_Stamina, sizeof(Stamina_Struct)); Stamina_Struct* sta = (Stamina_Struct*)outapp2->pBuffer; if (m_pp.hunger_level > 6000) sta->food = 6000; if (m_pp.thirst_level > 6000) sta->water = 6000; sta->food = m_pp.hunger_level; sta->water = m_pp.thirst_level; QueuePacket(outapp2); safe_delete(outapp2); */ } } else { LogDebug("Error: unknown item->Click.Type ([{}])", item->Click.Type); } } } else { Message(0, "Error: item not found in inventory slot #%i", slot_id); } } else { Message(0, "Error: Invalid inventory slot for using effects (inventory slot #%i)", slot_id); } return; } void Client::Handle_OP_Jump(const EQApplicationPacket *app) { SetEndurance(GetEndurance() - (GetLevel()<20 ? 
(225 * GetLevel() / 100) : 50)); return; } void Client::Handle_OP_KeyRing(const EQApplicationPacket *app) { KeyRingList(); } void Client::Handle_OP_KickPlayers(const EQApplicationPacket *app) { auto buf = reinterpret_cast<KickPlayers_Struct*>(app->pBuffer); if (buf->kick_expedition) { auto expedition = GetExpedition(); if (expedition) { expedition->DzKickPlayers(this); } } else if (buf->kick_task && GetTaskState() && GetTaskState()->HasActiveSharedTask()) { GetTaskState()->KickPlayersSharedTask(this); } } void Client::Handle_OP_LDoNButton(const EQApplicationPacket *app) { if (app->size < sizeof(bool)) { return; } if (GetPendingAdventureCreate()) { return; } if (IsOnAdventure()) { return; } bool* p = (bool*)app->pBuffer; if (*p == true) { auto pack = new ServerPacket(ServerOP_AdventureRequestCreate, sizeof(ServerAdventureRequestCreate_Struct) + (64 * adv_requested_member_count)); ServerAdventureRequestCreate_Struct *sac = (ServerAdventureRequestCreate_Struct*)pack->pBuffer; strcpy(sac->leader, GetName()); sac->id = adv_requested_id; sac->theme = adv_requested_theme; sac->member_count = adv_requested_member_count; memcpy((pack->pBuffer + sizeof(ServerAdventureRequestCreate_Struct)), adv_requested_data, (64 * adv_requested_member_count)); worldserver.SendPacket(pack); delete pack; PendingAdventureCreate(); ClearPendingAdventureData(); } else { ClearPendingAdventureData(); } } void Client::Handle_OP_LDoNDisarmTraps(const EQApplicationPacket *app) { Mob * target = GetTarget(); if (target->IsNPC()) { if (HasSkill(EQ::skills::SkillDisarmTraps)) { if (DistanceSquaredNoZ(m_Position, target->GetPosition()) > RuleI(Adventure, LDoNTrapDistanceUse)) { Message(Chat::Red, "%s is too far away.", target->GetCleanName()); return; } HandleLDoNDisarm(target->CastToNPC(), GetSkill(EQ::skills::SkillDisarmTraps), LDoNTypeMechanical); } else Message(Chat::Red, "You do not have the disarm trap skill."); } } void Client::Handle_OP_LDoNInspect(const EQApplicationPacket *app) { Mob * target = GetTarget(); if (target && target->GetClass() == LDON_TREASURE) Message(Chat::Yellow, "%s", target->GetCleanName()); } void Client::Handle_OP_LDoNOpen(const EQApplicationPacket *app) { Mob * target = GetTarget(); if (target && target->IsNPC()) HandleLDoNOpen(target->CastToNPC()); } void Client::Handle_OP_LDoNPickLock(const EQApplicationPacket *app) { Mob * target = GetTarget(); if (target->IsNPC()) { if (HasSkill(EQ::skills::SkillPickLock)) { if (DistanceSquaredNoZ(m_Position, target->GetPosition()) > RuleI(Adventure, LDoNTrapDistanceUse)) { Message(Chat::Red, "%s is too far away.", target->GetCleanName()); return; } HandleLDoNPickLock(target->CastToNPC(), GetSkill(EQ::skills::SkillPickLock), LDoNTypeMechanical); } else Message(Chat::Red, "You do not have the pick locks skill."); } } void Client::Handle_OP_LDoNSenseTraps(const EQApplicationPacket *app) { Mob * target = GetTarget(); if (target->IsNPC()) { if (HasSkill(EQ::skills::SkillSenseTraps)) { if (DistanceSquaredNoZ(m_Position, target->GetPosition()) > RuleI(Adventure, LDoNTrapDistanceUse)) { Message(Chat::Red, "%s is too far away.", target->GetCleanName()); return; } HandleLDoNSenseTraps(target->CastToNPC(), GetSkill(EQ::skills::SkillSenseTraps), LDoNTypeMechanical); } else Message(Chat::Red, "You do not have the sense traps skill."); } } void Client::Handle_OP_LeadershipExpToggle(const EQApplicationPacket *app) { if (app->size != 1) { LogDebug("Size mismatch in OP_LeadershipExpToggle expected [{}] got [{}]", 1, app->size); DumpPacket(app); return; } uint8 *mode = (uint8 
*)app->pBuffer; if (*mode) { m_pp.leadAAActive = 1; Save(); MessageString(Chat::Yellow, LEADERSHIP_EXP_ON); } else { m_pp.leadAAActive = 0; Save(); MessageString(Chat::Yellow, LEADERSHIP_EXP_OFF); } } void Client::Handle_OP_LeaveAdventure(const EQApplicationPacket *app) { if (!IsOnAdventure()) { return; } LeaveAdventure(); } void Client::Handle_OP_LeaveBoat(const EQApplicationPacket *app) { Mob* boat = entity_list.GetMob(this->controlling_boat_id); // find the mob corresponding to the boat id if (boat) { if ((boat->GetTarget() == this) && boat->GetHateAmount(this) == 0) { // if the client somehow left while still controlling the boat (and the boat isn't attacking them) boat->SetTarget(nullptr); // fix it to stop later problems } } this->controlling_boat_id = 0; return; } void Client::Handle_OP_LFGCommand(const EQApplicationPacket *app) { if (app->size != sizeof(LFG_Struct)) { std::cout << "Wrong size on OP_LFGCommand. Got: " << app->size << ", Expected: " << sizeof(LFG_Struct) << std::endl; DumpPacket(app); return; } // Process incoming packet LFG_Struct* lfg = (LFG_Struct*)app->pBuffer; switch (lfg->value & 0xFF) { case 0: if (LFG) { database.SetLFG(CharacterID(), false); LFG = false; LFGComments[0] = '\0'; } break; case 1: if (!LFG) { LFG = true; database.SetLFG(CharacterID(), true); } LFGFromLevel = lfg->FromLevel; LFGToLevel = lfg->ToLevel; LFGMatchFilter = lfg->MatchFilter; strn0cpy(LFGComments, lfg->Comments, sizeof(LFGComments)); break; default: Message(0, "Error: unknown LFG value %i", lfg->value); } UpdateWho(); // Issue outgoing packet to notify other clients auto outapp = new EQApplicationPacket(OP_LFGAppearance, sizeof(LFG_Appearance_Struct)); LFG_Appearance_Struct* lfga = (LFG_Appearance_Struct*)outapp->pBuffer; lfga->spawn_id = this->GetID(); lfga->lfg = (uint8)LFG; entity_list.QueueClients(this, outapp, true); safe_delete(outapp); return; } void Client::Handle_OP_LFGGetMatchesRequest(const EQApplicationPacket *app) { if (app->size != sizeof(LFGGetMatchesRequest_Struct)) { LogError("Wrong size: OP_LFGGetMatchesRequest, size=[{}], expected [{}]", app->size, sizeof(LFGGetMatchesRequest_Struct)); DumpPacket(app); return; } LFGGetMatchesRequest_Struct* gmrs = (LFGGetMatchesRequest_Struct*)app->pBuffer; if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { auto pack = new ServerPacket(ServerOP_LFGMatches, sizeof(ServerLFGMatchesRequest_Struct)); ServerLFGMatchesRequest_Struct* smrs = (ServerLFGMatchesRequest_Struct*)pack->pBuffer; smrs->FromID = GetID(); smrs->QuerierLevel = GetLevel(); strcpy(smrs->FromName, GetName()); smrs->FromLevel = gmrs->FromLevel; smrs->ToLevel = gmrs->ToLevel; smrs->Classes = gmrs->Classes; worldserver.SendPacket(pack); safe_delete(pack); } } void Client::Handle_OP_LFGuild(const EQApplicationPacket *app) { if (app->size < 4) return; uint32 Command = *((uint32 *)app->pBuffer); switch (Command) { case 0: { VERIFY_PACKET_LENGTH(OP_LFGuild, app, LFGuild_PlayerToggle_Struct); LFGuild_PlayerToggle_Struct *pts = (LFGuild_PlayerToggle_Struct *)app->pBuffer; #ifdef DARWIN #if __DARWIN_C_LEVEL < 200809L if (strlen(pts->Comment) > 256) #else if (strnlen(pts->Comment, 256) > 256) #endif // __DARWIN_C_LEVEL #else if (strnlen(pts->Comment, 256) > 256) #endif // DARWIN return; auto pack = new ServerPacket(ServerOP_QueryServGeneric, strlen(GetName()) + strlen(pts->Comment) + 38); pack->WriteUInt32(zone->GetZoneID()); pack->WriteUInt32(zone->GetInstanceID()); pack->WriteString(GetName()); pack->WriteUInt32(QSG_LFGuild); 
pack->WriteUInt32(QSG_LFGuild_UpdatePlayerInfo); pack->WriteUInt32(GetBaseClass()); pack->WriteUInt32(GetLevel()); pack->WriteUInt32(GetSpentAA()); pack->WriteString(pts->Comment); pack->WriteUInt32(pts->Toggle); pack->WriteUInt32(pts->TimeZone); worldserver.SendPacket(pack); safe_delete(pack); break; } case 1: { VERIFY_PACKET_LENGTH(OP_LFGuild, app, LFGuild_GuildToggle_Struct); LFGuild_GuildToggle_Struct *gts = (LFGuild_GuildToggle_Struct *)app->pBuffer; #ifdef DARWIN #if __DARWIN_C_LEVEL < 200809L if (strlen(gts->Comment) > 256) #else if (strnlen(gts->Comment, 256) > 256) #endif // __DARWIN_C_LEVEL #else if (strnlen(gts->Comment, 256) > 256) #endif // __DARWIN return; auto pack = new ServerPacket(ServerOP_QueryServGeneric, strlen(GetName()) + strlen(gts->Comment) + strlen(guild_mgr.GetGuildName(GuildID())) + 43); pack->WriteUInt32(zone->GetZoneID()); pack->WriteUInt32(zone->GetInstanceID()); pack->WriteString(GetName()); pack->WriteUInt32(QSG_LFGuild); pack->WriteUInt32(QSG_LFGuild_UpdateGuildInfo); pack->WriteString(guild_mgr.GetGuildName(GuildID())); pack->WriteString(gts->Comment); pack->WriteUInt32(gts->FromLevel); pack->WriteUInt32(gts->ToLevel); pack->WriteUInt32(gts->Classes); pack->WriteUInt32(gts->AACount); pack->WriteUInt32(gts->Toggle); pack->WriteUInt32(gts->TimeZone); worldserver.SendPacket(pack); safe_delete(pack); break; } case 3: { VERIFY_PACKET_LENGTH(OP_LFGuild, app, LFGuild_SearchPlayer_Struct); auto pack = new ServerPacket(ServerOP_QueryServGeneric, strlen(GetName()) + 37); pack->WriteUInt32(zone->GetZoneID()); pack->WriteUInt32(zone->GetInstanceID()); pack->WriteString(GetName()); pack->WriteUInt32(QSG_LFGuild); pack->WriteUInt32(QSG_LFGuild_PlayerMatches); LFGuild_SearchPlayer_Struct *sps = (LFGuild_SearchPlayer_Struct *)app->pBuffer; pack->WriteUInt32(sps->FromLevel); pack->WriteUInt32(sps->ToLevel); pack->WriteUInt32(sps->MinAA); pack->WriteUInt32(sps->TimeZone); pack->WriteUInt32(sps->Classes); worldserver.SendPacket(pack); safe_delete(pack); break; } case 4: { VERIFY_PACKET_LENGTH(OP_LFGuild, app, LFGuild_SearchGuild_Struct); auto pack = new ServerPacket(ServerOP_QueryServGeneric, strlen(GetName()) + 33); pack->WriteUInt32(zone->GetZoneID()); pack->WriteUInt32(zone->GetInstanceID()); pack->WriteString(GetName()); pack->WriteUInt32(QSG_LFGuild); pack->WriteUInt32(QSG_LFGuild_GuildMatches); LFGuild_SearchGuild_Struct *sgs = (LFGuild_SearchGuild_Struct *)app->pBuffer; pack->WriteUInt32(sgs->Level); pack->WriteUInt32(sgs->AAPoints); pack->WriteUInt32(sgs->TimeZone); pack->WriteUInt32(sgs->Class); worldserver.SendPacket(pack); safe_delete(pack); break; } default: break; } } void Client::Handle_OP_LFPCommand(const EQApplicationPacket *app) { if (app->size != sizeof(LFP_Struct)) { LogError("Wrong size: OP_LFPCommand, size=[{}], expected [{}]", app->size, sizeof(LFP_Struct)); DumpPacket(app); return; } LFP_Struct *lfp = (LFP_Struct*)app->pBuffer; LFP = lfp->Action != LFPOff; database.SetLFP(CharacterID(), LFP); if (!LFP) { worldserver.StopLFP(CharacterID()); return; } GroupLFPMemberEntry LFPMembers[MAX_GROUP_MEMBERS]; for (unsigned int i = 0; i<MAX_GROUP_MEMBERS; i++) { LFPMembers[i].Name[0] = '\0'; LFPMembers[i].Class = 0; LFPMembers[i].Level = 0; LFPMembers[i].Zone = 0; LFPMembers[i].GuildID = 0xFFFF; } Group *g = GetGroup(); // Slot 0 is always for the group leader, or the player if not in a group strcpy(LFPMembers[0].Name, GetName()); LFPMembers[0].Class = GetClass(); LFPMembers[0].Level = GetLevel(); LFPMembers[0].Zone = zone->GetZoneID(); LFPMembers[0].GuildID = 
GuildID(); if (g) { // This should not happen. The client checks if you are in a group and will not let you put LFP on if // you are not the leader. if (!g->IsLeader(this)) { LogError("Client sent LFP on for character [{}] who is grouped but not leader", GetName()); return; } // Fill the LFPMembers array with the rest of the group members, excluding ourself // We don't fill in the class, level or zone, because we may not be able to determine // them if the other group members are not in this zone. World will fill in this information // for us, if it can. int NextFreeSlot = 1; for (unsigned int i = 0; i < MAX_GROUP_MEMBERS; i++) { if (strcasecmp(g->membername[i], LFPMembers[0].Name)) strcpy(LFPMembers[NextFreeSlot++].Name, g->membername[i]); } } worldserver.UpdateLFP(CharacterID(), lfp->Action, lfp->MatchFilter, lfp->FromLevel, lfp->ToLevel, lfp->Classes, lfp->Comments, LFPMembers); } void Client::Handle_OP_LFPGetMatchesRequest(const EQApplicationPacket *app) { if (app->size != sizeof(LFPGetMatchesRequest_Struct)) { LogError("Wrong size: OP_LFPGetMatchesRequest, size=[{}], expected [{}]", app->size, sizeof(LFPGetMatchesRequest_Struct)); DumpPacket(app); return; } LFPGetMatchesRequest_Struct* gmrs = (LFPGetMatchesRequest_Struct*)app->pBuffer; if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { auto pack = new ServerPacket(ServerOP_LFPMatches, sizeof(ServerLFPMatchesRequest_Struct)); ServerLFPMatchesRequest_Struct* smrs = (ServerLFPMatchesRequest_Struct*)pack->pBuffer; smrs->FromID = GetID(); smrs->FromLevel = gmrs->FromLevel; smrs->ToLevel = gmrs->ToLevel; smrs->QuerierLevel = GetLevel(); smrs->QuerierClass = GetClass(); strcpy(smrs->FromName, GetName()); worldserver.SendPacket(pack); safe_delete(pack); } return; } void Client::Handle_OP_LoadSpellSet(const EQApplicationPacket *app) { if (app->size != sizeof(LoadSpellSet_Struct)) { printf("Wrong size of LoadSpellSet_Struct! Expected: %zu, Got: %i\n", sizeof(LoadSpellSet_Struct), app->size); return; } int i; LoadSpellSet_Struct* ss = (LoadSpellSet_Struct*)app->pBuffer; for (i = 0; i < EQ::spells::SPELL_GEM_COUNT; i++) { if (ss->spell[i] != 0xFFFFFFFF) UnmemSpell(i, true); } } void Client::Handle_OP_Logout(const EQApplicationPacket *app) { LogDebug("[{}] sent a logout packet", GetName()); SendLogoutPackets(); auto outapp = new EQApplicationPacket(OP_LogoutReply); FastQueuePacket(&outapp); Disconnect(); return; } void Client::Handle_OP_LootItem(const EQApplicationPacket *app) { if (app->size != sizeof(LootingItem_Struct)) { LogError("Wrong size: OP_LootItem, size=[{}], expected [{}]", app->size, sizeof(LootingItem_Struct)); return; } EQApplicationPacket* outapp = nullptr; Entity* entity = entity_list.GetID(*((uint16*)app->pBuffer)); if (entity == 0) { Message(Chat::Red, "Error: OP_LootItem: Corpse not found (ent = 0)"); outapp = new EQApplicationPacket(OP_LootComplete, 0); QueuePacket(outapp); safe_delete(outapp); return; } if (entity->IsCorpse()) { entity->CastToCorpse()->LootItem(this, app); return; } else { Message(Chat::Red, "Error: Corpse not found! 
(!ent->IsCorpse())"); Corpse::SendEndLootErrorPacket(this); } return; } void Client::Handle_OP_LootRequest(const EQApplicationPacket *app) { if (app->size != sizeof(uint32)) { std::cout << "Wrong size: OP_LootRequest, size=" << app->size << ", expected " << sizeof(uint32) << std::endl; return; } Entity* ent = entity_list.GetID(*((uint32*)app->pBuffer)); if (ent == 0) { Message(Chat::Red, "Error: OP_LootRequest: Corpse not found (ent = 0)"); Corpse::SendLootReqErrorPacket(this); return; } if (ent->IsCorpse()) { SetLooting(ent->GetID()); //store the entity we are looting ent->CastToCorpse()->MakeLootRequestPackets(this, app); return; } else { std::cout << "npc == 0 LOOTING FOOKED3" << std::endl; Message(Chat::Red, "Error: OP_LootRequest: Corpse not a corpse?"); Corpse::SendLootReqErrorPacket(this); } return; } void Client::Handle_OP_ManaChange(const EQApplicationPacket *app) { if (app->size == 0) { // i think thats the sign to stop the songs if (IsBardSong(casting_spell_id) || bardsong != 0) InterruptSpell(SONG_ENDS, 0x121); else InterruptSpell(INTERRUPT_SPELL, 0x121); return; } else // I don't think the client sends proper manachanges { // with a length, just the 0 len ones for stopping songs //ManaChange_Struct* p = (ManaChange_Struct*)app->pBuffer; printf("OP_ManaChange from client:\n"); DumpPacket(app); } return; } /* #if 0 // I dont think there's an op for this now, and we check this // when the client is sitting void Client::Handle_OP_Medding(const EQApplicationPacket *app) { if (app->pBuffer[0]) medding = true; else medding = false; return; } #endif */ void Client::Handle_OP_MemorizeSpell(const EQApplicationPacket *app) { cheat_manager.CheckMemTimer(); OPMemorizeSpell(app); return; } void Client::Handle_OP_Mend(const EQApplicationPacket *app) { if (!HasSkill(EQ::skills::SkillMend)) return; if (!p_timers.Expired(&database, pTimerMend, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } p_timers.Start(pTimerMend, MendReuseTime - 1); int mendhp = GetMaxHP() / 4; int currenthp = GetHP(); if (zone->random.Int(0, 199) < (int)GetSkill(EQ::skills::SkillMend)) { int criticalchance = spellbonuses.CriticalMend + itembonuses.CriticalMend + aabonuses.CriticalMend; if (zone->random.Int(0, 99) < criticalchance) { mendhp *= 2; MessageString(Chat::LightBlue, MEND_CRITICAL); } SetHP(GetHP() + mendhp); SendHPUpdate(); MessageString(Chat::LightBlue, MEND_SUCCESS); } else { /* the purpose of the following is to make the chance to worsen wounds much less common, which is more consistent with the way eq live works. according to my math, this should result in the following probability: 0 skill - 25% chance to worsen 20 skill - 23% chance to worsen 50 skill - 16% chance to worsen */ if ((GetSkill(EQ::skills::SkillMend) <= 75) && (zone->random.Int(GetSkill(EQ::skills::SkillMend), 100) < 75) && (zone->random.Int(1, 3) == 1)) { SetHP(currenthp > mendhp ? 
(GetHP() - mendhp) : 1); SendHPUpdate(); MessageString(Chat::LightBlue, MEND_WORSEN); } else MessageString(Chat::LightBlue, MEND_FAIL); } CheckIncreaseSkill(EQ::skills::SkillMend, nullptr, 10); return; } void Client::Handle_OP_MercenaryCommand(const EQApplicationPacket *app) { if (app->size != sizeof(MercenaryCommand_Struct)) { Message(Chat::Red, "Size mismatch in OP_MercenaryCommand expected %i got %i", sizeof(MercenaryCommand_Struct), app->size); LogDebug("Size mismatch in OP_MercenaryCommand expected [{}] got [{}]", sizeof(MercenaryCommand_Struct), app->size); DumpPacket(app); return; } MercenaryCommand_Struct* mc = (MercenaryCommand_Struct*)app->pBuffer; uint32 merc_command = mc->MercCommand; // Seen 0 (zone in with no merc or suspended), 1 (dismiss merc), 5 (normal state), 20 (unknown), 36 (zone in with merc) int32 option = mc->Option; // Seen -1 (zone in with no merc), 0 (setting to passive stance), 1 (normal or setting to balanced stance) Log(Logs::General, Logs::Mercenaries, "Command %i, Option %i received from %s.", merc_command, option, GetName()); if (!RuleB(Mercs, AllowMercs)) return; // Handle the Command here... // Will need a list of what every type of command is supposed to do // Unsure if there is a server response to this packet if (option >= 0) { Merc* merc = GetMerc(); GetMercInfo().State = option; if (merc) { uint8 numStances = 0; //get number of available stances for the current merc std::list<MercStanceInfo> mercStanceList = zone->merc_stance_list[merc->GetMercTemplateID()]; auto iter = mercStanceList.begin(); while (iter != mercStanceList.end()) { numStances++; ++iter; } MercTemplate* mercTemplate = zone->GetMercTemplate(GetMerc()->GetMercTemplateID()); if (mercTemplate) { //check to see if selected option is a valid stance slot (option is the slot the stance is in, not the actual stance) if (option >= 0 && option < numStances) { merc->SetStance((EQ::constants::StanceType)mercTemplate->Stances[option]); GetMercInfo().Stance = mercTemplate->Stances[option]; Log(Logs::General, Logs::Mercenaries, "Set Stance: %u for %s (%s)", merc->GetStance(), merc->GetName(), GetName()); } } } } } void Client::Handle_OP_MercenaryDataRequest(const EQApplicationPacket *app) { // The payload is 4 bytes. The EntityID of the Mercenary Liason which are of class 71. 
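	// A MercMerchantID of 0 means the client is asking about its own mercenary (SendMercPersonalInfo);
	// any other ID is looked up as a mercenary liaison NPC and answered with that merchant's hire list.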
if (app->size != sizeof(MercenaryMerchantShopRequest_Struct)) { LogDebug("Size mismatch in OP_MercenaryDataRequest expected 4 got [{}]", app->size); DumpPacket(app); return; } MercenaryMerchantShopRequest_Struct* mmsr = (MercenaryMerchantShopRequest_Struct*)app->pBuffer; uint32 merchant_id = mmsr->MercMerchantID; uint32 altCurrentType = 19; Log(Logs::General, Logs::Mercenaries, "Data Request for Merchant ID (%i) for %s.", merchant_id, GetName()); //client is requesting data about currently owned mercenary if (merchant_id == 0) { //send info about your current merc(s) if (GetMercInfo().mercid) { Log(Logs::General, Logs::Mercenaries, "SendMercPersonalInfo Request for %s.", GetName()); SendMercPersonalInfo(); } else { Log(Logs::General, Logs::Mercenaries, "SendMercPersonalInfo Not Sent - MercID (%i) for %s.", GetMercInfo().mercid, GetName()); } } if (!RuleB(Mercs, AllowMercs)) { return; } NPC* tar = entity_list.GetNPCByID(merchant_id); if (tar) { int mercTypeCount = 0; int mercCount = 0; if (DistanceSquared(m_Position, tar->GetPosition()) > USE_NPC_RANGE2) return; if (tar->GetClass() != MERCERNARY_MASTER) { return; } mercTypeCount = tar->GetNumMercTypes(static_cast<unsigned int>(ClientVersion())); mercCount = tar->GetNumMercs(static_cast<unsigned int>(ClientVersion())); if (mercCount > MAX_MERC) return; std::list<MercType> mercTypeList = tar->GetMercTypesList(static_cast<unsigned int>(ClientVersion())); std::list<MercData> mercDataList = tar->GetMercsList(static_cast<unsigned int>(ClientVersion())); int i = 0; int StanceCount = 0; for (auto mercListItr = mercDataList.begin(); mercListItr != mercDataList.end(); ++mercListItr) { auto siter = zone->merc_stance_list[mercListItr->MercTemplateID].begin(); for (siter = zone->merc_stance_list[mercListItr->MercTemplateID].begin(); siter != zone->merc_stance_list[mercListItr->MercTemplateID].end(); ++siter) { StanceCount++; } } auto outapp = new EQApplicationPacket(OP_MercenaryDataResponse, sizeof(MercenaryMerchantList_Struct)); MercenaryMerchantList_Struct* mml = (MercenaryMerchantList_Struct*)outapp->pBuffer; mml->MercTypeCount = mercTypeCount; if (mercTypeCount > 0) { for (auto mercTypeListItr = mercTypeList.begin(); mercTypeListItr != mercTypeList.end(); ++mercTypeListItr) { mml->MercGrades[i] = mercTypeListItr->Type; // DBStringID for Type i++; } } mml->MercCount = mercCount; if (mercCount > 0) { i = 0; for (auto mercListIter = mercDataList.begin(); mercListIter != mercDataList.end(); ++mercListIter) { mml->Mercs[i].MercID = mercListIter->MercTemplateID; mml->Mercs[i].MercType = mercListIter->MercType; mml->Mercs[i].MercSubType = mercListIter->MercSubType; mml->Mercs[i].PurchaseCost = RuleB(Mercs, ChargeMercPurchaseCost) ? Merc::CalcPurchaseCost(mercListIter->MercTemplateID, GetLevel(), 0) : 0; mml->Mercs[i].UpkeepCost = RuleB(Mercs, ChargeMercUpkeepCost) ? Merc::CalcUpkeepCost(mercListIter->MercTemplateID, GetLevel(), 0) : 0; mml->Mercs[i].Status = 0; mml->Mercs[i].AltCurrencyCost = RuleB(Mercs, ChargeMercPurchaseCost) ? Merc::CalcPurchaseCost(mercListIter->MercTemplateID, GetLevel(), altCurrentType) : 0; mml->Mercs[i].AltCurrencyUpkeep = RuleB(Mercs, ChargeMercUpkeepCost) ? 
Merc::CalcUpkeepCost(mercListIter->MercTemplateID, GetLevel(), altCurrentType) : 0; mml->Mercs[i].AltCurrencyType = altCurrentType; mml->Mercs[i].MercUnk01 = 0; mml->Mercs[i].TimeLeft = -1; mml->Mercs[i].MerchantSlot = i + 1; mml->Mercs[i].MercUnk02 = 1; int mercStanceCount = 0; auto iter = zone->merc_stance_list[mercListIter->MercTemplateID].begin(); for (iter = zone->merc_stance_list[mercListIter->MercTemplateID].begin(); iter != zone->merc_stance_list[mercListIter->MercTemplateID].end(); ++iter) { mercStanceCount++; } mml->Mercs[i].StanceCount = mercStanceCount; mml->Mercs[i].MercUnk03 = 519044964; mml->Mercs[i].MercUnk04 = 1; //mml->Mercs[i].MercName; int stanceindex = 0; if (mercStanceCount > 0) { auto iter2 = zone->merc_stance_list[mercListIter->MercTemplateID].begin(); while (iter2 != zone->merc_stance_list[mercListIter->MercTemplateID].end()) { mml->Mercs[i].Stances[stanceindex].StanceIndex = stanceindex; mml->Mercs[i].Stances[stanceindex].Stance = (iter2->StanceID); stanceindex++; ++iter2; } } i++; } } FastQueuePacket(&outapp); } } void Client::Handle_OP_MercenaryDataUpdateRequest(const EQApplicationPacket *app) { // The payload is 0 bytes. if (app->size != 0) { Message(Chat::Red, "Size mismatch in OP_MercenaryDataUpdateRequest expected 0 got %i", app->size); LogDebug("Size mismatch in OP_MercenaryDataUpdateRequest expected 0 got [{}]", app->size); DumpPacket(app); return; } Log(Logs::General, Logs::Mercenaries, "Data Update Request Received for %s.", GetName()); if (GetMercID()) { SendMercPersonalInfo(); } } void Client::Handle_OP_MercenaryDismiss(const EQApplicationPacket *app) { // The payload is 0 or 1 bytes. if (app->size > 1) { Message(Chat::Red, "Size mismatch in OP_MercenaryDismiss expected 0 got %i", app->size); LogDebug("Size mismatch in OP_MercenaryDismiss expected 0 got [{}]", app->size); DumpPacket(app); return; } uint8 Command = 0; if (app->size > 0) { char *InBuffer = (char *)app->pBuffer; Command = VARSTRUCT_DECODE_TYPE(uint8, InBuffer); } Log(Logs::General, Logs::Mercenaries, "Dismiss Request ( %i ) Received for %s.", Command, GetName()); // Handle the dismiss here... DismissMerc(GetMercInfo().mercid); } void Client::Handle_OP_MercenaryHire(const EQApplicationPacket *app) { // The payload is 16 bytes. 
First four bytes are the Merc ID (Template ID) if (app->size != sizeof(MercenaryMerchantRequest_Struct)) { LogDebug("Size mismatch in OP_MercenaryHire expected [{}] got [{}]", sizeof(MercenaryMerchantRequest_Struct), app->size); DumpPacket(app); return; } MercenaryMerchantRequest_Struct* mmrq = (MercenaryMerchantRequest_Struct*)app->pBuffer; uint32 merc_template_id = mmrq->MercID; uint32 merchant_id = mmrq->MercMerchantID; uint32 merc_unk1 = mmrq->MercUnk01; uint32 merc_unk2 = mmrq->MercUnk02; Log(Logs::General, Logs::Mercenaries, "Template ID (%i), Merchant ID (%i), Unknown1 (%i), Unknown2 (%i), Client: %s", merc_template_id, merchant_id, merc_unk1, merc_unk2, GetName()); //HirePending = true; SetHoTT(0); SendTargetCommand(0); if (!RuleB(Mercs, AllowMercs)) return; MercTemplate* merc_template = zone->GetMercTemplate(merc_template_id); if (merc_template) { Mob* merchant = entity_list.GetNPCByID(merchant_id); if (!CheckCanHireMerc(merchant, merc_template_id)) { return; } // Set time remaining to max on Hire GetMercInfo().MercTimerRemaining = RuleI(Mercs, UpkeepIntervalMS); // Get merc, assign it to client & spawn Merc* merc = Merc::LoadMerc(this, merc_template, merchant_id, false); if (merc) { SpawnMerc(merc, true); merc->Save(); if (RuleB(Mercs, ChargeMercPurchaseCost)) { uint32 cost = Merc::CalcPurchaseCost(merc_template->MercTemplateID, GetLevel()) * 100; // Cost is in gold TakeMoneyFromPP(cost, true); } // approved hire request SendMercMerchantResponsePacket(0); } else { //merc failed to spawn SendMercMerchantResponsePacket(3); } } else { //merc doesn't exist in db SendMercMerchantResponsePacket(2); } } void Client::Handle_OP_MercenarySuspendRequest(const EQApplicationPacket *app) { if (app->size != sizeof(SuspendMercenary_Struct)) { Message(Chat::Red, "Size mismatch in OP_MercenarySuspendRequest expected %i got %i", sizeof(SuspendMercenary_Struct), app->size); LogDebug("Size mismatch in OP_MercenarySuspendRequest expected [{}] got [{}]", sizeof(SuspendMercenary_Struct), app->size); DumpPacket(app); return; } SuspendMercenary_Struct* sm = (SuspendMercenary_Struct*)app->pBuffer; uint32 merc_suspend = sm->SuspendMerc; // Seen 30 for suspending or unsuspending Log(Logs::General, Logs::Mercenaries, "Suspend ( %i ) received for %s.", merc_suspend, GetName()); if (!RuleB(Mercs, AllowMercs)) return; // Check if the merc is suspended and if so, unsuspend, otherwise suspend it SuspendMercCommand(); } void Client::Handle_OP_MercenaryTimerRequest(const EQApplicationPacket *app) { // The payload is 0 bytes. if (app->size > 1) { Message(Chat::Red, "Size mismatch in OP_MercenaryTimerRequest expected 0 got %i", app->size); LogDebug("Size mismatch in OP_MercenaryTimerRequest expected 0 got [{}]", app->size); DumpPacket(app); return; } Log(Logs::General, Logs::Mercenaries, "Timer Request received for %s.", GetName()); if (!RuleB(Mercs, AllowMercs)) { return; } // To Do: Load Mercenary Timer Data to properly populate this reply packet // All hard set values for now uint32 entityID = 0; uint32 mercState = 5; uint32 suspendedTime = 0; if (GetMercID()) { Merc* merc = GetMerc(); if (merc) { entityID = merc->GetID(); if (GetMercInfo().IsSuspended) { mercState = 1; suspendedTime = GetMercInfo().SuspendedTime; } } } if (entityID > 0) { SendMercTimerPacket(entityID, mercState, suspendedTime, GetMercInfo().MercTimerRemaining, RuleI(Mercs, SuspendIntervalMS)); } } void Client::Handle_OP_MoveCoin(const EQApplicationPacket *app) { if (app->size != sizeof(MoveCoin_Struct)) { LogError("Wrong size on OP_MoveCoin. 
Got: [{}], Expected: [{}]", app->size, sizeof(MoveCoin_Struct)); DumpPacket(app); return; } OPMoveCoin(app); return; } void Client::Handle_OP_MoveItem(const EQApplicationPacket *app) { if (!CharacterID()) { return; } if (app->size != sizeof(MoveItem_Struct)) { LogError("Wrong size: OP_MoveItem, size=[{}], expected [{}]", app->size, sizeof(MoveItem_Struct)); return; } MoveItem_Struct* mi = (MoveItem_Struct*)app->pBuffer; if (spellend_timer.Enabled() && casting_spell_id && !IsBardSong(casting_spell_id)) { if (mi->from_slot != mi->to_slot && (mi->from_slot <= EQ::invslot::GENERAL_END || mi->from_slot > 39) && IsValidSlot(mi->from_slot) && IsValidSlot(mi->to_slot)) { const EQ::ItemInstance *itm_from = GetInv().GetItem(mi->from_slot); const EQ::ItemInstance *itm_to = GetInv().GetItem(mi->to_slot); auto detect = fmt::format("Player issued a move item from {}(item id {}) to {}(item id {}) while casting {}.", mi->from_slot, itm_from ? itm_from->GetID() : 0, mi->to_slot, itm_to ? itm_to->GetID() : 0, casting_spell_id); database.SetMQDetectionFlag(AccountName(), GetName(), detect, zone->GetShortName()); Kick("Inventory desync"); // Kick client to prevent client and server from getting out-of-sync inventory slots return; } } // Illegal bagslot usage checks. Currently, user only receives a message if this check is triggered. bool mi_hack = false; if (mi->from_slot >= EQ::invbag::GENERAL_BAGS_BEGIN && mi->from_slot <= EQ::invbag::CURSOR_BAG_END) { if (mi->from_slot >= EQ::invbag::CURSOR_BAG_BEGIN) { mi_hack = true; } else { int16 from_parent = m_inv.CalcSlotId(mi->from_slot); if (!m_inv[from_parent]) { mi_hack = true; } else if (!m_inv[from_parent]->IsClassBag()) { mi_hack = true; } else if (m_inv.CalcBagIdx(mi->from_slot) >= m_inv[from_parent]->GetItem()->BagSlots) { mi_hack = true; } } } if (mi->to_slot >= EQ::invbag::GENERAL_BAGS_BEGIN && mi->to_slot <= EQ::invbag::CURSOR_BAG_END) { if (mi->to_slot >= EQ::invbag::CURSOR_BAG_BEGIN) { mi_hack = true; } else { int16 to_parent = m_inv.CalcSlotId(mi->to_slot); if (!m_inv[to_parent]) { mi_hack = true; } else if (!m_inv[to_parent]->IsClassBag()) { mi_hack = true; } else if (m_inv.CalcBagIdx(mi->to_slot) >= m_inv[to_parent]->GetItem()->BagSlots) { mi_hack = true; } } } if (mi_hack) { Message(Chat::Yellow, "Caution: Illegal use of inaccessible bag slots!"); } if (!SwapItem(mi) && IsValidSlot(mi->from_slot) && IsValidSlot(mi->to_slot)) { SwapItemResync(mi); bool error = false; InterrogateInventory(this, false, true, false, error, false); if (error) InterrogateInventory(this, true, false, true, error); } return; } void Client::Handle_OP_MoveMultipleItems(const EQApplicationPacket *app) { Kick("Unimplemented move multiple items"); // TODO: lets not desync though } void Client::Handle_OP_OpenContainer(const EQApplicationPacket *app) { // Does not exist in Ti client // SoF, SoD and UF clients send a 4-byte packet indicating the 'parent' slot // SoF, SoD and UF slots are defined by a uint32 value and currently untranslated // RoF client sends a 12-byte packet based on the RoF::Structs::ItemSlotStruct // RoF structure types are defined as signed uint16 and currently untranslated // RoF::struct.SlotType = {0 - Equipment, 1 - Bank, 2 - Shared Bank} // not tested beyond listed types // RoF::struct.Unknown2 = 0 // RoF::struct.MainSlot = { <parent slot range designated by slottype..zero-based> } // RoF::struct.SubSlot = -1 (non-child) // RoF::struct.AugSlot = -1 (non-child) // RoF::struct.Unknown1 = 141 (unsure why, but always appears to be this value..combine 
containers not tested) // SideNote: Watching the slot translations, Unknown1 is showing '141' as well on certain item swaps. // Manually looting a corpse results in a from '34' to '68' value for equipment items, '0' to '0' for inventory. } void Client::Handle_OP_OpenGuildTributeMaster(const EQApplicationPacket *app) { LogTribute("Received OP_OpenGuildTributeMaster of length [{}]", app->size); if (app->size != sizeof(StartTribute_Struct)) printf("Error in OP_OpenGuildTributeMaster. Expected size of: %zu, but got: %i\n", sizeof(StartTribute_Struct), app->size); else { //Opens the guild tribute master window StartTribute_Struct* st = (StartTribute_Struct*)app->pBuffer; Mob* tribmast = entity_list.GetMob(st->tribute_master_id); if (tribmast && tribmast->IsNPC() && tribmast->GetClass() == GUILD_TRIBUTE_MASTER && DistanceSquared(m_Position, tribmast->GetPosition()) <= USE_NPC_RANGE2) { st->response = 1; QueuePacket(app); tribute_master_id = st->tribute_master_id; DoTributeUpdate(); } else { st->response = 0; QueuePacket(app); } } return; } void Client::Handle_OP_OpenInventory(const EQApplicationPacket *app) { // Does not exist in Ti, UF or RoF clients // SoF and SoD both send a 4-byte packet with a uint32 value of '8' } void Client::Handle_OP_OpenTributeMaster(const EQApplicationPacket *app) { LogTribute("Received OP_OpenTributeMaster of length [{}]", app->size); if (app->size != sizeof(StartTribute_Struct)) printf("Error in OP_OpenTributeMaster. Expected size of: %zu, but got: %i\n", sizeof(StartTribute_Struct), app->size); else { //Opens the tribute master window StartTribute_Struct* st = (StartTribute_Struct*)app->pBuffer; Mob* tribmast = entity_list.GetMob(st->tribute_master_id); if (tribmast && tribmast->IsNPC() && tribmast->GetClass() == TRIBUTE_MASTER && DistanceSquared(m_Position, tribmast->GetPosition()) <= USE_NPC_RANGE2) { st->response = 1; QueuePacket(app); tribute_master_id = st->tribute_master_id; DoTributeUpdate(); } else { st->response = 0; QueuePacket(app); } } return; } void Client::Handle_OP_PDeletePetition(const EQApplicationPacket *app) { if (app->size < 2) { LogError("Wrong size: OP_PDeletePetition, size=[{}], expected [{}]", app->size, 2); return; } if (petition_list.DeletePetitionByCharName((char*)app->pBuffer)) MessageString(Chat::White, PETITION_DELETED); else MessageString(Chat::White, PETITION_NO_DELETE); return; } void Client::Handle_OP_PetCommands(const EQApplicationPacket *app) { if (app->size != sizeof(PetCommand_Struct)) { LogError("Wrong size: OP_PetCommands, size=[{}], expected [{}]", app->size, sizeof(PetCommand_Struct)); return; } char val1[20] = { 0 }; PetCommand_Struct* pet = (PetCommand_Struct*)app->pBuffer; Mob* mypet = this->GetPet(); Mob *target = entity_list.GetMob(pet->target); if (!mypet || pet->command == PET_LEADER) { if (pet->command == PET_LEADER) { // we either send the ID of an NPC we're interested in or no ID for our own pet if (target) { auto owner = target->GetOwner(); if (owner) target->SayString(PET_LEADERIS, owner->GetCleanName()); else target->SayString(I_FOLLOW_NOONE); } else if (mypet) { mypet->SayString(PET_LEADERIS, GetName()); } } return; } if (mypet->GetPetType() == petTargetLock && (pet->command != PET_HEALTHREPORT && pet->command != PET_GETLOST)) return; // just let the command "/pet get lost" work for familiars if (mypet->GetPetType() == petFamiliar && pet->command != PET_GETLOST) return; uint32 PetCommand = pet->command; // Handle Sit/Stand toggle in UF and later. 
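/*
	The switch below dispatches the requested pet command. Most cases are gated so that
	animation pets (petAnimation) only obey when the matching AA bonus
	(aabonuses.PetCommands[PetCommand]) is present, while other pet types obey unconditionally.
	Feared pets skip movement and attack orders so fear cannot be exploited.
*/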
/* if (GetClientVersion() >= EQClientUnderfoot) { if (PetCommand == PET_SITDOWN) if (mypet->GetPetOrder() == SPO_Sit) PetCommand = PET_STANDUP; } */ switch (PetCommand) { case PET_ATTACK: { if (!target) break; if (target->IsMezzed()) { MessageString(Chat::NPCQuestSay, CANNOT_WAKE, mypet->GetCleanName(), target->GetCleanName()); break; } if (mypet->IsFeared()) break; //prevent pet from attacking stuff while feared if (!mypet->IsAttackAllowed(target)) { mypet->SayString(this, NOT_LEGAL_TARGET); break; } // default range is 200, takes Z into account // really they do something weird where they're added to the aggro list then remove them // and will attack if they come in range -- too lazy, lets remove exploits for now if (DistanceSquared(mypet->GetPosition(), target->GetPosition()) >= RuleR(Aggro, PetAttackRange)) { // they say they're attacking then remove on live ... so they don't really say anything in this case ... break; } if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { if (target != this && DistanceSquaredNoZ(mypet->GetPosition(), target->GetPosition()) <= (RuleR(Pets, AttackCommandRange)*RuleR(Pets, AttackCommandRange))) { if (mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } if (mypet->IsPetRegroup()) { mypet->SetPetRegroup(false); SetPetCommandState(PET_BUTTON_REGROUP, 0); } // fix GUI sit button to be unpressed and stop sitting regen SetPetCommandState(PET_BUTTON_SIT, 0); mypet->SetAppearance(eaStanding); zone->AddAggroMob(); // classic acts like qattack int hate = 1; if (mypet->IsEngaged()) { auto top = mypet->GetHateMost(); if (top && top != target) hate += mypet->GetHateAmount(top) - mypet->GetHateAmount(target) + 100; // should be enough to cause target change } mypet->AddToHateList(target, hate, 0, true, false, false, SPELL_UNKNOWN, true); MessageString(Chat::PetResponse, PET_ATTACKING, mypet->GetCleanName(), target->GetCleanName()); SetTarget(target); } } break; } case PET_QATTACK: { if (mypet->IsFeared()) break; //prevent pet from attacking stuff while feared if (!GetTarget()) break; if (GetTarget()->IsMezzed()) { MessageString(Chat::NPCQuestSay, CANNOT_WAKE, mypet->GetCleanName(), GetTarget()->GetCleanName()); break; } if (!mypet->IsAttackAllowed(GetTarget())) { mypet->SayString(this, NOT_LEGAL_TARGET); break; } if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { if (GetTarget() != this && DistanceSquaredNoZ(mypet->GetPosition(), GetTarget()->GetPosition()) <= (RuleR(Pets, AttackCommandRange)*RuleR(Pets, AttackCommandRange))) { if (mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } if (mypet->IsPetRegroup()) { mypet->SetPetRegroup(false); SetPetCommandState(PET_BUTTON_REGROUP, 0); } // fix GUI sit button to be unpressed and stop sitting regen SetPetCommandState(PET_BUTTON_SIT, 0); mypet->SetAppearance(eaStanding); zone->AddAggroMob(); mypet->AddToHateList(GetTarget(), 1, 0, true, false, false, SPELL_UNKNOWN, true); MessageString(Chat::PetResponse, PET_ATTACKING, mypet->GetCleanName(), GetTarget()->GetCleanName()); } } break; } case PET_BACKOFF: { if (mypet->IsFeared()) break; //keeps pet running while feared if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SayString(this, Chat::PetResponse, PET_CALMING); mypet->WipeHateList(); mypet->SetTarget(nullptr); if (mypet->IsPetStop()) { 
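/* backing off also releases an active stop toggle (and its GUI button) so the pet resumes normal follow behavior */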
mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } } break; } case PET_HEALTHREPORT: { if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { MessageString(Chat::PetResponse, PET_REPORT_HP, ConvertArrayF(mypet->GetHPRatio(), val1)); mypet->ShowBuffList(this); } break; } case PET_GETLOST: { if (mypet->Charmed()) break; if (mypet->GetPetType() == petCharmed || !mypet->IsNPC()) { // eqlive ignores this command // we could just remove the charm // and continue mypet->BuffFadeByEffect(SE_Charm); break; } else { SetPet(nullptr); } mypet->SayString(this, Chat::PetResponse, PET_GETLOST_STRING); mypet->CastToNPC()->Depop(); //Oddly, the client (Titanium) will still allow "/pet get lost" command despite me adding the code below. If someone can figure that out, you can uncomment this code and use it. /* if((mypet->GetPetType() == petAnimation && GetAA(aaAnimationEmpathy) >= 2) || mypet->GetPetType() != petAnimation) { mypet->SayString(PET_GETLOST_STRING); mypet->CastToNPC()->Depop(); } */ break; } case PET_GUARDHERE: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { if (mypet->IsNPC()) { // Set Sit button to unpressed - send stand anim/end hpregen SetPetCommandState(PET_BUTTON_SIT, 0); mypet->SendAppearancePacket(AT_Anim, ANIM_STAND); mypet->SayString(this, Chat::PetResponse, PET_GUARDINGLIFE); mypet->SetPetOrder(SPO_Guard); mypet->CastToNPC()->SaveGuardSpot(mypet->GetPosition()); if (!mypet->GetTarget()) // want them to not twitch if they're chasing something down mypet->StopNavigation(); if (mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } } } break; } case PET_FOLLOWME: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SayString(this, Chat::PetResponse, PET_FOLLOWING); mypet->SetPetOrder(SPO_Follow); // fix GUI sit button to be unpressed - send stand anim/end hpregen SetPetCommandState(PET_BUTTON_SIT, 0); mypet->SendAppearancePacket(AT_Anim, ANIM_STAND); if (mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } } break; } case PET_TAUNT: { if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { if (mypet->CastToNPC()->IsTaunting()) { MessageString(Chat::PetResponse, PET_NO_TAUNT); mypet->CastToNPC()->SetTaunting(false); } else { MessageString(Chat::PetResponse, PET_DO_TAUNT); mypet->CastToNPC()->SetTaunting(true); } } break; } case PET_TAUNT_ON: { if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { MessageString(Chat::PetResponse, PET_DO_TAUNT); mypet->CastToNPC()->SetTaunting(true); } break; } case PET_TAUNT_OFF: { if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { MessageString(Chat::PetResponse, PET_NO_TAUNT); mypet->CastToNPC()->SetTaunting(false); } break; } case PET_GUARDME: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SayString(this, Chat::PetResponse, PET_GUARDME_STRING); 
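/* aside from the spoken message, guard-me gets the same handling as PET_FOLLOWME: follow order, stand animation, and clearing any stop toggle */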
mypet->SetPetOrder(SPO_Follow); // Set Sit button to unpressed - send stand anim/end hpregen SetPetCommandState(PET_BUTTON_SIT, 0); mypet->SendAppearancePacket(AT_Anim, ANIM_STAND); if (mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } } break; } case PET_SIT: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { if (mypet->GetPetOrder() == SPO_Sit) { mypet->SayString(this, Chat::PetResponse, PET_SIT_STRING); mypet->SetPetOrder(SPO_Follow); mypet->SendAppearancePacket(AT_Anim, ANIM_STAND); } else { mypet->SayString(this, Chat::PetResponse, PET_SIT_STRING); mypet->SetPetOrder(SPO_Sit); mypet->SetRunAnimSpeed(0); if (!mypet->UseBardSpellLogic()) //maybe we can have a bard pet mypet->InterruptSpell(); //No cast 4 u. //i guess the pet should start casting mypet->SendAppearancePacket(AT_Anim, ANIM_SIT); } } break; } case PET_STANDUP: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SayString(this, Chat::PetResponse, PET_SIT_STRING); SetPetCommandState(PET_BUTTON_SIT, 0); mypet->SetPetOrder(SPO_Follow); mypet->SendAppearancePacket(AT_Anim, ANIM_STAND); } break; } case PET_SITDOWN: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SayString(this, Chat::PetResponse, PET_SIT_STRING); SetPetCommandState(PET_BUTTON_SIT, 1); mypet->SetPetOrder(SPO_Sit); mypet->SetRunAnimSpeed(0); if (!mypet->UseBardSpellLogic()) //maybe we can have a bard pet mypet->InterruptSpell(); //No cast 4 u. 
//i guess the pet should start casting mypet->SendAppearancePacket(AT_Anim, ANIM_SIT); } break; } case PET_HOLD: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsHeld()) { if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_HOLD_SET_OFF); mypet->SetHeld(false); } else { if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_HOLD_SET_ON); if (m_ClientVersionBit & EQ::versions::maskUFAndLater) mypet->SayString(this, Chat::PetResponse, PET_NOW_HOLDING); else mypet->SayString(this, Chat::PetResponse, PET_ON_HOLD); mypet->SetHeld(true); } mypet->SetGHeld(false); SetPetCommandState(PET_BUTTON_GHOLD, 0); } break; } case PET_HOLD_ON: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC() && !mypet->IsHeld()) { if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_HOLD_SET_ON); if (m_ClientVersionBit & EQ::versions::maskUFAndLater) mypet->SayString(this, Chat::PetResponse, PET_NOW_HOLDING); else mypet->SayString(this, Chat::PetResponse, PET_ON_HOLD); mypet->SetHeld(true); mypet->SetGHeld(false); SetPetCommandState(PET_BUTTON_GHOLD, 0); } break; } case PET_HOLD_OFF: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC() && mypet->IsHeld()) { if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_HOLD_SET_OFF); mypet->SetHeld(false); } break; } case PET_GHOLD: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsGHeld()) { if (m_ClientVersionBit & EQ::versions::maskUFAndLater) MessageString(Chat::PetResponse, PET_OFF_GHOLD); mypet->SetGHeld(false); } else { if (m_ClientVersionBit & EQ::versions::maskUFAndLater) { MessageString(Chat::PetResponse, PET_ON_GHOLD); mypet->SayString(this, Chat::PetResponse, PET_GHOLD_ON_MSG); } else { mypet->SayString(this, Chat::PetResponse, PET_ON_HOLD); } mypet->SetGHeld(true); } mypet->SetHeld(false); SetPetCommandState(PET_BUTTON_HOLD, 0); } break; } case PET_GHOLD_ON: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (m_ClientVersionBit & EQ::versions::maskUFAndLater) { MessageString(Chat::PetResponse, PET_ON_GHOLD); mypet->SayString(this, Chat::PetResponse, PET_GHOLD_ON_MSG); } else { mypet->SayString(this, Chat::PetResponse, PET_ON_HOLD); } mypet->SetGHeld(true); mypet->SetHeld(false); SetPetCommandState(PET_BUTTON_HOLD, 0); } break; } case PET_GHOLD_OFF: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC() && mypet->IsGHeld()) { if (m_ClientVersionBit & EQ::versions::maskUFAndLater) MessageString(Chat::PetResponse, PET_OFF_GHOLD); mypet->SetGHeld(false); } break; } case PET_SPELLHOLD: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsFeared()) break; if (mypet->IsNoCast()) { MessageString(Chat::PetResponse, PET_CASTING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_SPELLHOLD_SET_OFF); mypet->SetNoCast(false); } else { MessageString(Chat::PetResponse, PET_NOT_CASTING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_SPELLHOLD_SET_ON); mypet->SetNoCast(true); } } break; } case PET_SPELLHOLD_ON: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsFeared()) break; if (!mypet->IsNoCast()) { MessageString(Chat::PetResponse, PET_NOT_CASTING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_SPELLHOLD_SET_ON); mypet->SetNoCast(true); } } break; } case PET_SPELLHOLD_OFF: 
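/* explicit off-switch: only clears no-cast when it is currently set, mirroring PET_SPELLHOLD_ON above */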
{ if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsFeared()) break; if (mypet->IsNoCast()) { MessageString(Chat::PetResponse, PET_CASTING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_SPELLHOLD_SET_OFF); mypet->SetNoCast(false); } } break; } case PET_FOCUS: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsFeared()) break; if (mypet->IsFocused()) { MessageString(Chat::PetResponse, PET_NOT_FOCUSING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_FOCUS_SET_OFF); mypet->SetFocused(false); } else { MessageString(Chat::PetResponse, PET_NOW_FOCUSING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_FOCUS_SET_ON); mypet->SetFocused(true); } } break; } case PET_FOCUS_ON: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsFeared()) break; if (!mypet->IsFocused()) { MessageString(Chat::PetResponse, PET_NOW_FOCUSING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_FOCUS_SET_ON); mypet->SetFocused(true); } } break; } case PET_FOCUS_OFF: { if (aabonuses.PetCommands[PetCommand] && mypet->IsNPC()) { if (mypet->IsFeared()) break; if (mypet->IsFocused()) { MessageString(Chat::PetResponse, PET_NOT_FOCUSING); if (m_ClientVersionBit & EQ::versions::maskSoDAndLater) MessageString(Chat::PetResponse, PET_FOCUS_SET_OFF); mypet->SetFocused(false); } } break; } case PET_STOP: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { if (mypet->IsPetStop()) { mypet->SetPetStop(false); } else { mypet->SetPetStop(true); mypet->StopNavigation(); mypet->SetTarget(nullptr); if (mypet->IsPetRegroup()) { mypet->SetPetRegroup(false); SetPetCommandState(PET_BUTTON_REGROUP, 0); } } mypet->SayString(this, Chat::PetResponse, PET_GETLOST_STRING); } break; } case PET_STOP_ON: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SetPetStop(true); mypet->StopNavigation(); mypet->SetTarget(nullptr); mypet->SayString(this, Chat::PetResponse, PET_GETLOST_STRING); if (mypet->IsPetRegroup()) { mypet->SetPetRegroup(false); SetPetCommandState(PET_BUTTON_REGROUP, 0); } } break; } case PET_STOP_OFF: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if ((mypet->GetPetType() == petAnimation && aabonuses.PetCommands[PetCommand]) || mypet->GetPetType() != petAnimation) { mypet->SetPetStop(false); mypet->SayString(this, Chat::PetResponse, PET_GETLOST_STRING); } break; } case PET_REGROUP: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if (aabonuses.PetCommands[PetCommand]) { if (mypet->IsPetRegroup()) { mypet->SetPetRegroup(false); mypet->SayString(this, Chat::PetResponse, PET_OFF_REGROUPING); } else { mypet->SetPetRegroup(true); mypet->SetTarget(nullptr); mypet->SayString(this, Chat::PetResponse, PET_ON_REGROUPING); if (mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } } } break; } case PET_REGROUP_ON: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if (aabonuses.PetCommands[PetCommand]) { mypet->SetPetRegroup(true); mypet->SetTarget(nullptr); mypet->SayString(this, Chat::PetResponse, PET_ON_REGROUPING); if 
(mypet->IsPetStop()) { mypet->SetPetStop(false); SetPetCommandState(PET_BUTTON_STOP, 0); } } break; } case PET_REGROUP_OFF: { if (mypet->IsFeared()) break; //could be exploited like PET_BACKOFF if (aabonuses.PetCommands[PetCommand]) { mypet->SetPetRegroup(false); mypet->SayString(this, Chat::PetResponse, PET_OFF_REGROUPING); } break; } default: printf("Client attempted to use a unknown pet command:\n"); break; } } void Client::Handle_OP_Petition(const EQApplicationPacket *app) { if (app->size <= 1) return; if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); /*else if(petition_list.FindPetitionByAccountName(this->AccountName())) { Message(0,"You already have a petition in queue, you cannot petition again until this one has been responded to or you have deleted the petition."); return; }*/ else { if (petition_list.FindPetitionByAccountName(AccountName())) { Message(0, "You already have a petition in the queue, you must wait for it to be answered or use /deletepetition to delete it."); return; } auto pet = new Petition(CharacterID()); pet->SetAName(this->AccountName()); pet->SetClass(this->GetClass()); pet->SetLevel(this->GetLevel()); pet->SetCName(this->GetName()); pet->SetRace(this->GetRace()); pet->SetLastGM(""); pet->SetCName(this->GetName()); pet->SetPetitionText((char*)app->pBuffer); pet->SetZone(zone->GetZoneID()); pet->SetUrgency(0); petition_list.AddPetition(pet); database.InsertPetitionToDB(pet); petition_list.UpdateGMQueue(); petition_list.UpdateZoneListQueue(); worldserver.SendEmoteMessage(0, 0, 80, 15, "%s has made a petition. #%i", GetName(), pet->GetID()); } return; } void Client::Handle_OP_PetitionBug(const EQApplicationPacket *app) { Message(0, "Petition Bugs are not supported, please use /bug."); return; } void Client::Handle_OP_PetitionCheckIn(const EQApplicationPacket *app) { if (app->size != sizeof(Petition_Struct)) { LogError("Wrong size: OP_PetitionCheckIn, size=[{}], expected [{}]", app->size, sizeof(Petition_Struct)); return; } Petition_Struct* inpet = (Petition_Struct*)app->pBuffer; Petition* pet = petition_list.GetPetitionByID(inpet->petnumber); //if (inpet->urgency != pet->GetUrgency()) pet->SetUrgency(inpet->urgency); pet->SetLastGM(this->GetName()); pet->SetGMText(inpet->gmtext); pet->SetCheckedOut(false); petition_list.UpdatePetition(pet); petition_list.UpdateGMQueue(); petition_list.UpdateZoneListQueue(); return; } void Client::Handle_OP_PetitionCheckout(const EQApplicationPacket *app) { if (app->size != sizeof(uint32)) { std::cout << "Wrong size: OP_PetitionCheckout, size=" << app->size << ", expected " << sizeof(uint32) << std::endl; return; } if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { uint32 getpetnum = *((uint32*)app->pBuffer); Petition* getpet = petition_list.GetPetitionByID(getpetnum); if (getpet != 0) { getpet->AddCheckout(); getpet->SetCheckedOut(true); getpet->SendPetitionToPlayer(this->CastToClient()); petition_list.UpdatePetition(getpet); petition_list.UpdateGMQueue(); petition_list.UpdateZoneListQueue(); } } return; } void Client::Handle_OP_PetitionDelete(const EQApplicationPacket *app) { if (app->size != sizeof(PetitionUpdate_Struct)) { LogError("Wrong size: OP_PetitionDelete, size=[{}], expected [{}]", app->size, sizeof(PetitionUpdate_Struct)); return; } auto outapp = new EQApplicationPacket(OP_PetitionUpdate, sizeof(PetitionUpdate_Struct)); PetitionUpdate_Struct* pet = (PetitionUpdate_Struct*)outapp->pBuffer; pet->petnumber = *((int*)app->pBuffer); pet->color = 0x00; pet->status 
= 0xFFFFFFFF; pet->senttime = 0; strcpy(pet->accountid, ""); strcpy(pet->gmsenttoo, ""); pet->quetotal = petition_list.GetTotalPetitions(); strcpy(pet->charname, ""); FastQueuePacket(&outapp); if (petition_list.DeletePetition(pet->petnumber) == -1) std::cout << "Something is borked with: " << pet->petnumber << std::endl; petition_list.ClearPetitions(); petition_list.UpdateGMQueue(); petition_list.ReadDatabase(); petition_list.UpdateZoneListQueue(); return; } void Client::Handle_OP_PetitionQue(const EQApplicationPacket *app) { #ifdef _EQDEBUG printf("%s looking at petitions..\n", this->GetName()); #endif return; } void Client::Handle_OP_PetitionRefresh(const EQApplicationPacket *app) { // This is When Client Asks for Petition Again and Again... // break is here because it floods the zones and causes lag if it // Were to actually do something:P We update on our own schedule now. return; } void Client::Handle_OP_PetitionResolve(const EQApplicationPacket *app) { Handle_OP_PetitionDelete(app); } void Client::Handle_OP_PetitionUnCheckout(const EQApplicationPacket *app) { if (app->size != sizeof(uint32)) { std::cout << "Wrong size: OP_PetitionUnCheckout, size=" << app->size << ", expected " << sizeof(uint32) << std::endl; return; } if (!worldserver.Connected()) Message(0, "Error: World server disconnected"); else { uint32 getpetnum = *((uint32*)app->pBuffer); Petition* getpet = petition_list.GetPetitionByID(getpetnum); if (getpet != 0) { getpet->SetCheckedOut(false); petition_list.UpdatePetition(getpet); petition_list.UpdateGMQueue(); petition_list.UpdateZoneListQueue(); } } return; } void Client::Handle_OP_PlayerStateAdd(const EQApplicationPacket *app) { if (app->size != sizeof(PlayerState_Struct)) { std::cout << "Wrong size: OP_PlayerStateAdd, size=" << app->size << ", expected " << sizeof(PlayerState_Struct) << std::endl; return; } PlayerState_Struct *ps = (PlayerState_Struct *)app->pBuffer; AddPlayerState(ps->state); entity_list.QueueClients(this, app, true); } void Client::Handle_OP_PlayerStateRemove(const EQApplicationPacket *app) { if (app->size != sizeof(PlayerState_Struct)) { std::cout << "Wrong size: OP_PlayerStateRemove, size=" << app->size << ", expected " << sizeof(PlayerState_Struct) << std::endl; return; } PlayerState_Struct *ps = (PlayerState_Struct *)app->pBuffer; RemovePlayerState(ps->state); entity_list.QueueClients(this, app, true); } void Client::Handle_OP_PickPocket(const EQApplicationPacket *app) { if (app->size != sizeof(PickPocket_Struct)) { LogError("Size mismatch for Pick Pocket packet"); DumpPacket(app); } if (!HasSkill(EQ::skills::SkillPickPockets)) { return; } if (!p_timers.Expired(&database, pTimerBeggingPickPocket, false)) { Message(Chat::Red, "Ability recovery time not yet met."); database.SetMQDetectionFlag(this->AccountName(), this->GetName(), "OP_PickPocket was sent again too quickly.", zone->GetShortName()); return; } PickPocket_Struct* pick_in = (PickPocket_Struct*)app->pBuffer; Mob* victim = entity_list.GetMob(pick_in->to); if (!victim) return; p_timers.Start(pTimerBeggingPickPocket, 8); if (victim == this) { Message(0, "You catch yourself red-handed."); auto outapp = new EQApplicationPacket(OP_PickPocket, sizeof(sPickPocket_Struct)); sPickPocket_Struct* pick_out = (sPickPocket_Struct*)outapp->pBuffer; pick_out->coin = 0; pick_out->from = victim->GetID(); pick_out->to = GetID(); pick_out->myskill = GetSkill(EQ::skills::SkillPickPockets); pick_out->type = 0; //if we do not send this packet the client will lock up and require the player to relog. 
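/*
	The same zeroed reply is sent in the pet-owner and player-target branches below; only NPC
	victims route through NPC::PickPocket() for an actual steal attempt.
*/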
QueuePacket(outapp); safe_delete(outapp); } else if (victim->GetOwnerID()) { Message(0, "You cannot steal from pets!"); auto outapp = new EQApplicationPacket(OP_PickPocket, sizeof(sPickPocket_Struct)); sPickPocket_Struct* pick_out = (sPickPocket_Struct*)outapp->pBuffer; pick_out->coin = 0; pick_out->from = victim->GetID(); pick_out->to = GetID(); pick_out->myskill = GetSkill(EQ::skills::SkillPickPockets); pick_out->type = 0; //if we do not send this packet the client will lock up and require the player to relog. QueuePacket(outapp); safe_delete(outapp); } else if (victim->IsNPC()) { victim->CastToNPC()->PickPocket(this); } else { Message(0, "Stealing from clients not yet supported."); auto outapp = new EQApplicationPacket(OP_PickPocket, sizeof(sPickPocket_Struct)); sPickPocket_Struct* pick_out = (sPickPocket_Struct*)outapp->pBuffer; pick_out->coin = 0; pick_out->from = victim->GetID(); pick_out->to = GetID(); pick_out->myskill = GetSkill(EQ::skills::SkillPickPockets); pick_out->type = 0; //if we do not send this packet the client will lock up and require the player to relog. QueuePacket(outapp); safe_delete(outapp); } } void Client::Handle_OP_PopupResponse(const EQApplicationPacket *app) { if (app->size != sizeof(PopupResponse_Struct)) { LogDebug("Size mismatch in OP_PopupResponse expected [{}] got [{}]", sizeof(PopupResponse_Struct), app->size); DumpPacket(app); return; } PopupResponse_Struct *popup_response = (PopupResponse_Struct *) app->pBuffer; /** * Handle any EQEmu defined popup Ids first */ std::string response; switch (popup_response->popupid) { case POPUPID_UPDATE_SHOWSTATSWINDOW: if (GetTarget() && GetTarget()->IsClient()) { GetTarget()->CastToClient()->SendStatsWindow(this, true); } else { SendStatsWindow(this, true); } return; break; case POPUPID_DIAWIND_ONE: if (EntityVariableExists(DIAWIND_RESPONSE_ONE_KEY.c_str())) { response = GetEntityVariable(DIAWIND_RESPONSE_ONE_KEY.c_str()); if (!response.empty()) { ChannelMessageReceived(8, 0, 100, response.c_str()); } } break; case POPUPID_DIAWIND_TWO: if (EntityVariableExists(DIAWIND_RESPONSE_TWO_KEY.c_str())) { response = GetEntityVariable(DIAWIND_RESPONSE_TWO_KEY.c_str()); if (!response.empty()) { ChannelMessageReceived(8, 0, 100, response.c_str()); } } break; case EQ::popupresponse::MOB_INFO_DISMISS: SetDisplayMobInfoWindow(false); Message(Chat::Yellow, "[DevTools] Window snoozed in this zone..."); break; default: break; } char buf[16]; sprintf(buf, "%d", popup_response->popupid); parse->EventPlayer(EVENT_POPUP_RESPONSE, this, buf, 0); Mob *Target = GetTarget(); if (Target && Target->IsNPC()) { parse->EventNPC(EVENT_POPUP_RESPONSE, Target->CastToNPC(), this, buf, 0); } } void Client::Handle_OP_PotionBelt(const EQApplicationPacket *app) { if (app->size != sizeof(MovePotionToBelt_Struct)) { LogDebug("Size mismatch in OP_PotionBelt expected [{}] got [{}]", sizeof(MovePotionToBelt_Struct), app->size); DumpPacket(app); return; } MovePotionToBelt_Struct *mptbs = (MovePotionToBelt_Struct*)app->pBuffer; if (!EQ::ValueWithin(mptbs->SlotNumber, 0U, 3U)) { LogDebug("Client::Handle_OP_PotionBelt mptbs->SlotNumber out of range"); return; } if (mptbs->Action == 0) { const EQ::ItemData *BaseItem = database.GetItem(mptbs->ItemID); if (BaseItem) { m_pp.potionbelt.Items[mptbs->SlotNumber].ID = BaseItem->ID; m_pp.potionbelt.Items[mptbs->SlotNumber].Icon = BaseItem->Icon; strn0cpy(m_pp.potionbelt.Items[mptbs->SlotNumber].Name, BaseItem->Name, sizeof(BaseItem->Name)); database.SaveCharacterPotionBelt(this->CharacterID(), mptbs->SlotNumber, 
m_pp.potionbelt.Items[mptbs->SlotNumber].ID, m_pp.potionbelt.Items[mptbs->SlotNumber].Icon); } } else { m_pp.potionbelt.Items[mptbs->SlotNumber].ID = 0; m_pp.potionbelt.Items[mptbs->SlotNumber].Icon = 0; m_pp.potionbelt.Items[mptbs->SlotNumber].Name[0] = '\0'; } } void Client::Handle_OP_PurchaseLeadershipAA(const EQApplicationPacket *app) { if (app->size != sizeof(uint32)) { LogDebug("Size mismatch in OP_LeadershipExpToggle expected [{}] got [{}]", 1, app->size); DumpPacket(app); return; } uint32 aaid = *((uint32 *)app->pBuffer); if (aaid >= _maxLeaderAA) return; uint32 current_rank = m_pp.leader_abilities.ranks[aaid]; if (current_rank >= MAX_LEADERSHIP_TIERS) { Message(Chat::Red, "This ability can be trained no further."); return; } uint8 cost = LeadershipAACosts[aaid][current_rank]; if (cost == 0) { Message(Chat::Red, "This ability can be trained no further."); return; } //TODO: we need to enforce prerequisits if (aaid >= raidAAMarkNPC) { //it is a raid ability. if (cost > m_pp.raid_leadership_points) { Message(Chat::Red, "You do not have enough points to purchase this ability."); return; } //sell them the ability. m_pp.raid_leadership_points -= cost; m_pp.leader_abilities.ranks[aaid]++; database.SaveCharacterLeadershipAA(this->CharacterID(), &m_pp); } else { //it is a group ability. if (cost > m_pp.group_leadership_points) { Message(Chat::Red, "You do not have enough points to purchase this ability."); return; } //sell them the ability. m_pp.group_leadership_points -= cost; m_pp.leader_abilities.ranks[aaid]++; database.SaveCharacterLeadershipAA(this->CharacterID(), &m_pp); } //success, send them an update auto outapp = new EQApplicationPacket(OP_UpdateLeadershipAA, sizeof(UpdateLeadershipAA_Struct)); UpdateLeadershipAA_Struct *u = (UpdateLeadershipAA_Struct *)outapp->pBuffer; u->ability_id = aaid; u->new_rank = m_pp.leader_abilities.ranks[aaid]; if (aaid >= raidAAMarkNPC) // raid AA u->pointsleft = m_pp.raid_leadership_points; else // group AA u->pointsleft = m_pp.group_leadership_points; FastQueuePacket(&outapp); // Update all group members with the new AA the leader has purchased. if (IsRaidGrouped()) { Raid *r = GetRaid(); if (!r) return; if (aaid >= raidAAMarkNPC) { r->UpdateRaidAAs(); r->SendAllRaidLeadershipAA(); } else { uint32 gid = r->GetGroup(this); r->UpdateGroupAAs(gid); r->GroupUpdate(gid, false); } } else if (IsGrouped()) { Group *g = GetGroup(); if (!g) return; g->UpdateGroupAAs(); g->SendLeadershipAAUpdate(); } } void Client::Handle_OP_PVPLeaderBoardDetailsRequest(const EQApplicationPacket *app) { // This opcode is sent by the client when the player right clicks a name on the PVP leaderboard and sends // further details about the selected player, e.g. Race/Class/AAs/Guild etc. // if (app->size != sizeof(PVPLeaderBoardDetailsRequest_Struct)) { LogDebug("Size mismatch in OP_PVPLeaderBoardDetailsRequest expected [{}] got [{}]", sizeof(PVPLeaderBoardDetailsRequest_Struct), app->size); DumpPacket(app); return; } auto outapp = new EQApplicationPacket(OP_PVPLeaderBoardDetailsReply, sizeof(PVPLeaderBoardDetailsReply_Struct)); PVPLeaderBoardDetailsReply_Struct *pvplbdrs = (PVPLeaderBoardDetailsReply_Struct *)outapp->pBuffer; // TODO: Record and send this data. QueuePacket(outapp); safe_delete(outapp); } void Client::Handle_OP_PVPLeaderBoardRequest(const EQApplicationPacket *app) { // This Opcode is sent by the client when the Leaderboard button on the PVP Stats window is pressed. 
// // It has a single uint32 payload which is the sort method: // // PVPSortByKills = 0, PVPSortByPoints = 1, PVPSortByInfamy = 2 // if (app->size != sizeof(PVPLeaderBoardRequest_Struct)) { LogDebug("Size mismatch in OP_PVPLeaderBoardRequest expected [{}] got [{}]", sizeof(PVPLeaderBoardRequest_Struct), app->size); DumpPacket(app); return; } /*PVPLeaderBoardRequest_Struct *pvplbrs = (PVPLeaderBoardRequest_Struct *)app->pBuffer;*/ //unused auto outapp = new EQApplicationPacket(OP_PVPLeaderBoardReply, sizeof(PVPLeaderBoard_Struct)); /*PVPLeaderBoard_Struct *pvplb = (PVPLeaderBoard_Struct *)outapp->pBuffer;*/ //unused // TODO: Record and send this data. QueuePacket(outapp); safe_delete(outapp); } void Client::Handle_OP_QueryUCSServerStatus(const EQApplicationPacket *app) { if (zone->IsUCSServerAvailable()) { EQApplicationPacket* outapp = nullptr; std::string buffer; std::string MailKey = database.GetMailKey(CharacterID(), true); EQ::versions::UCSVersion ConnectionType = EQ::versions::ucsUnknown; // chat server packet switch (ClientVersion()) { case EQ::versions::ClientVersion::Titanium: ConnectionType = EQ::versions::ucsTitaniumChat; break; case EQ::versions::ClientVersion::SoF: ConnectionType = EQ::versions::ucsSoFCombined; break; case EQ::versions::ClientVersion::SoD: ConnectionType = EQ::versions::ucsSoDCombined; break; case EQ::versions::ClientVersion::UF: ConnectionType = EQ::versions::ucsUFCombined; break; case EQ::versions::ClientVersion::RoF: ConnectionType = EQ::versions::ucsRoFCombined; break; case EQ::versions::ClientVersion::RoF2: ConnectionType = EQ::versions::ucsRoF2Combined; break; default: ConnectionType = EQ::versions::ucsUnknown; break; } buffer = StringFormat("%s,%i,%s.%s,%c%s", Config->ChatHost.c_str(), Config->ChatPort, Config->ShortName.c_str(), GetName(), ConnectionType, MailKey.c_str() ); outapp = new EQApplicationPacket(OP_SetChatServer, (buffer.length() + 1)); memcpy(outapp->pBuffer, buffer.c_str(), buffer.length()); outapp->pBuffer[buffer.length()] = '\0'; QueuePacket(outapp); safe_delete(outapp); // mail server packet switch (ClientVersion()) { case EQ::versions::ClientVersion::Titanium: ConnectionType = EQ::versions::ucsTitaniumMail; break; default: // retain value from previous switch break; } buffer = StringFormat("%s,%i,%s.%s,%c%s", Config->MailHost.c_str(), Config->MailPort, Config->ShortName.c_str(), GetName(), ConnectionType, MailKey.c_str() ); outapp = new EQApplicationPacket(OP_SetChatServer2, (buffer.length() + 1)); memcpy(outapp->pBuffer, buffer.c_str(), buffer.length()); outapp->pBuffer[buffer.length()] = '\0'; QueuePacket(outapp); safe_delete(outapp); } } void Client::Handle_OP_RaidCommand(const EQApplicationPacket *app) { if (app->size < sizeof(RaidGeneral_Struct)) { LogError("Wrong size: OP_RaidCommand, size=[{}], expected at least [{}]", app->size, sizeof(RaidGeneral_Struct)); DumpPacket(app); return; } RaidGeneral_Struct *raid_command_packet = (RaidGeneral_Struct*)app->pBuffer; switch (raid_command_packet->action) { case RaidCommandInviteIntoExisting: case RaidCommandInvite: { Client *player_to_invite = entity_list.GetClientByName(raid_command_packet->player_name); if (!player_to_invite) break; Group *player_to_invite_group = player_to_invite->GetGroup(); if (player_to_invite->HasRaid()) { Message(Chat::Red, "%s is already in a raid.", player_to_invite->GetName()); break; } if (player_to_invite_group && player_to_invite_group->IsGroupMember(this)) { MessageString(Chat::Red, ALREADY_IN_PARTY); break; } if (player_to_invite_group && 
!player_to_invite_group->IsLeader(player_to_invite)) { Message(Chat::Red, "You can only invite an ungrouped player or group leader to join your raid."); break; } /* Send out invite to the client */ auto outapp = new EQApplicationPacket(OP_RaidUpdate, sizeof(RaidGeneral_Struct)); RaidGeneral_Struct *raid_command = (RaidGeneral_Struct*)outapp->pBuffer; strn0cpy(raid_command->leader_name, raid_command_packet->leader_name, 64); strn0cpy(raid_command->player_name, raid_command_packet->player_name, 64); raid_command->parameter = 0; raid_command->action = 20; player_to_invite->QueuePacket(outapp); safe_delete(outapp); break; } case RaidCommandAcceptInvite: { Client *player_accepting_invite = entity_list.GetClientByName(raid_command_packet->player_name); if (player_accepting_invite) { if (IsRaidGrouped()) { player_accepting_invite->MessageString(Chat::White, ALREADY_IN_RAID, GetName()); //group failed, must invite members not in raid... return; } Raid *raid = entity_list.GetRaidByClient(player_accepting_invite); if (raid) { raid->VerifyRaid(); Group *group = GetGroup(); if (group) { if (group->GroupCount() + raid->RaidCount() > MAX_RAID_MEMBERS) { player_accepting_invite->Message(Chat::Red, "Invite failed, group invite would create a raid larger than the maximum number of members allowed."); return; } } else { if (1 + raid->RaidCount() > MAX_RAID_MEMBERS) { player_accepting_invite->Message(Chat::Red, "Invite failed, member invite would create a raid larger than the maximum number of members allowed."); return; } } if (group) {//add us all uint32 free_group_id = raid->GetFreeGroup(); Client *addClient = nullptr; for (int x = 0; x < 6; x++) { if (group->members[x]) { Client *c = nullptr; if (group->members[x]->IsClient()) c = group->members[x]->CastToClient(); else continue; if (!addClient) { addClient = c; raid->SetGroupLeader(addClient->GetName()); } raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); if (group->IsLeader(group->members[x])) raid->AddMember(c, free_group_id, false, true); else raid->AddMember(c, free_group_id); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } } group->JoinRaidXTarget(raid); group->DisbandGroup(true); raid->GroupUpdate(free_group_id); } else { raid->SendRaidCreate(this); raid->SendMakeLeaderPacketTo(raid->leadername, this); raid->AddMember(this); raid->SendBulkRaid(this); if (raid->IsLocked()) { raid->SendRaidLockTo(this); } } } else { Group *player_invited_group = player_accepting_invite->GetGroup(); Group *group = GetGroup(); if (group) //if our target has a group { raid = new Raid(player_accepting_invite); entity_list.AddRaid(raid); raid->SetRaidDetails(); uint32 raid_free_group_id = raid->GetFreeGroup(); /* If we already have a group then cycle through adding us... 
*/ if (player_invited_group) { Client *client_to_be_leader = nullptr; for (int x = 0; x < 6; x++) { if (player_invited_group->members[x]) { if (!client_to_be_leader) { if (player_invited_group->members[x]->IsClient()) { client_to_be_leader = player_invited_group->members[x]->CastToClient(); raid->SetGroupLeader(client_to_be_leader->GetName()); } } if (player_invited_group->IsLeader(player_invited_group->members[x])) { Client *c = nullptr; if (player_invited_group->members[x]->IsClient()) c = player_invited_group->members[x]->CastToClient(); else continue; raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); raid->AddMember(c, raid_free_group_id, true, true, true); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } else { Client *c = nullptr; if (player_invited_group->members[x]->IsClient()) c = player_invited_group->members[x]->CastToClient(); else continue; raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); raid->AddMember(c, raid_free_group_id); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } } } player_invited_group->JoinRaidXTarget(raid, true); player_invited_group->DisbandGroup(true); raid->GroupUpdate(raid_free_group_id); raid_free_group_id = raid->GetFreeGroup(); } else { raid->SendRaidCreate(player_accepting_invite); raid->AddMember(player_accepting_invite, 0xFFFFFFFF, true, false, true); } Client *client_to_add = nullptr; /* Add client to an existing group */ for (int x = 0; x < 6; x++) { if (group->members[x]) { if (!client_to_add) { if (group->members[x]->IsClient()) { client_to_add = group->members[x]->CastToClient(); raid->SetGroupLeader(client_to_add->GetName()); } } if (group->IsLeader(group->members[x])) { Client *c = nullptr; if (group->members[x]->IsClient()) c = group->members[x]->CastToClient(); else continue; raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); raid->AddMember(c, raid_free_group_id, false, true); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } else { Client *c = nullptr; if (group->members[x]->IsClient()) c = group->members[x]->CastToClient(); else continue; raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); raid->AddMember(c, raid_free_group_id); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } } } group->JoinRaidXTarget(raid); group->DisbandGroup(true); raid->GroupUpdate(raid_free_group_id); } /* Target does not have a group */ else { if (player_invited_group) { raid = new Raid(player_accepting_invite); entity_list.AddRaid(raid); raid->SetRaidDetails(); Client *addClientig = nullptr; for (int x = 0; x < 6; x++) { if (player_invited_group->members[x]) { if (!addClientig) { if (player_invited_group->members[x]->IsClient()) { addClientig = player_invited_group->members[x]->CastToClient(); raid->SetGroupLeader(addClientig->GetName()); } } if (player_invited_group->IsLeader(player_invited_group->members[x])) { Client *c = nullptr; if (player_invited_group->members[x]->IsClient()) c = player_invited_group->members[x]->CastToClient(); else continue; raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); raid->AddMember(c, 0, true, true, true); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } else { Client *c = nullptr; if (player_invited_group->members[x]->IsClient()) c = player_invited_group->members[x]->CastToClient(); else continue; raid->SendRaidCreate(c); raid->SendMakeLeaderPacketTo(raid->leadername, c); 
raid->AddMember(c, 0); raid->SendBulkRaid(c); if (raid->IsLocked()) { raid->SendRaidLockTo(c); } } } } raid->SendRaidCreate(this); raid->SendMakeLeaderPacketTo(raid->leadername, this); raid->SendBulkRaid(this); player_invited_group->JoinRaidXTarget(raid, true); raid->AddMember(this); player_invited_group->DisbandGroup(true); raid->GroupUpdate(0); if (raid->IsLocked()) { raid->SendRaidLockTo(this); } } else { // neither has a group raid = new Raid(player_accepting_invite); entity_list.AddRaid(raid); raid->SetRaidDetails(); raid->SendRaidCreate(player_accepting_invite); raid->SendRaidCreate(this); raid->SendMakeLeaderPacketTo(raid->leadername, this); raid->AddMember(player_accepting_invite, 0xFFFFFFFF, true, false, true); raid->SendBulkRaid(this); raid->AddMember(this); if (raid->IsLocked()) { raid->SendRaidLockTo(this); } } } } } break; } case RaidCommandDisband: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { uint32 group = raid->GetGroup(raid_command_packet->leader_name); if (group < 12) { uint32 i = raid->GetPlayerIndex(raid_command_packet->leader_name); if (raid->members[i].IsGroupLeader) { //assign group leader to someone else for (int x = 0; x < MAX_RAID_MEMBERS; x++) { if (strlen(raid->members[x].membername) > 0 && i != x) { if (raid->members[x].GroupNumber == group) { raid->SetGroupLeader(raid_command_packet->leader_name, false); raid->SetGroupLeader(raid->members[x].membername); raid->UpdateGroupAAs(group); break; } } } } if (raid->members[i].IsRaidLeader) { for (int x = 0; x < MAX_RAID_MEMBERS; x++) { if (strlen(raid->members[x].membername) > 0 && strcmp(raid->members[x].membername, raid->members[i].membername) != 0) { raid->SetRaidLeader(raid->members[i].membername, raid->members[x].membername); raid->UpdateRaidAAs(); raid->SendAllRaidLeadershipAA(); break; } } } } raid->RemoveMember(raid_command_packet->leader_name); Client *c = entity_list.GetClientByName(raid_command_packet->leader_name); if (c) raid->SendGroupDisband(c); else { auto pack = new ServerPacket(ServerOP_RaidGroupDisband, sizeof(ServerRaidGeneralAction_Struct)); ServerRaidGeneralAction_Struct* rga = (ServerRaidGeneralAction_Struct*)pack->pBuffer; rga->rid = GetID(); rga->zoneid = zone->GetZoneID(); rga->instance_id = zone->GetInstanceID(); strn0cpy(rga->playername, raid_command_packet->leader_name, 64); worldserver.SendPacket(pack); safe_delete(pack); } //r->SendRaidGroupRemove(ri->leader_name, grp); raid->GroupUpdate(group);// break //} } break; } case RaidCommandMoveGroup: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { /* Moving to group */ if (raid_command_packet->parameter < 12) { uint8 group_count = raid->GroupCount(raid_command_packet->parameter); if (group_count < 6) { Client *c = entity_list.GetClientByName(raid_command_packet->leader_name); uint32 old_group = raid->GetGroup(raid_command_packet->leader_name); if (raid_command_packet->parameter == old_group) //don't rejoin grp if we order to join same group. 
break; if (raid->members[raid->GetPlayerIndex(raid_command_packet->leader_name)].IsGroupLeader) { raid->SetGroupLeader(raid_command_packet->leader_name, false); /* We were the leader of our old group */ if (old_group < 12) { /* Assign new group leader if we can */ for (int x = 0; x < MAX_RAID_MEMBERS; x++) { if (raid->members[x].GroupNumber == old_group) { if (strcmp(raid_command_packet->leader_name, raid->members[x].membername) != 0 && strlen(raid_command_packet->leader_name) > 0) { raid->SetGroupLeader(raid->members[x].membername); raid->UpdateGroupAAs(old_group); Client *client_to_update = entity_list.GetClientByName(raid->members[x].membername); if (client_to_update) { raid->SendRaidRemove(raid->members[x].membername, client_to_update); raid->SendRaidCreate(client_to_update); raid->SendMakeLeaderPacketTo(raid->leadername, client_to_update); raid->SendRaidAdd(raid->members[x].membername, client_to_update); raid->SendBulkRaid(client_to_update); if (raid->IsLocked()) { raid->SendRaidLockTo(client_to_update); } } else { auto pack = new ServerPacket(ServerOP_RaidChangeGroup, sizeof(ServerRaidGeneralAction_Struct)); ServerRaidGeneralAction_Struct *raid_command_packet = (ServerRaidGeneralAction_Struct*)pack->pBuffer; raid_command_packet->rid = raid->GetID(); raid_command_packet->zoneid = zone->GetZoneID(); raid_command_packet->instance_id = zone->GetInstanceID(); strn0cpy(raid_command_packet->playername, raid->members[x].membername, 64); worldserver.SendPacket(pack); safe_delete(pack); } break; } } } } } if (group_count == 0) { raid->SetGroupLeader(raid_command_packet->leader_name); raid->UpdateGroupAAs(raid_command_packet->parameter); } raid->MoveMember(raid_command_packet->leader_name, raid_command_packet->parameter); if (c) { raid->SendGroupDisband(c); } else { auto pack = new ServerPacket(ServerOP_RaidGroupDisband, sizeof(ServerRaidGeneralAction_Struct)); ServerRaidGeneralAction_Struct* raid_command = (ServerRaidGeneralAction_Struct*)pack->pBuffer; raid_command->rid = raid->GetID(); raid_command->zoneid = zone->GetZoneID(); raid_command->instance_id = zone->GetInstanceID(); strn0cpy(raid_command->playername, raid_command_packet->leader_name, 64); worldserver.SendPacket(pack); safe_delete(pack); } /* Send group update to our new group */ raid->GroupUpdate(raid_command_packet->parameter); /* If our old was a group send update there too */ if (old_group < 12) raid->GroupUpdate(old_group); } } /* Move player to ungrouped bank */ else { Client *c = entity_list.GetClientByName(raid_command_packet->leader_name); uint32 oldgrp = raid->GetGroup(raid_command_packet->leader_name); if (raid->members[raid->GetPlayerIndex(raid_command_packet->leader_name)].IsGroupLeader) { raid->SetGroupLeader(raid_command_packet->leader_name, false); for (int x = 0; x < MAX_RAID_MEMBERS; x++) { if (raid->members[x].GroupNumber == oldgrp && strlen(raid->members[x].membername) > 0 && strcmp(raid->members[x].membername, raid_command_packet->leader_name) != 0){ raid->SetGroupLeader(raid->members[x].membername); raid->UpdateGroupAAs(oldgrp); Client *client_leaving_group = entity_list.GetClientByName(raid->members[x].membername); if (client_leaving_group) { raid->SendRaidRemove(raid->members[x].membername, client_leaving_group); raid->SendRaidCreate(client_leaving_group); raid->SendMakeLeaderPacketTo(raid->leadername, client_leaving_group); raid->SendRaidAdd(raid->members[x].membername, client_leaving_group); raid->SendBulkRaid(client_leaving_group); if (raid->IsLocked()) { raid->SendRaidLockTo(client_leaving_group); } } 
else { auto pack = new ServerPacket( ServerOP_RaidChangeGroup, sizeof(ServerRaidGeneralAction_Struct)); ServerRaidGeneralAction_Struct *raid_command = (ServerRaidGeneralAction_Struct*)pack->pBuffer; raid_command->rid = raid->GetID(); strn0cpy(raid_command->playername, raid->members[x].membername, 64); raid_command->zoneid = zone->GetZoneID(); raid_command->instance_id = zone->GetInstanceID(); worldserver.SendPacket(pack); safe_delete(pack); } break; } } } raid->MoveMember(raid_command_packet->leader_name, 0xFFFFFFFF); if (c) { raid->SendGroupDisband(c); } else { auto pack = new ServerPacket(ServerOP_RaidGroupDisband, sizeof(ServerRaidGeneralAction_Struct)); ServerRaidGeneralAction_Struct* raid_command = (ServerRaidGeneralAction_Struct*)pack->pBuffer; raid_command->rid = raid->GetID(); raid_command->zoneid = zone->GetZoneID(); raid_command->instance_id = zone->GetInstanceID(); strn0cpy(raid_command->playername, raid_command_packet->leader_name, 64); worldserver.SendPacket(pack); safe_delete(pack); } raid->GroupUpdate(oldgrp); } } Client *client_moved = entity_list.GetClientByName(raid_command_packet->leader_name); if (client_moved && client_moved->GetRaid()) { client_moved->GetRaid()->SendHPManaEndPacketsTo(client_moved); client_moved->GetRaid()->SendHPManaEndPacketsFrom(client_moved); Log(Logs::General, Logs::HPUpdate, "Client::Handle_OP_RaidCommand :: %s sending and recieving HP/Mana/End updates", client_moved->GetCleanName() ); } break; } case RaidCommandRaidLock: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { if (!raid->IsLocked()) raid->LockRaid(true); else raid->SendRaidLockTo(this); } break; } case RaidCommandRaidUnlock: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { if (raid->IsLocked()) raid->LockRaid(false); else raid->SendRaidUnlockTo(this); } break; } case RaidCommandLootType2: case RaidCommandLootType: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { Message(Chat::Yellow, "Loot type changed to: %d.", raid_command_packet->parameter); raid->ChangeLootType(raid_command_packet->parameter); } break; } case RaidCommandAddLooter2: case RaidCommandAddLooter: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { Message(Chat::Yellow, "Adding %s as a raid looter.", raid_command_packet->leader_name); raid->AddRaidLooter(raid_command_packet->leader_name); } break; } case RaidCommandRemoveLooter2: case RaidCommandRemoveLooter: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { Message(Chat::Yellow, "Removing %s as a raid looter.", raid_command_packet->leader_name); raid->RemoveRaidLooter(raid_command_packet->leader_name); } break; } case RaidCommandMakeLeader: { Raid *raid = entity_list.GetRaidByClient(this); if (raid) { if (strcmp(raid->leadername, GetName()) == 0) { raid->SetRaidLeader(GetName(), raid_command_packet->leader_name); raid->UpdateRaidAAs(); raid->SendAllRaidLeadershipAA(); } } break; } case RaidCommandSetMotd: { Raid *raid = entity_list.GetRaidByClient(this); if (!raid) break; // we don't use the RaidGeneral here! 
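/*
	SetMotd reinterprets the payload as RaidMOTD_Struct because it carries the MOTD text
	(motd->motd) beyond the RaidGeneral fields. Note the size check at the top of this handler
	only verified sizeof(RaidGeneral_Struct), so the larger struct is assumed here rather than
	re-validated.
*/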
RaidMOTD_Struct *motd = (RaidMOTD_Struct *)app->pBuffer; raid->SetRaidMOTD(std::string(motd->motd)); raid->SaveRaidMOTD(); raid->SendRaidMOTDToWorld(); break; } default: { Message(Chat::Red, "Raid command (%d) NYI", raid_command_packet->action); break; } } } void Client::Handle_OP_RandomReq(const EQApplicationPacket *app) { if (app->size != sizeof(RandomReq_Struct)) { LogError("Wrong size: OP_RandomReq, size=[{}], expected [{}]", app->size, sizeof(RandomReq_Struct)); return; } const RandomReq_Struct* rndq = (const RandomReq_Struct*)app->pBuffer; uint32 randLow = rndq->low > rndq->high ? rndq->high : rndq->low; uint32 randHigh = rndq->low > rndq->high ? rndq->low : rndq->high; uint32 randResult; if (randLow == 0 && randHigh == 0) { // defaults randLow = 0; randHigh = 100; } randResult = zone->random.Int(randLow, randHigh); auto outapp = new EQApplicationPacket(OP_RandomReply, sizeof(RandomReply_Struct)); RandomReply_Struct* rr = (RandomReply_Struct*)outapp->pBuffer; rr->low = randLow; rr->high = randHigh; rr->result = randResult; strcpy(rr->name, GetName()); entity_list.QueueCloseClients(this, outapp, false, 400); safe_delete(outapp); return; } void Client::Handle_OP_ReadBook(const EQApplicationPacket *app) { if (app->size != sizeof(BookRequest_Struct)) { LogError("Wrong size: OP_ReadBook, size=[{}], expected [{}]", app->size, sizeof(BookRequest_Struct)); return; } BookRequest_Struct* book = (BookRequest_Struct*)app->pBuffer; ReadBook(book); if (ClientVersion() >= EQ::versions::ClientVersion::SoF) { EQApplicationPacket EndOfBook(OP_FinishWindow, 0); QueuePacket(&EndOfBook); } return; } void Client::Handle_OP_RecipeAutoCombine(const EQApplicationPacket *app) { if (app->size != sizeof(RecipeAutoCombine_Struct)) { LogError("Invalid size for RecipeAutoCombine_Struct: Expected: [{}], Got: [{}]", sizeof(RecipeAutoCombine_Struct), app->size); return; } RecipeAutoCombine_Struct* rac = (RecipeAutoCombine_Struct*)app->pBuffer; Object::HandleAutoCombine(this, rac); return; } void Client::Handle_OP_RecipeDetails(const EQApplicationPacket *app) { if (app->size < sizeof(uint32)) { LogError("Invalid size for RecipeDetails Request: Expected: [{}], Got: [{}]", sizeof(uint32), app->size); return; } uint32 *recipe_id = (uint32*)app->pBuffer; SendTradeskillDetails(*recipe_id); return; } void Client::Handle_OP_RecipesFavorite(const EQApplicationPacket *app) { if (app->size != sizeof(TradeskillFavorites_Struct)) { LogError("Invalid size for TradeskillFavorites_Struct: Expected: [{}], Got: [{}]", sizeof(TradeskillFavorites_Struct), app->size); return; } TradeskillFavorites_Struct* tsf = (TradeskillFavorites_Struct*)app->pBuffer; LogDebug("Requested Favorites for: [{}] - [{}]\n", tsf->object_type, tsf->some_id); // results show that object_type is combiner type // some_id = 0 if world combiner, item number otherwise // make where clause segment for container(s) std::string containers; uint32 combineObjectSlots; if (tsf->some_id == 0) { containers += StringFormat(" = %u ", tsf->object_type); // world combiner so no item number combineObjectSlots = 10; } else { containers += StringFormat(" in (%u, %u) ", tsf->object_type, tsf->some_id); // container in inventory auto item = database.GetItem(tsf->some_id); if (!item) { LogError("Invalid container ID: [{}]. GetItem returned null. 
Defaulting to BagSlots = 10.\n", tsf->some_id); combineObjectSlots = 10; } else { combineObjectSlots = item->BagSlots; } } std::string favoriteIDs; //gotta be big enough for 500 IDs bool first = true; //Assumes item IDs are <10 characters long for (uint16 favoriteIndex = 0; favoriteIndex < 500; ++favoriteIndex) { if (tsf->favorite_recipes[favoriteIndex] == 0) continue; if (first) { favoriteIDs += StringFormat("%u", tsf->favorite_recipes[favoriteIndex]); first = false; } else favoriteIDs += StringFormat(",%u", tsf->favorite_recipes[favoriteIndex]); } if (first) //no favorites.... return; // TODO: Clean this up const std::string query = StringFormat( SQL ( SELECT tr.id, tr.name, tr.trivial, SUM(tre.componentcount), tr.tradeskill FROM tradeskill_recipe AS tr LEFT JOIN tradeskill_recipe_entries AS tre ON tr.id = tre.recipe_id WHERE tr.enabled <> 0 AND tr.id IN (%s) AND tr.must_learn & 0x20 <> 0x20 AND ( ( tr.must_learn & 0x3 <> 0 ) OR (tr.must_learn & 0x3 = 0) ) %s GROUP BY tr.id HAVING sum( if( tre.item_id %s AND tre.iscontainer > 0, 1, 0 ) ) > 0 AND SUM(tre.componentcount) <= %u LIMIT 100 ), favoriteIDs.c_str(), ContentFilterCriteria::apply().c_str(), containers.c_str(), combineObjectSlots ); SendTradeskillSearchResults(query, tsf->object_type, tsf->some_id); } void Client::Handle_OP_RecipesSearch(const EQApplicationPacket *app) { if (app->size != sizeof(RecipesSearch_Struct)) { LogError( "Invalid size for RecipesSearch_Struct: Expected: [{}], Got: [{}]", sizeof(RecipesSearch_Struct), app->size ); return; } auto* p_recipes_search_struct = (RecipesSearch_Struct*)app->pBuffer; p_recipes_search_struct->query[55] = '\0'; //just to be sure. LogTradeskills( "[Handle_OP_RecipesSearch] Requested search recipes for object_type [{}] some_id [{}]", p_recipes_search_struct->object_type, p_recipes_search_struct->some_id ); char containers_where_clause[30]; uint32 combine_object_slots; if (p_recipes_search_struct->some_id == 0) { // world combiner so no item number snprintf(containers_where_clause, 29, "= %u", p_recipes_search_struct->object_type); combine_object_slots = 10; } else { // container in inventory snprintf(containers_where_clause, 29, "in (%u,%u)", p_recipes_search_struct->object_type, p_recipes_search_struct->some_id); auto item = database.GetItem(p_recipes_search_struct->some_id); if (!item) { LogError( "Invalid container ID: [{}]. GetItem returned null. Defaulting to BagSlots = 10.", p_recipes_search_struct->some_id ); combine_object_slots = 10; } else { combine_object_slots = item->BagSlots; } } std::string search_clause; if (p_recipes_search_struct->query[0] != 0) { char buf[120]; //larger than 2X rss->query database.DoEscapeString(buf, p_recipes_search_struct->query, strlen(p_recipes_search_struct->query)); search_clause = StringFormat("name rlike '%s' AND", buf); } //arbitrary limit of 200 recipes, makes sense to me. 
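/*
	The query below mirrors the favorites query above: it joins tradeskill_recipe with its entries,
	keeps only recipes whose container requirement matches the combiner in use (the
	containers_where_clause built above), and limits combines to the container's slot count via
	SUM(tre.componentcount) <= combine_object_slots.
*/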
// TODO: Clean this up std::string query = fmt::format( SQL( SELECT tr.id, tr.name, tr.trivial, SUM(tre.componentcount), tr.tradeskill FROM tradeskill_recipe AS tr LEFT JOIN tradeskill_recipe_entries AS tre ON tr.id = tre.recipe_id WHERE {} tr.trivial >= {} AND tr.trivial <= {} AND tr.enabled <> 0 AND tr.must_learn & 0x20 <> 0x20 AND ( ( tr.must_learn & 0x3 <> 0 ) OR (tr.must_learn & 0x3 = 0) ) {} GROUP BY tr.id HAVING sum( if ( tre.item_id {} AND tre.iscontainer > 0, 1, 0 ) ) > 0 AND SUM(tre.componentcount) <= {} LIMIT 200 ), search_clause, p_recipes_search_struct->mintrivial, p_recipes_search_struct->maxtrivial, ContentFilterCriteria::apply(), containers_where_clause, combine_object_slots ); SendTradeskillSearchResults(query, p_recipes_search_struct->object_type, p_recipes_search_struct->some_id); } void Client::Handle_OP_ReloadUI(const EQApplicationPacket *app) { if (IsInAGuild()) { SendGuildRanks(); SendGuildMembers(); } return; } void Client::Handle_OP_RemoveBlockedBuffs(const EQApplicationPacket *app) { if (!RuleB(Spells, EnableBlockedBuffs)) return; if (app->size != sizeof(BlockedBuffs_Struct)) { LogDebug("Size mismatch in OP_RemoveBlockedBuffs expected [{}] got [{}]", sizeof(BlockedBuffs_Struct), app->size); DumpPacket(app); return; } BlockedBuffs_Struct *bbs = (BlockedBuffs_Struct*)app->pBuffer; std::set<uint32> *BlockedBuffs = bbs->Pet ? &PetBlockedBuffs : &PlayerBlockedBuffs; std::set<uint32> RemovedBuffs; if (bbs->Count > 0) { std::set<uint32>::iterator Iterator; auto outapp = new EQApplicationPacket(OP_RemoveBlockedBuffs, sizeof(BlockedBuffs_Struct)); BlockedBuffs_Struct *obbs = (BlockedBuffs_Struct*)outapp->pBuffer; for (unsigned int i = 0; i < BLOCKED_BUFF_COUNT; ++i) obbs->SpellID[i] = 0; obbs->Pet = bbs->Pet; obbs->Initialise = 0; obbs->Flags = 0x5a; for (unsigned int i = 0; i < bbs->Count; ++i) { Iterator = BlockedBuffs->find(bbs->SpellID[i]); if (Iterator != BlockedBuffs->end()) { RemovedBuffs.insert(bbs->SpellID[i]); BlockedBuffs->erase(Iterator); } } obbs->Count = RemovedBuffs.size(); Iterator = RemovedBuffs.begin(); unsigned int Element = 0; while (Iterator != RemovedBuffs.end()) { obbs->SpellID[Element++] = (*Iterator); ++Iterator; } FastQueuePacket(&outapp); } } void Client::Handle_OP_RemoveTrap(const EQApplicationPacket *app) { if (app->size != 4) {// just an int LogDebug("Size mismatch in OP_RemoveTrap expected 4 got [{}]", app->size); DumpPacket(app); return; } auto id = app->ReadUInt32(0); bool good = false; for (int i = 0; i < trap_mgr.count; ++i) { if (trap_mgr.auras[i].spawn_id == id) { good = true; break; } } if (good) RemoveAura(id); else MessageString(Chat::SpellFailure, NOT_YOUR_TRAP); // pretty sure this was red } void Client::Handle_OP_Report(const EQApplicationPacket *app) { if (!CanUseReport) { MessageString(Chat::System, REPORT_ONCE); return; } uint32 size = app->size; uint32 current_point = 0; std::string reported, reporter; std::string current_string; int mode = 0; while (current_point < size) { if (mode < 2) { if (app->pBuffer[current_point] == '|') { mode++; } else { if (mode == 0) { reported += app->pBuffer[current_point]; } else { reporter += app->pBuffer[current_point]; } } current_point++; } else { if (app->pBuffer[current_point] == 0x0a) { current_string += '\n'; } else if (app->pBuffer[current_point] == 0x00) { CanUseReport = false; database.AddReport(reporter, reported, current_string); return; } else { current_string += app->pBuffer[current_point]; } current_point++; } } CanUseReport = false; database.AddReport(reporter, reported, 
current_string); } void Client::Handle_OP_RequestDuel(const EQApplicationPacket *app) { if (app->size != sizeof(Duel_Struct)) return; EQApplicationPacket* outapp = app->Copy(); Duel_Struct* ds = (Duel_Struct*)outapp->pBuffer; uint32 duel = ds->duel_initiator; ds->duel_initiator = ds->duel_target; ds->duel_target = duel; Entity* entity = entity_list.GetID(ds->duel_target); if (GetID() != ds->duel_target && entity->IsClient() && (entity->CastToClient()->IsDueling() && entity->CastToClient()->GetDuelTarget() != 0)) { MessageString(Chat::NPCQuestSay, DUEL_CONSIDERING, entity->GetName()); return; } if (IsDueling()) { MessageString(Chat::NPCQuestSay, DUEL_INPROGRESS); return; } if (GetID() != ds->duel_target && entity->IsClient() && GetDuelTarget() == 0 && !IsDueling() && !entity->CastToClient()->IsDueling() && entity->CastToClient()->GetDuelTarget() == 0) { SetDuelTarget(ds->duel_target); entity->CastToClient()->SetDuelTarget(GetID()); ds->duel_target = ds->duel_initiator; entity->CastToClient()->FastQueuePacket(&outapp); entity->CastToClient()->SetDueling(false); SetDueling(false); } else safe_delete(outapp); return; } void Client::Handle_OP_RequestTitles(const EQApplicationPacket *app) { EQApplicationPacket *outapp = title_manager.MakeTitlesPacket(this); if (outapp != nullptr) FastQueuePacket(&outapp); } void Client::Handle_OP_RespawnWindow(const EQApplicationPacket *app) { // This opcode is sent by the client when the player choses which bind to return to. // The client sends just a 4 byte packet with the selection number in it // if (app->size != 4) { LogDebug("Size mismatch in OP_RespawnWindow expected [{}] got [{}]", 4, app->size); DumpPacket(app); return; } char *Buffer = (char *)app->pBuffer; uint32 Option = VARSTRUCT_DECODE_TYPE(uint32, Buffer); HandleRespawnFromHover(Option); } void Client::Handle_OP_Rewind(const EQApplicationPacket *app) { if ((rewind_timer.GetRemainingTime() > 1 && rewind_timer.Enabled())) { MessageString(Chat::System, REWIND_WAIT); } else { CastToClient()->MovePC(zone->GetZoneID(), zone->GetInstanceID(), m_RewindLocation.x, m_RewindLocation.y, m_RewindLocation.z, 0, 2, Rewind); rewind_timer.Start(30000, true); } } void Client::Handle_OP_RezzAnswer(const EQApplicationPacket *app) { VERIFY_PACKET_LENGTH(OP_RezzAnswer, app, Resurrect_Struct); const Resurrect_Struct* ra = (const Resurrect_Struct*)app->pBuffer; LogSpells("Received OP_RezzAnswer from client. Pendingrezzexp is [{}], action is [{}]", PendingRezzXP, ra->action ? "ACCEPT" : "DECLINE"); OPRezzAnswer(ra->action, ra->spellid, ra->zone_id, ra->instance_id, ra->x, ra->y, ra->z); if (ra->action == 1) { EQApplicationPacket* outapp = app->Copy(); // Send the OP_RezzComplete to the world server. This finds it's way to the zone that // the rezzed corpse is in to mark the corpse as rezzed. 
outapp->SetOpcode(OP_RezzComplete); worldserver.RezzPlayer(outapp, 0, 0, OP_RezzComplete); safe_delete(outapp); } return; } void Client::Handle_OP_Sacrifice(const EQApplicationPacket *app) { if (app->size != sizeof(Sacrifice_Struct)) { LogDebug("Size mismatch in OP_Sacrifice expected [{}] got [{}]", sizeof(Sacrifice_Struct), app->size); DumpPacket(app); return; } Sacrifice_Struct *ss = (Sacrifice_Struct*)app->pBuffer; if (!PendingSacrifice) { LogError("Unexpected OP_Sacrifice reply"); DumpPacket(app); return; } if (ss->Confirm) { Client *Caster = entity_list.GetClientByName(SacrificeCaster.c_str()); if (Caster) Sacrifice(Caster); } PendingSacrifice = false; SacrificeCaster.clear(); } void Client::Handle_OP_SafeFallSuccess(const EQApplicationPacket *app) // bit of a misnomer, sent whenever safe fall is used (success of fail) { if (HasSkill(EQ::skills::SkillSafeFall)) //this should only get called if the client has safe fall, but just in case... CheckIncreaseSkill(EQ::skills::SkillSafeFall, nullptr); //check for skill up } void Client::Handle_OP_SafePoint(const EQApplicationPacket *app) { return; } void Client::Handle_OP_Save(const EQApplicationPacket *app) { // The payload is 192 bytes - Not sure what is contained in payload Save(); return; } void Client::Handle_OP_SaveOnZoneReq(const EQApplicationPacket *app) { Handle_OP_Save(app); } void Client::Handle_OP_SelectTribute(const EQApplicationPacket *app) { LogTribute("Received OP_SelectTribute of length [{}]", app->size); //we should enforce being near a real tribute master to change this //but im not sure how I wanna do that right now. if (app->size != sizeof(SelectTributeReq_Struct)) LogError("Invalid size on OP_SelectTribute packet"); else { SelectTributeReq_Struct *t = (SelectTributeReq_Struct *)app->pBuffer; SendTributeDetails(t->client_id, t->tribute_id); } return; } void Client::Handle_OP_SenseHeading(const EQApplicationPacket *app) { if (!HasSkill(EQ::skills::SkillSenseHeading)) return; int chancemod = 0; CheckIncreaseSkill(EQ::skills::SkillSenseHeading, nullptr, chancemod); return; } void Client::Handle_OP_SenseTraps(const EQApplicationPacket *app) { if (!HasSkill(EQ::skills::SkillSenseTraps)) return; if (!p_timers.Expired(&database, pTimerSenseTraps, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } int reuse = SenseTrapsReuseTime - GetSkillReuseTime(EQ::skills::SkillSenseTraps); if (reuse < 1) reuse = 1; p_timers.Start(pTimerSenseTraps, reuse - 1); float trap_curdist = 0; Trap* trap = entity_list.FindNearbyTrap(this, 800, trap_curdist); CheckIncreaseSkill(EQ::skills::SkillSenseTraps, nullptr); if (trap && trap->skill > 0) { int uskill = GetSkill(EQ::skills::SkillSenseTraps); if ((zone->random.Int(0, 99) + uskill) >= (zone->random.Int(0, 99) + trap->skill*0.75)) { auto diff = trap->m_Position - glm::vec3(GetPosition()); if (diff.x == 0 && diff.y == 0) Message(Chat::Skills, "You sense a trap right under your feet!"); else if (diff.x > 10 && diff.y > 10) Message(Chat::Skills, "You sense a trap to the NorthWest."); else if (diff.x < -10 && diff.y > 10) Message(Chat::Skills, "You sense a trap to the NorthEast."); else if (diff.y > 10) Message(Chat::Skills, "You sense a trap to the North."); else if (diff.x > 10 && diff.y < -10) Message(Chat::Skills, "You sense a trap to the SouthWest."); else if (diff.x < -10 && diff.y < -10) Message(Chat::Skills, "You sense a trap to the SouthEast."); else if (diff.y < -10) Message(Chat::Skills, "You sense a trap to the South."); else if (diff.x > 10) Message(Chat::Skills, 
"You sense a trap to the West."); else Message(Chat::Skills, "You sense a trap to the East."); trap->detected = true; float angle = CalculateHeadingToTarget(trap->m_Position.x, trap->m_Position.y); if (angle < 0) angle = (256 + angle); angle *= 2; MovePC(zone->GetZoneID(), zone->GetInstanceID(), GetX(), GetY(), GetZ(), angle); return; } } Message(Chat::Skills, "You did not find any traps nearby."); return; } void Client::Handle_OP_SetGuildMOTD(const EQApplicationPacket *app) { LogGuilds("Received OP_SetGuildMOTD"); if (app->size != sizeof(GuildMOTD_Struct)) { // client calls for a motd on login even if they arent in a guild printf("Error: app size of %i != size of GuildMOTD_Struct of %zu\n", app->size, sizeof(GuildMOTD_Struct)); return; } if (!IsInAGuild()) { Message(Chat::Red, "You are not in a guild!"); return; } if (!guild_mgr.CheckPermission(GuildID(), GuildRank(), GUILD_MOTD)) { Message(Chat::Red, "You do not have permissions to edit your guild's MOTD."); return; } GuildMOTD_Struct* gmotd = (GuildMOTD_Struct*)app->pBuffer; LogGuilds("Setting MOTD for [{}] ([{}]) to: [{}] - [{}]", guild_mgr.GetGuildName(GuildID()), GuildID(), GetName(), gmotd->motd); if (!guild_mgr.SetGuildMOTD(GuildID(), gmotd->motd, GetName())) { Message(0, "Motd update failed."); } return; } void Client::Handle_OP_SetRunMode(const EQApplicationPacket *app) { if (app->size < sizeof(SetRunMode_Struct)) { LogError("Received invalid sized OP_SetRunMode: got [{}], expected [{}]", app->size, sizeof(SetRunMode_Struct)); DumpPacket(app); return; } SetRunMode_Struct* rms = (SetRunMode_Struct*)app->pBuffer; if (rms->mode) runmode = true; else runmode = false; return; } void Client::Handle_OP_SetServerFilter(const EQApplicationPacket *app) { if (app->size != sizeof(SetServerFilter_Struct)) { LogError("Received invalid sized OP_SetServerFilter: got [{}], expected [{}]", app->size, sizeof(SetServerFilter_Struct)); DumpPacket(app); return; } SetServerFilter_Struct* filter = (SetServerFilter_Struct*)app->pBuffer; ServerFilter(filter); return; } void Client::Handle_OP_SetStartCity(const EQApplicationPacket *app) { // if the character has a start city, don't let them use the command if (m_pp.binds[4].zone_id != 0 && m_pp.binds[4].zone_id != 189) { Message(Chat::Yellow, "Your home city has already been set.", m_pp.binds[4].zone_id, ZoneName(m_pp.binds[4].zone_id)); return; } if (app->size < 1) { LogError("Wrong size: OP_SetStartCity, size=[{}], expected [{}]", app->size, 1); DumpPacket(app); return; } float x = 0.0f, y = 0.0f, z = 0.0f, heading = 0.0f; uint32 zone_id = 0; uint32 start_city = (uint32)strtol((const char*)app->pBuffer, nullptr, 10); std::string query = fmt::format( SQL( SELECT `zone_id`, `bind_id`, `x`, `y`, `z`, `heading` FROM `start_zones` WHERE player_class = {} AND player_deity = {} AND player_race = {} {} ), m_pp.class_, m_pp.deity, m_pp.race, ContentFilterCriteria::apply().c_str() ); auto results = content_db.QueryDatabase(query); if (!results.Success()) { LogError("No valid start zones found for /setstartcity"); return; } bool valid_city = false; for (auto row = results.begin(); row != results.end(); ++row) { if (atoi(row[1]) != 0) zone_id = atoi(row[1]); else zone_id = atoi(row[0]); if (zone_id != start_city) continue; valid_city = true; x = atof(row[2]); y = atof(row[3]); z = atof(row[4]); heading = atof(row[5]); } if (valid_city) { Message(Chat::Yellow, "Your home city has been set"); SetStartZone(start_city, x, y, z, heading); return; } query = fmt::format( SQL( SELECT `zone_id`, `bind_id` FROM `start_zones` 
WHERE player_class = {} AND player_deity = {} AND player_race = {} ), m_pp.class_, m_pp.deity, m_pp.race ); results = content_db.QueryDatabase(query); if (!results.Success()) return; Message(Chat::Yellow, "Use \"/setstartcity #\" to choose a home city from the following list:"); for (auto row = results.begin(); row != results.end(); ++row) { if (atoi(row[1]) != 0) zone_id = atoi(row[1]); else zone_id = atoi(row[0]); std::string zone_long_name = zone_store.GetZoneLongName(zone_id); Message(Chat::Yellow, "%d - %s", zone_id, zone_long_name.c_str()); } } void Client::Handle_OP_SetTitle(const EQApplicationPacket *app) { if (app->size != sizeof(SetTitle_Struct)) { LogDebug("Size mismatch in OP_SetTitle expected [{}] got [{}]", sizeof(SetTitle_Struct), app->size); DumpPacket(app); return; } SetTitle_Struct *sts = (SetTitle_Struct *)app->pBuffer; std::string Title; if (!sts->is_suffix) { Title = title_manager.GetPrefix(sts->title_id); SetAATitle(Title.c_str()); } else { Title = title_manager.GetSuffix(sts->title_id); SetTitleSuffix(Title.c_str()); } } void Client::Handle_OP_Shielding(const EQApplicationPacket *app) { /* /shield command mechanics Warriors get this skill at level 30 Used by typing /shield while targeting a player While active for the duration of 12 seconds baseline. The 'shield target' will take 50 pct less damage and the 'shielder' will be hit with the damage taken by the 'shield target' after all applicable mitigiont is calculated, the damage on the 'shielder' will be reduced by 25 percent, this reduction can be increased to 50 pct if equiping a shield. You receive a 1% increase in mitigation for every 2 AC on the shield. Shielder must stay with in a close distance (15 units) to your 'shield target'. If either move out of range, shield ends, no message given. Both duration and shield range can be modified by AA. Recast is 3 minutes. For custom use cases, Mob::ShieldAbility can be used in quests with all parameters being altered. This functional is also used for SPA 201 SE_PetShield, which functions in a simalar manner with pet shielding owner. Note: If either the shielder or the shield target die all variables are reset on both. 
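	Worked example (illustrative numbers only, ignoring the target's own AC mitigation; the actual
	calculation is done in Mob::ShieldAbility): a 1000 point hit lands on the 'shield target', who
	takes 500 because of the 50 pct reduction. The 'shielder' is then struck for that 500; with the
	baseline 25 pct reduction they take 375. If the shielder has a 50 AC shield equipped, the
	reduction gains another 25 pct (1 pct per 2 AC) and reaches the 50 pct maximum, so the shielder
	would take 250 instead.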
*/ if (app->size != sizeof(Shielding_Struct)) { LogError("OP size error: OP_Shielding expected:[{}] got:[{}]", sizeof(Shielding_Struct), app->size); return; } if (GetLevel() < 30) { //Client gives message return; } if (GetClass() != WARRIOR){ return; } pTimerType timer = pTimerShieldAbility; if (!p_timers.Expired(&database, timer, false)) { uint32 remain = p_timers.GetRemainingTime(timer); Message(Chat::White, "You can use the ability /shield in %d minutes %d seconds.", ((remain) / 60), (remain % 60)); return; } Shielding_Struct* shield = (Shielding_Struct*)app->pBuffer; if (ShieldAbility(shield->target_id, 15, 12000, 50, 25, true, false)) { p_timers.Start(timer, SHIELD_ABILITY_RECAST_TIME); } return; } void Client::Handle_OP_ShopEnd(const EQApplicationPacket *app) { EQApplicationPacket empty(OP_ShopEndConfirm); QueuePacket(&empty); return; } void Client::Handle_OP_ShopPlayerBuy(const EQApplicationPacket *app) { if (app->size != sizeof(Merchant_Sell_Struct)) { LogError("Invalid size on OP_ShopPlayerBuy: Expected [{}], Got [{}]", sizeof(Merchant_Sell_Struct), app->size); return; } RDTSC_Timer t1; t1.start(); Merchant_Sell_Struct* mp = (Merchant_Sell_Struct*)app->pBuffer; #if EQDEBUG >= 5 LogDebug("[{}], purchase item", GetName()); DumpPacket(app); #endif int merchantid; bool tmpmer_used = false; Mob* tmp = entity_list.GetMob(mp->npcid); if (tmp == 0 || !tmp->IsNPC() || tmp->GetClass() != MERCHANT) return; if (mp->quantity < 1) return; //you have to be somewhat close to them to be properly using them if (DistanceSquared(m_Position, tmp->GetPosition()) > USE_NPC_RANGE2) return; merchantid = tmp->CastToNPC()->MerchantType; uint32 item_id = 0; std::list<MerchantList> merlist = zone->merchanttable[merchantid]; std::list<MerchantList>::const_iterator itr; for (itr = merlist.begin(); itr != merlist.end(); ++itr) { MerchantList ml = *itr; if (GetLevel() < ml.level_required) { continue; } if (mp->itemslot == ml.slot) { item_id = ml.item; break; } } const EQ::ItemData* item = nullptr; uint32 prevcharges = 0; if (item_id == 0) { //check to see if its on the temporary table std::list<TempMerchantList> tmp_merlist = zone->tmpmerchanttable[tmp->GetNPCTypeID()]; std::list<TempMerchantList>::const_iterator tmp_itr; TempMerchantList ml; for (tmp_itr = tmp_merlist.begin(); tmp_itr != tmp_merlist.end(); ++tmp_itr) { ml = *tmp_itr; if (mp->itemslot == ml.slot) { item_id = ml.item; tmpmer_used = true; prevcharges = ml.charges; break; } } } item = database.GetItem(item_id); if (!item) { //error finding item, client didnt get the update packet for whatever reason, roleplay a tad Message(Chat::Yellow, "%s tells you 'Sorry, that item is for display purposes only.' 
as they take the item off the shelf.", tmp->GetCleanName()); auto delitempacket = new EQApplicationPacket(OP_ShopDelItem, sizeof(Merchant_DelItem_Struct)); Merchant_DelItem_Struct* delitem = (Merchant_DelItem_Struct*)delitempacket->pBuffer; delitem->itemslot = mp->itemslot; delitem->npcid = mp->npcid; delitem->playerid = mp->playerid; delitempacket->priority = 6; entity_list.QueueCloseClients(tmp, delitempacket); //que for anyone that could be using the merchant so they see the update safe_delete(delitempacket); return; } if (CheckLoreConflict(item)) { Message(Chat::Yellow, "You can only have one of a lore item."); return; } if (tmpmer_used && (mp->quantity > prevcharges || item->MaxCharges > 1)) { if (prevcharges > item->MaxCharges && item->MaxCharges > 1) mp->quantity = item->MaxCharges; else mp->quantity = prevcharges; } // Item's stackable, but the quantity they want to buy exceeds the max stackable quantity. if (item->Stackable && mp->quantity > item->StackSize) mp->quantity = item->StackSize; auto outapp = new EQApplicationPacket(OP_ShopPlayerBuy, sizeof(Merchant_Sell_Struct)); Merchant_Sell_Struct* mpo = (Merchant_Sell_Struct*)outapp->pBuffer; mpo->quantity = mp->quantity; mpo->playerid = mp->playerid; mpo->npcid = mp->npcid; mpo->itemslot = mp->itemslot; int16 freeslotid = INVALID_INDEX; int16 charges = 0; if (item->Stackable || tmpmer_used) charges = mp->quantity; else if ( item->MaxCharges >= 1) charges = item->MaxCharges; EQ::ItemInstance* inst = database.CreateItem(item, charges); int SinglePrice = 0; if (RuleB(Merchant, UsePriceMod)) SinglePrice = (item->Price * (RuleR(Merchant, SellCostMod)) * item->SellRate * Client::CalcPriceMod(tmp, false)); else SinglePrice = (item->Price * (RuleR(Merchant, SellCostMod)) * item->SellRate); if (item->MaxCharges > 1) mpo->price = SinglePrice; else mpo->price = SinglePrice * mp->quantity; if (mpo->price < 0) { safe_delete(outapp); safe_delete(inst); return; } // this area needs some work..two inventory insertion check failure points // below do not return player's money..is this the intended behavior? if (!TakeMoneyFromPP(mpo->price)) { auto hacker_str = fmt::format("Vendor Cheat: attempted to buy {} of {}: {} that cost {} cp but only has {} pp {} gp {} sp {} cp", mpo->quantity, item->ID, item->Name, mpo->price, m_pp.platinum, m_pp.gold, m_pp.silver, m_pp.copper); database.SetMQDetectionFlag(AccountName(), GetName(), hacker_str, zone->GetShortName()); safe_delete(outapp); safe_delete(inst); return; } bool stacked = TryStacking(inst); if (!stacked) freeslotid = m_inv.FindFreeSlot(false, true, item->Size); // shouldn't we be reimbursing if these two fail? //make sure we are not completely full... if (freeslotid == EQ::invslot::slotCursor) { if (m_inv.GetItem(EQ::invslot::slotCursor) != nullptr) { Message(Chat::Red, "You do not have room for any more items."); safe_delete(outapp); safe_delete(inst); return; } } if (!stacked && freeslotid == INVALID_INDEX) { Message(Chat::Red, "You do not have room for any more items."); safe_delete(outapp); safe_delete(inst); return; } std::string packet; if (!stacked && inst) { PutItemInInventory(freeslotid, *inst); SendItemPacket(freeslotid, inst, ItemPacketTrade); } else if (!stacked) { LogError("OP_ShopPlayerBuy: item->ItemClass Unknown! 
Type: [{}]", item->ItemClass); } QueuePacket(outapp); if (inst && tmpmer_used) { int32 new_charges = prevcharges - mp->quantity; zone->SaveTempItem(merchantid, tmp->GetNPCTypeID(), item_id, new_charges); if (new_charges <= 0) { auto delitempacket = new EQApplicationPacket(OP_ShopDelItem, sizeof(Merchant_DelItem_Struct)); Merchant_DelItem_Struct* delitem = (Merchant_DelItem_Struct*)delitempacket->pBuffer; delitem->itemslot = mp->itemslot; delitem->npcid = mp->npcid; delitem->playerid = mp->playerid; delitempacket->priority = 6; entity_list.QueueClients(tmp, delitempacket); //que for anyone that could be using the merchant so they see the update safe_delete(delitempacket); } else { // Update the charges/quantity in the merchant window inst->SetCharges(new_charges); inst->SetPrice(SinglePrice); inst->SetMerchantSlot(mp->itemslot); inst->SetMerchantCount(new_charges); SendItemPacket(mp->itemslot, inst, ItemPacketMerchant); } } safe_delete(inst); safe_delete(outapp); // start QS code // stacking purchases not supported at this time - entire process will need some work to catch them properly if (RuleB(QueryServ, PlayerLogMerchantTransactions)) { auto qspack = new ServerPacket(ServerOP_QSPlayerLogMerchantTransactions, sizeof(QSMerchantLogTransaction_Struct) + sizeof(QSTransactionItems_Struct)); QSMerchantLogTransaction_Struct* qsaudit = (QSMerchantLogTransaction_Struct*)qspack->pBuffer; qsaudit->zone_id = zone->GetZoneID(); qsaudit->merchant_id = tmp->CastToNPC()->MerchantType; qsaudit->merchant_money.platinum = 0; qsaudit->merchant_money.gold = 0; qsaudit->merchant_money.silver = 0; qsaudit->merchant_money.copper = 0; qsaudit->merchant_count = 1; qsaudit->char_id = character_id; qsaudit->char_money.platinum = (mpo->price / 1000); qsaudit->char_money.gold = (mpo->price / 100) % 10; qsaudit->char_money.silver = (mpo->price / 10) % 10; qsaudit->char_money.copper = mpo->price % 10; qsaudit->char_count = 0; qsaudit->items[0].char_slot = freeslotid == INVALID_INDEX ? 
0 : freeslotid; qsaudit->items[0].item_id = item->ID; qsaudit->items[0].charges = mpo->quantity; const EQ::ItemInstance* audit_inst = m_inv[freeslotid]; if (audit_inst) { qsaudit->items[0].aug_1 = audit_inst->GetAugmentItemID(0); qsaudit->items[0].aug_2 = audit_inst->GetAugmentItemID(1); qsaudit->items[0].aug_3 = audit_inst->GetAugmentItemID(2); qsaudit->items[0].aug_4 = audit_inst->GetAugmentItemID(3); qsaudit->items[0].aug_5 = audit_inst->GetAugmentItemID(4); } else { qsaudit->items[0].aug_1 = 0; qsaudit->items[0].aug_2 = 0; qsaudit->items[0].aug_3 = 0; qsaudit->items[0].aug_4 = 0; qsaudit->items[0].aug_5 = 0; if (freeslotid != INVALID_INDEX) { LogError("Handle_OP_ShopPlayerBuy: QS Audit could not locate merchant ([{}]) purchased item in player ([{}]) inventory slot ([{}])", qsaudit->merchant_id, qsaudit->char_id, freeslotid); } } audit_inst = nullptr; if (worldserver.Connected()) { worldserver.SendPacket(qspack); } safe_delete(qspack); } // end QS code if (RuleB(EventLog, RecordBuyFromMerchant)) LogMerchant(this, tmp, mpo->quantity, mpo->price, item, true); if ((RuleB(Character, EnableDiscoveredItems))) { if (!GetGM() && !IsDiscovered(item_id)) DiscoverItem(item_id); } t1.stop(); std::cout << "At 1: " << t1.getDuration() << std::endl; return; } void Client::Handle_OP_ShopPlayerSell(const EQApplicationPacket *app) { if (app->size != sizeof(Merchant_Purchase_Struct)) { LogError("Invalid size on OP_ShopPlayerSell: Expected [{}], Got [{}]", sizeof(Merchant_Purchase_Struct), app->size); return; } RDTSC_Timer t1(true); Merchant_Purchase_Struct* mp = (Merchant_Purchase_Struct*)app->pBuffer; Mob* vendor = entity_list.GetMob(mp->npcid); if (vendor == 0 || !vendor->IsNPC() || vendor->GetClass() != MERCHANT) return; //you have to be somewhat close to them to be properly using them if (DistanceSquared(m_Position, vendor->GetPosition()) > USE_NPC_RANGE2) return; uint32 price = 0; uint32 itemid = GetItemIDAt(mp->itemslot); if (itemid == 0) return; const EQ::ItemData* item = database.GetItem(itemid); EQ::ItemInstance* inst = GetInv().GetItem(mp->itemslot); if (!item || !inst) { Message(Chat::Red, "You seemed to have misplaced that item.."); return; } if (mp->quantity > 1) { if ((inst->GetCharges() < 0) || (mp->quantity > (uint32)inst->GetCharges())) return; } if (!item->NoDrop) { //Message(Chat::Red,"%s tells you, 'LOL NOPE'", vendor->GetName()); return; } uint32 cost_quantity = mp->quantity; if (inst->IsCharged()) uint32 cost_quantity = 1; uint32 i; if (RuleB(Merchant, UsePriceMod)) { for (i = 1; i <= cost_quantity; i++) { price = (uint32)((item->Price * i)*(RuleR(Merchant, BuyCostMod))*Client::CalcPriceMod(vendor, true) + 0.5); // need to round up, because client does it automatically when displaying price if (price > 4000000000) { cost_quantity = i; mp->quantity = i; break; } } } else { for (i = 1; i <= cost_quantity; i++) { price = (uint32)((item->Price * i)*(RuleR(Merchant, BuyCostMod)) + 0.5); // need to round up, because client does it automatically when displaying price if (price > 4000000000) { cost_quantity = i; mp->quantity = i; break; } } } AddMoneyToPP(price, false); if (inst->IsStackable() || inst->IsCharged()) { unsigned int i_quan = inst->GetCharges(); if (mp->quantity > i_quan || inst->IsCharged()) mp->quantity = i_quan; } else mp->quantity = 1; if (RuleB(EventLog, RecordSellToMerchant)) LogMerchant(this, vendor, mp->quantity, price, item, false); int charges = mp->quantity; int freeslot = 0; if ((freeslot = zone->SaveTempItem(vendor->CastToNPC()->MerchantType, vendor->GetNPCTypeID(), 
itemid, charges, true)) > 0) { EQ::ItemInstance* inst2 = inst->Clone(); while (true) { if (inst2 == nullptr) break; if (RuleB(Merchant, UsePriceMod)) { inst2->SetPrice(item->Price*(RuleR(Merchant, SellCostMod))*item->SellRate*Client::CalcPriceMod(vendor, false)); } else inst2->SetPrice(item->Price*(RuleR(Merchant, SellCostMod))*item->SellRate); inst2->SetMerchantSlot(freeslot); uint32 MerchantQuantity = zone->GetTempMerchantQuantity(vendor->GetNPCTypeID(), freeslot); if (inst2->IsStackable()) { inst2->SetCharges(MerchantQuantity); } inst2->SetMerchantCount(MerchantQuantity); SendItemPacket(freeslot - 1, inst2, ItemPacketMerchant); safe_delete(inst2); break; } } // start QS code if (RuleB(QueryServ, PlayerLogMerchantTransactions)) { auto qspack = new ServerPacket(ServerOP_QSPlayerLogMerchantTransactions, sizeof(QSMerchantLogTransaction_Struct) + sizeof(QSTransactionItems_Struct)); QSMerchantLogTransaction_Struct* qsaudit = (QSMerchantLogTransaction_Struct*)qspack->pBuffer; qsaudit->zone_id = zone->GetZoneID(); qsaudit->merchant_id = vendor->CastToNPC()->MerchantType; qsaudit->merchant_money.platinum = (price / 1000); qsaudit->merchant_money.gold = (price / 100) % 10; qsaudit->merchant_money.silver = (price / 10) % 10; qsaudit->merchant_money.copper = price % 10; qsaudit->merchant_count = 0; qsaudit->char_id = character_id; qsaudit->char_money.platinum = 0; qsaudit->char_money.gold = 0; qsaudit->char_money.silver = 0; qsaudit->char_money.copper = 0; qsaudit->char_count = 1; qsaudit->items[0].char_slot = mp->itemslot; qsaudit->items[0].item_id = itemid; qsaudit->items[0].charges = charges; qsaudit->items[0].aug_1 = m_inv[mp->itemslot]->GetAugmentItemID(1); qsaudit->items[0].aug_2 = m_inv[mp->itemslot]->GetAugmentItemID(2); qsaudit->items[0].aug_3 = m_inv[mp->itemslot]->GetAugmentItemID(3); qsaudit->items[0].aug_4 = m_inv[mp->itemslot]->GetAugmentItemID(4); qsaudit->items[0].aug_5 = m_inv[mp->itemslot]->GetAugmentItemID(5); if (worldserver.Connected()) { worldserver.SendPacket(qspack); } safe_delete(qspack); } // end QS code // Now remove the item from the player, this happens regardless of outcome if (!inst->IsStackable()) this->DeleteItemInInventory(mp->itemslot, 0, false); else { // HACK: DeleteItemInInventory uses int8 for quantity type. There is no consistent use of types in code in this path so for now iteratively delete from inventory. if (mp->quantity > 255) { uint32 temp = mp->quantity; while (temp > 255 && temp != 0) { // Delete chunks of 255 this->DeleteItemInInventory(mp->itemslot, 255, false); temp -= 255; } if (temp != 0) { // Delete remaining this->DeleteItemInInventory(mp->itemslot, temp, false); } } else { this->DeleteItemInInventory(mp->itemslot, mp->quantity, false); } } //This forces the price to show up correctly for charged items. 
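	// (A charged item is sold as one unit, so the reply packet built below reports a quantity of 1
	// and the client can display the per-item price correctly.)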
if (inst->IsCharged()) mp->quantity = 1; auto outapp = new EQApplicationPacket(OP_ShopPlayerSell, sizeof(Merchant_Purchase_Struct)); Merchant_Purchase_Struct* mco = (Merchant_Purchase_Struct*)outapp->pBuffer; mco->npcid = vendor->GetID(); mco->itemslot = mp->itemslot; mco->quantity = mp->quantity; mco->price = price; QueuePacket(outapp); safe_delete(outapp); SendMoneyUpdate(); t1.start(); Save(1); t1.stop(); std::cout << "Save took: " << t1.getDuration() << std::endl; return; } void Client::Handle_OP_ShopRequest(const EQApplicationPacket *app) { if (app->size != sizeof(Merchant_Click_Struct)) { LogError("Wrong size: OP_ShopRequest, size=[{}], expected [{}]", app->size, sizeof(Merchant_Click_Struct)); return; } Merchant_Click_Struct* mc = (Merchant_Click_Struct*)app->pBuffer; // Send back opcode OP_ShopRequest - tells client to open merchant window. //EQApplicationPacket* outapp = new EQApplicationPacket(OP_ShopRequest, sizeof(Merchant_Click_Struct)); //Merchant_Click_Struct* mco=(Merchant_Click_Struct*)outapp->pBuffer; int merchantid = 0; Mob* tmp = entity_list.GetMob(mc->npcid); if (tmp == 0 || !tmp->IsNPC() || tmp->GetClass() != MERCHANT) return; //you have to be somewhat close to them to be properly using them if (DistanceSquared(m_Position, tmp->GetPosition()) > USE_NPC_RANGE2) return; merchantid = tmp->CastToNPC()->MerchantType; int action = 1; if (merchantid == 0) { auto outapp = new EQApplicationPacket(OP_ShopRequest, sizeof(Merchant_Click_Struct)); Merchant_Click_Struct* mco = (Merchant_Click_Struct*)outapp->pBuffer; mco->npcid = mc->npcid; mco->playerid = 0; mco->command = 1; //open... mco->rate = 1.0; QueuePacket(outapp); safe_delete(outapp); return; } if (tmp->IsEngaged()) { this->MessageString(Chat::White, MERCHANT_BUSY); action = 0; } if (GetFeigned() || IsInvisible()) { Message(0, "You cannot use a merchant right now."); action = 0; } int primaryfaction = tmp->CastToNPC()->GetPrimaryFaction(); int factionlvl = GetFactionLevel(CharacterID(), tmp->CastToNPC()->GetNPCTypeID(), GetRace(), GetClass(), GetDeity(), primaryfaction, tmp); if (factionlvl >= 7) { MerchantRejectMessage(tmp, primaryfaction); action = 0; } if (tmp->Charmed()) action = 0; // 1199 I don't have time for that now. 
etc if (!tmp->CastToNPC()->IsMerchantOpen()) { tmp->SayString(zone->random.Int(1199, 1202)); action = 0; } auto outapp = new EQApplicationPacket(OP_ShopRequest, sizeof(Merchant_Click_Struct)); Merchant_Click_Struct* mco = (Merchant_Click_Struct*)outapp->pBuffer; mco->npcid = mc->npcid; mco->playerid = 0; mco->command = action; // Merchant command 0x01 = open if (RuleB(Merchant, UsePriceMod)) { mco->rate = 1 / ((RuleR(Merchant, BuyCostMod))*Client::CalcPriceMod(tmp, true)); // works } else mco->rate = 1 / (RuleR(Merchant, BuyCostMod)); outapp->priority = 6; QueuePacket(outapp); safe_delete(outapp); if (action == 1) BulkSendMerchantInventory(merchantid, tmp->GetNPCTypeID()); return; } void Client::Handle_OP_Sneak(const EQApplicationPacket *app) { if (!HasSkill(EQ::skills::SkillSneak) && GetSkill(EQ::skills::SkillSneak) == 0) { return; //You cannot sneak if you do not have sneak } if (!p_timers.Expired(&database, pTimerSneak, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } p_timers.Start(pTimerSneak, SneakReuseTime - 1); bool was = sneaking; if (sneaking) { sneaking = false; hidden = false; improved_hidden = false; auto outapp = new EQApplicationPacket(OP_SpawnAppearance, sizeof(SpawnAppearance_Struct)); SpawnAppearance_Struct* sa_out = (SpawnAppearance_Struct*)outapp->pBuffer; sa_out->spawn_id = GetID(); sa_out->type = 0x03; sa_out->parameter = 0; entity_list.QueueClients(this, outapp, true); safe_delete(outapp); } else { CheckIncreaseSkill(EQ::skills::SkillSneak, nullptr, 5); } float hidechance = ((GetSkill(EQ::skills::SkillSneak) / 300.0f) + .25) * 100; float random = zone->random.Real(0, 99); if (!was && random < hidechance) { sneaking = true; } auto outapp = new EQApplicationPacket(OP_SpawnAppearance, sizeof(SpawnAppearance_Struct)); SpawnAppearance_Struct* sa_out = (SpawnAppearance_Struct*)outapp->pBuffer; sa_out->spawn_id = GetID(); sa_out->type = 0x0F; sa_out->parameter = sneaking; QueuePacket(outapp); safe_delete(outapp); if (GetClass() == ROGUE) { outapp = new EQApplicationPacket(OP_SimpleMessage, 12); SimpleMessage_Struct *msg = (SimpleMessage_Struct *)outapp->pBuffer; msg->color = 0x010E; if (sneaking) { msg->string_id = 347; } else { msg->string_id = 348; } FastQueuePacket(&outapp); } return; } void Client::Handle_OP_SpawnAppearance(const EQApplicationPacket *app) { if (app->size != sizeof(SpawnAppearance_Struct)) { std::cout << "Wrong size on OP_SpawnAppearance. 
Got: " << app->size << ", Expected: " << sizeof(SpawnAppearance_Struct) << std::endl; return; } SpawnAppearance_Struct* sa = (SpawnAppearance_Struct*)app->pBuffer; cheat_manager.ProcessSpawnApperance(sa->spawn_id, sa->type, sa->parameter); if (sa->spawn_id != GetID()) return; if (sa->type == AT_Invis) { if (sa->parameter != 0) { if (!HasSkill(EQ::skills::SkillHide) && GetSkill(EQ::skills::SkillHide) == 0) { if (ClientVersion() < EQ::versions::ClientVersion::SoF) { auto hack_str = fmt::format("Player sent OP_SpawnAppearance with AT_Invis: {}", sa->parameter); database.SetMQDetectionFlag(this->account_name, this->name, hack_str, zone->GetShortName()); } } return; } invisible = false; hidden = false; improved_hidden = false; entity_list.QueueClients(this, app, true); return; } else if (sa->type == AT_Anim) { if (IsAIControlled()) return; if (sa->parameter == ANIM_STAND) { SetAppearance(eaStanding); playeraction = 0; SetFeigned(false); BindWound(this, false, true); camp_timer.Disable(); } else if (sa->parameter == ANIM_SIT) { SetAppearance(eaSitting); playeraction = 1; if (!UseBardSpellLogic()) InterruptSpell(); SetFeigned(false); BindWound(this, false, true); tmSitting = Timer::GetCurrentTime(); BuffFadeBySitModifier(); } else if (sa->parameter == ANIM_CROUCH) { if (!UseBardSpellLogic()) InterruptSpell(); SetAppearance(eaCrouching); playeraction = 2; SetFeigned(false); } else if (sa->parameter == ANIM_DEATH) { // feign death too SetAppearance(eaDead); playeraction = 3; InterruptSpell(); } else if (sa->parameter == ANIM_LOOT) { SetAppearance(eaLooting); playeraction = 4; SetFeigned(false); } else { LogError("Client [{}] :: unknown appearance [{}]", name, (int)sa->parameter); return; } entity_list.QueueClients(this, app, true); } else if (sa->type == AT_Anon) { if (!anon_toggle_timer.Check()) { return; } // For Anon/Roleplay if (sa->parameter == 1) { // Anon m_pp.anon = 1; } else if ((sa->parameter == 2) || (sa->parameter == 3)) { // This is Roleplay, or anon+rp m_pp.anon = 2; } else if (sa->parameter == 0) { // This is Non-Anon m_pp.anon = 0; } else { LogError("Client [{}] :: unknown Anon/Roleplay Switch [{}]", name, (int)sa->parameter); return; } entity_list.QueueClients(this, app, true); UpdateWho(); } else if ((sa->type == AT_HP) && (dead == 0)) { return; } else if (sa->type == AT_AFK) { if (afk_toggle_timer.Check()) { AFK = (sa->parameter == 1); entity_list.QueueClients(this, app, true); } } else if (sa->type == AT_Split) { m_pp.autosplit = (sa->parameter == 1); } else if (sa->type == AT_Sneak) { if (sneaking == 0) return; if (sa->parameter != 0) { if (!HasSkill(EQ::skills::SkillSneak)) { auto hack_str = fmt::format("Player sent OP_SpawnAppearance with AT_Sneak: {}", sa->parameter); database.SetMQDetectionFlag(this->account_name, this->name, hack_str, zone->GetShortName()); } return; } sneaking = 0; entity_list.QueueClients(this, app, true); } else if (sa->type == AT_Size) { auto hack_str = fmt::format("Player sent OP_SpawnAppearance with AT_Size: {}", sa->parameter); database.SetMQDetectionFlag(this->account_name, this->name, hack_str, zone->GetShortName()); } else if (sa->type == AT_Light) // client emitting light (lightstone, shiny shield) { //don't do anything with this } else if (sa->type == AT_Levitate) { // don't do anything with this, we tell the client when it's // levitating, not the other way around } else if (sa->type == AT_ShowHelm) { if (helm_toggle_timer.Check()) { m_pp.showhelm = (sa->parameter == 1); entity_list.QueueClients(this, app, true); } } else if (sa->type == 
AT_GroupConsent) { m_pp.groupAutoconsent = (sa->parameter == 1); ConsentCorpses("Group", (sa->parameter != 1)); } else if (sa->type == AT_RaidConsent) { m_pp.raidAutoconsent = (sa->parameter == 1); ConsentCorpses("Raid", (sa->parameter != 1)); } else if (sa->type == AT_GuildConsent) { m_pp.guildAutoconsent = (sa->parameter == 1); ConsentCorpses("Guild", (sa->parameter != 1)); } else { std::cout << "Unknown SpawnAppearance type: 0x" << std::hex << std::setw(4) << std::setfill('0') << sa->type << std::dec << " value: 0x" << std::hex << std::setw(8) << std::setfill('0') << sa->parameter << std::dec << std::endl; } return; } void Client::Handle_OP_Split(const EQApplicationPacket *app) { if (app->size != sizeof(Split_Struct)) { LogError("Wrong size: OP_Split, size=[{}], expected [{}]", app->size, sizeof(Split_Struct)); return; } // The client removes the money on its own, but we have to // update our state anyway, and make sure they had enough to begin // with. Split_Struct *split = (Split_Struct *)app->pBuffer; //Per the note above, Im not exactly sure what to do on error //to notify the client of the error... Group *group = nullptr; Raid *raid = nullptr; if (IsRaidGrouped()) raid = GetRaid(); else if (IsGrouped()) group = GetGroup(); // is there an actual error message for this? if (raid == nullptr && group == nullptr) { Message(Chat::Red, "You can not split money if you're not in a group."); return; } if (!TakeMoneyFromPP(static_cast<uint64>(split->copper) + 10 * static_cast<uint64>(split->silver) + 100 * static_cast<uint64>(split->gold) + 1000 * static_cast<uint64>(split->platinum))) { Message(Chat::Red, "You do not have enough money to do that split."); return; } if (raid) raid->SplitMoney(raid->GetGroup(this), split->copper, split->silver, split->gold, split->platinum); else if (group) group->SplitMoney(split->copper, split->silver, split->gold, split->platinum); return; } void Client::Handle_OP_Surname(const EQApplicationPacket *app) { if (app->size != sizeof(Surname_Struct)) { LogDebug("Size mismatch in Surname expected [{}] got [{}]", sizeof(Surname_Struct), app->size); return; } if (!p_timers.Expired(&database, pTimerSurnameChange, false) && !GetGM()) { Message(Chat::Yellow, "You may only change surnames once every 7 days, your /surname is currently on cooldown."); return; } if (GetLevel() < 20) { MessageString(Chat::Yellow, SURNAME_LEVEL); return; } Surname_Struct* surname = (Surname_Struct*)app->pBuffer; char *c = nullptr; bool first = true; for (c = surname->lastname; *c; c++) { if (first) { *c = toupper(*c); first = false; } else { *c = tolower(*c); } } if (strlen(surname->lastname) >= 20) { MessageString(Chat::Yellow, SURNAME_TOO_LONG); return; } if (!database.CheckNameFilter(surname->lastname, true)) { MessageString(Chat::Yellow, SURNAME_REJECTED); return; } ChangeLastName(surname->lastname); p_timers.Start(pTimerSurnameChange, 604800); EQApplicationPacket* outapp = app->Copy(); outapp = app->Copy(); surname = (Surname_Struct*)outapp->pBuffer; surname->unknown0064 = 1; FastQueuePacket(&outapp); return; } void Client::Handle_OP_SwapSpell(const EQApplicationPacket *app) { if (app->size != sizeof(SwapSpell_Struct)) { std::cout << "Wrong size on OP_SwapSpell. 
Got: " << app->size << ", Expected: " << sizeof(SwapSpell_Struct) << std::endl; return; } const SwapSpell_Struct* swapspell = (const SwapSpell_Struct*)app->pBuffer; int swapspelltemp; const auto sbs = EQ::spells::DynamicLookup(ClientVersion(), GetGM())->SpellbookSize; if (swapspell->from_slot < 0 || swapspell->from_slot >= sbs) return; if (swapspell->to_slot < 0 || swapspell->to_slot >= sbs) return; swapspelltemp = m_pp.spell_book[swapspell->from_slot]; if (swapspelltemp < 0) { return; } m_pp.spell_book[swapspell->from_slot] = m_pp.spell_book[swapspell->to_slot]; m_pp.spell_book[swapspell->to_slot] = swapspelltemp; /* Save Spell Swaps */ if (!database.SaveCharacterSpell(this->CharacterID(), m_pp.spell_book[swapspell->from_slot], swapspell->from_slot)) { database.DeleteCharacterSpell(this->CharacterID(), m_pp.spell_book[swapspell->from_slot], swapspell->from_slot); } if (!database.SaveCharacterSpell(this->CharacterID(), swapspelltemp, swapspell->to_slot)) { database.DeleteCharacterSpell(this->CharacterID(), swapspelltemp, swapspell->to_slot); } QueuePacket(app); return; } void Client::Handle_OP_TargetCommand(const EQApplicationPacket *app) { if (app->size != sizeof(ClientTarget_Struct)) { LogError("OP size error: OP_TargetMouse expected:[{}] got:[{}]", sizeof(ClientTarget_Struct), app->size); return; } if (GetTarget()) { GetTarget()->IsTargeted(-1); } // Locate and cache new target ClientTarget_Struct* ct = (ClientTarget_Struct*)app->pBuffer; pClientSideTarget = ct->new_target; if (!IsAIControlled()) { Mob *nt = entity_list.GetMob(ct->new_target); if (nt) { SetTarget(nt); bool inspect_buffs = false; // rank 1 gives you ability to see NPC buffs in target window (SoD+) if (nt->IsNPC()) { if (IsRaidGrouped()) { Raid *raid = GetRaid(); if (raid) { uint32 gid = raid->GetGroup(this); if (gid < 12 && raid->GroupCount(gid) > 2) inspect_buffs = raid->GetLeadershipAA(groupAAInspectBuffs, gid); } } else { Group *group = GetGroup(); if (group && group->GroupCount() > 2) inspect_buffs = group->GetLeadershipAA(groupAAInspectBuffs); } } if (GetGM() || RuleB(Spells, AlwaysSendTargetsBuffs) || nt == this || inspect_buffs || (nt->IsClient() && !nt->CastToClient()->GetPVP()) || (nt->IsPet() && nt->GetOwner() && nt->GetOwner()->IsClient() && !nt->GetOwner()->CastToClient()->GetPVP()) || #ifdef BOTS (nt->IsBot() && nt->GetOwner() && nt->GetOwner()->IsClient() && !nt->GetOwner()->CastToClient()->GetPVP()) || // TODO: bot pets #endif (nt->IsMerc() && nt->GetOwner() && nt->GetOwner()->IsClient() && !nt->GetOwner()->CastToClient()->GetPVP())) { nt->SendBuffsToClient(this); } } else { SetTarget(nullptr); SetHoTT(0); UpdateXTargetType(TargetsTarget, nullptr); Group *g = GetGroup(); if (g && g->HasRole(this, RoleAssist)) g->SetGroupAssistTarget(0); if (g && g->HasRole(this, RoleTank)) g->SetGroupTankTarget(0); if (g && g->HasRole(this, RolePuller)) g->SetGroupPullerTarget(0); return; } } else { SetTarget(nullptr); SetHoTT(0); UpdateXTargetType(TargetsTarget, nullptr); return; } // HoTT if (GetTarget() && GetTarget()->GetTarget()) { SetHoTT(GetTarget()->GetTarget()->GetID()); UpdateXTargetType(TargetsTarget, GetTarget()->GetTarget()); } else { SetHoTT(0); UpdateXTargetType(TargetsTarget, nullptr); } Group *g = GetGroup(); if (g && g->HasRole(this, RoleAssist)) g->SetGroupAssistTarget(GetTarget()); if (g && g->HasRole(this, RoleTank)) g->SetGroupTankTarget(GetTarget()); if (g && g->HasRole(this, RolePuller)) g->SetGroupPullerTarget(GetTarget()); // For /target, send reject or success packet if (app->GetOpcode() == 
OP_TargetCommand) { if (GetTarget() && !GetTarget()->CastToMob()->IsInvisible(this) && (DistanceSquared(m_Position, GetTarget()->GetPosition()) <= TARGETING_RANGE*TARGETING_RANGE || GetGM())) { if (GetTarget()->GetBodyType() == BT_NoTarget2 || GetTarget()->GetBodyType() == BT_Special || GetTarget()->GetBodyType() == BT_NoTarget) { //Targeting something we shouldn't with /target //but the client allows this without MQ so you don't flag it auto outapp = new EQApplicationPacket(OP_TargetReject, sizeof(TargetReject_Struct)); outapp->pBuffer[0] = 0x2f; outapp->pBuffer[1] = 0x01; outapp->pBuffer[4] = 0x0d; if (GetTarget()) { SetTarget(nullptr); } QueuePacket(outapp); safe_delete(outapp); return; } QueuePacket(app); GetTarget()->IsTargeted(1); SendHPUpdate(); } else { auto outapp = new EQApplicationPacket(OP_TargetReject, sizeof(TargetReject_Struct)); outapp->pBuffer[0] = 0x2f; outapp->pBuffer[1] = 0x01; outapp->pBuffer[4] = 0x0d; if (GetTarget()) { SetTarget(nullptr); } QueuePacket(outapp); safe_delete(outapp); } } else { if (GetTarget()) { if (GetGM()) { GetTarget()->IsTargeted(1); return; } else if (RuleB(Character, AllowMQTarget)) { GetTarget()->IsTargeted(1); return; } else if (cheat_manager.GetExemptStatus(Assist)) { GetTarget()->IsTargeted(1); cheat_manager.SetExemptStatus(Assist, false); return; } else if (GetTarget()->IsClient()) { //make sure this client is in our raid/group GetTarget()->IsTargeted(1); return; } else if (GetTarget()->GetBodyType() == BT_NoTarget2 || GetTarget()->GetBodyType() == BT_Special || GetTarget()->GetBodyType() == BT_NoTarget) { auto hacker_str = fmt::format("{} attempting to target something untargetable, {} bodytype: {}", GetName(), GetTarget()->GetName(), (int)GetTarget()->GetBodyType()); database.SetMQDetectionFlag(AccountName(), GetName(), hacker_str, zone->GetShortName()); SetTarget((Mob*)nullptr); return; } else if (cheat_manager.GetExemptStatus(Port)) { GetTarget()->IsTargeted(1); return; } else if (cheat_manager.GetExemptStatus(Sense)) { GetTarget()->IsTargeted(1); cheat_manager.SetExemptStatus(Sense, false); return; } else if (IsXTarget(GetTarget())) { GetTarget()->IsTargeted(1); return; } else if (GetTarget()->IsPetOwnerClient()) { GetTarget()->IsTargeted(1); return; } else if (GetBindSightTarget()) { if (DistanceSquared(GetBindSightTarget()->GetPosition(), GetTarget()->GetPosition()) > (zone->newzone_data.maxclip*zone->newzone_data.maxclip)) { if (DistanceSquared(m_Position, GetTarget()->GetPosition()) > (zone->newzone_data.maxclip*zone->newzone_data.maxclip)) { auto hacker_str = fmt::format( "{} attempting to target something beyond the clip plane of {:.2f} " "units, from ({:.2f}, {:.2f}, {:.2f}) to {} ({:.2f}, {:.2f}, " "{:.2f})", GetName(), (zone->newzone_data.maxclip * zone->newzone_data.maxclip), GetX(), GetY(), GetZ(), GetTarget()->GetName(), GetTarget()->GetX(), GetTarget()->GetY(), GetTarget()->GetZ()); database.SetMQDetectionFlag(AccountName(), GetName(), hacker_str, zone->GetShortName()); SetTarget(nullptr); return; } } } else if (DistanceSquared(m_Position, GetTarget()->GetPosition()) > (zone->newzone_data.maxclip*zone->newzone_data.maxclip)) { auto hacker_str = fmt::format("{} attempting to target something beyond the clip plane of {:.2f} " "units, from ({:.2f}, {:.2f}, {:.2f}) to {} ({:.2f}, {:.2f}, {:.2f})", GetName(), (zone->newzone_data.maxclip * zone->newzone_data.maxclip), GetX(), GetY(), GetZ(), GetTarget()->GetName(), GetTarget()->GetX(), GetTarget()->GetY(), GetTarget()->GetZ()); database.SetMQDetectionFlag(AccountName(), 
GetName(), hacker_str, zone->GetShortName()); SetTarget(nullptr); return; } GetTarget()->IsTargeted(1); } } return; } void Client::Handle_OP_TargetMouse(const EQApplicationPacket *app) { Handle_OP_TargetCommand(app); } void Client::Handle_OP_TaskHistoryRequest(const EQApplicationPacket *app) { if (app->size != sizeof(TaskHistoryRequest_Struct)) { LogDebug("Size mismatch in OP_TaskHistoryRequest expected [{}] got [{}]", sizeof(TaskHistoryRequest_Struct), app->size); DumpPacket(app); return; } TaskHistoryRequest_Struct *ths = (TaskHistoryRequest_Struct*)app->pBuffer; if (RuleB(TaskSystem, EnableTaskSystem) && task_state) task_state->SendTaskHistory(this, ths->TaskIndex); } void Client::Handle_OP_Taunt(const EQApplicationPacket *app) { if (app->size != sizeof(ClientTarget_Struct)) { std::cout << "Wrong size on OP_Taunt. Got: " << app->size << ", Expected: " << sizeof(ClientTarget_Struct) << std::endl; return; } if (!p_timers.Expired(&database, pTimerTaunt, false)) { Message(Chat::Red, "Ability recovery time not yet met."); return; } p_timers.Start(pTimerTaunt, TauntReuseTime - 1); if (GetTarget() == nullptr || !GetTarget()->IsNPC()) return; if (!zone->CanDoCombat()) { Message(Chat::Red, "You cannot taunt in a no combat zone."); return; } Taunt(GetTarget()->CastToNPC(), false); return; } void Client::Handle_OP_TestBuff(const EQApplicationPacket *app) { if (!RuleB(Character, EnableTestBuff)) { return; } parse->EventPlayer(EVENT_TEST_BUFF, this, "", 0); return; } void Client::Handle_OP_TGB(const EQApplicationPacket *app) { OPTGB(app); return; } void Client::Handle_OP_Track(const EQApplicationPacket *app) { if (GetClass() != RANGER && GetClass() != DRUID && GetClass() != BARD) return; if (GetSkill(EQ::skills::SkillTracking) == 0) SetSkill(EQ::skills::SkillTracking, 1); else CheckIncreaseSkill(EQ::skills::SkillTracking, nullptr, 15); if (!entity_list.MakeTrackPacket(this)) LogError("Unable to generate OP_Track packet requested by client"); return; } void Client::Handle_OP_TrackTarget(const EQApplicationPacket *app) { int PlayerClass = GetClass(); if ((PlayerClass != RANGER) && (PlayerClass != DRUID) && (PlayerClass != BARD)) return; if (app->size != sizeof(TrackTarget_Struct)) { LogError("Invalid size for OP_TrackTarget: Expected: [{}], Got: [{}]", sizeof(TrackTarget_Struct), app->size); return; } TrackTarget_Struct *tts = (TrackTarget_Struct*)app->pBuffer; TrackingID = tts->EntityID; } void Client::Handle_OP_TrackUnknown(const EQApplicationPacket *app) { // size 0 send right after OP_Track return; } void Client::Handle_OP_TradeAcceptClick(const EQApplicationPacket *app) { Mob* with = trade->With(); trade->state = TradeAccepted; if (with && with->IsClient()) { //finish trade... // Have both accepted? 
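		// Both participants have to send their own OP_TradeAcceptClick: the packet is echoed to the
		// partner, and nothing is exchanged until the second accept arrives and both trade states
		// match. At that point lore and no-drop checks run, and only if they pass does FinishTrade
		// move the items, with an optional QueryServ audit of the transaction.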
Client* other = with->CastToClient(); other->QueuePacket(app); if (other->trade->state == trade->state) { other->trade->state = TradeCompleting; trade->state = TradeCompleting; if (CheckTradeLoreConflict(other) || other->CheckTradeLoreConflict(this)) { MessageString(Chat::Red, TRADE_CANCEL_LORE); other->MessageString(Chat::Red, TRADE_CANCEL_LORE); this->FinishTrade(this); other->FinishTrade(other); other->trade->Reset(); trade->Reset(); } else if (CheckTradeNonDroppable()) { MessageString(Chat::Red, TRADE_HAS_BEEN_CANCELLED); other->MessageString(Chat::Red, TRADE_HAS_BEEN_CANCELLED); this->FinishTrade(this); other->FinishTrade(other); other->trade->Reset(); trade->Reset(); Message(Chat::Yellow, "Hacking activity detected in trade transaction."); // TODO: query (this) as a hacker } else if (other->CheckTradeNonDroppable()) { MessageString(Chat::Red, TRADE_HAS_BEEN_CANCELLED); other->MessageString(Chat::Red, TRADE_HAS_BEEN_CANCELLED); this->FinishTrade(this); other->FinishTrade(other); other->trade->Reset(); trade->Reset(); other->Message(Chat::Yellow, "Hacking activity detected in trade transaction."); // TODO: query (other) as a hacker } else { // Audit trade to database for both trade streams other->trade->LogTrade(); trade->LogTrade(); // start QS code if (RuleB(QueryServ, PlayerLogTrades)) { QSPlayerLogTrade_Struct event_entry; std::list<void*> event_details; memset(&event_entry, 0, sizeof(QSPlayerLogTrade_Struct)); // Perform actual trade this->FinishTrade(other, true, &event_entry, &event_details); other->FinishTrade(this, false, &event_entry, &event_details); event_entry._detail_count = event_details.size(); auto qs_pack = new ServerPacket( ServerOP_QSPlayerLogTrades, sizeof(QSPlayerLogTrade_Struct) + (sizeof(QSTradeItems_Struct) * event_entry._detail_count)); QSPlayerLogTrade_Struct* qs_buf = (QSPlayerLogTrade_Struct*)qs_pack->pBuffer; memcpy(qs_buf, &event_entry, sizeof(QSPlayerLogTrade_Struct)); int offset = 0; for (auto iter = event_details.begin(); iter != event_details.end(); ++iter, ++offset) { QSTradeItems_Struct* detail = reinterpret_cast<QSTradeItems_Struct*>(*iter); qs_buf->items[offset] = *detail; safe_delete(detail); } event_details.clear(); if (worldserver.Connected()) worldserver.SendPacket(qs_pack); safe_delete(qs_pack); // end QS code } else { this->FinishTrade(other); other->FinishTrade(this); } other->trade->Reset(); trade->Reset(); } // All done auto outapp = new EQApplicationPacket(OP_FinishTrade, 0); other->QueuePacket(outapp); this->FastQueuePacket(&outapp); } } // Trading with a Mob object that is not a Client. 
else if (with) { auto outapp = new EQApplicationPacket(OP_FinishTrade, 0); QueuePacket(outapp); safe_delete(outapp); if (with->IsNPC()) { // Audit trade to database for player trade stream if (RuleB(QueryServ, PlayerLogHandins)) { QSPlayerLogHandin_Struct event_entry; std::list<void*> event_details; memset(&event_entry, 0, sizeof(QSPlayerLogHandin_Struct)); FinishTrade(with->CastToNPC(), false, &event_entry, &event_details); event_entry._detail_count = event_details.size(); auto qs_pack = new ServerPacket(ServerOP_QSPlayerLogHandins, sizeof(QSPlayerLogHandin_Struct) + (sizeof(QSHandinItems_Struct) * event_entry._detail_count)); QSPlayerLogHandin_Struct* qs_buf = (QSPlayerLogHandin_Struct*)qs_pack->pBuffer; memcpy(qs_buf, &event_entry, sizeof(QSPlayerLogHandin_Struct)); int offset = 0; for (auto iter = event_details.begin(); iter != event_details.end(); ++iter, ++offset) { QSHandinItems_Struct* detail = reinterpret_cast<QSHandinItems_Struct*>(*iter); qs_buf->items[offset] = *detail; safe_delete(detail); } event_details.clear(); if (worldserver.Connected()) worldserver.SendPacket(qs_pack); safe_delete(qs_pack); } else { FinishTrade(with->CastToNPC()); } } #ifdef BOTS // TODO: Log Bot trades else if (with->IsBot()) with->CastToBot()->FinishTrade(this, Bot::BotTradeClientNormal); #endif trade->Reset(); } return; } void Client::Handle_OP_TradeBusy(const EQApplicationPacket *app) { if (app->size != sizeof(TradeBusy_Struct)) { LogError("Wrong size: OP_TradeBusy, size=[{}], expected [{}]", app->size, sizeof(TradeBusy_Struct)); return; } // Trade request recipient is cancelling the trade due to being busy // Trade requester gets message "I'm busy right now" // Send busy message on to trade initiator if client TradeBusy_Struct* msg = (TradeBusy_Struct*)app->pBuffer; Mob* tradee = entity_list.GetMob(msg->to_mob_id); if (tradee && tradee->IsClient()) { tradee->CastToClient()->QueuePacket(app); } return; } void Client::Handle_OP_Trader(const EQApplicationPacket *app) { // Bazaar Trader: // // SoF sends 1 or more unhandled OP_Trader packets of size 96 when a trade has completed. // I don't know what they are for (yet), but it doesn't seem to matter that we ignore them. 
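	// The OP_Trader payload is identified purely by its size: Trader_ShowItems_Struct carries
	// show-items / end-trader actions, ClickTrader_Struct is the request to enter trader mode
	// (carrying the asking prices), TraderStatus_Struct carries a bare status code, and
	// TraderPriceUpdate_Struct is a price change while already up as a trader.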
uint32 max_items = 80; /* if (GetClientVersion() >= EQClientRoF) max_items = 200; */ //Show Items if (app->size == sizeof(Trader_ShowItems_Struct)) { Trader_ShowItems_Struct* sis = (Trader_ShowItems_Struct*)app->pBuffer; switch (sis->Code) { case BazaarTrader_EndTraderMode: { Trader_EndTrader(); LogTrading("Client::Handle_OP_Trader: End Trader Session"); break; } case BazaarTrader_EndTransaction: { Client* c = entity_list.GetClientByID(sis->TraderID); if (c) { c->WithCustomer(0); LogTrading("Client::Handle_OP_Trader: End Transaction"); } else LogTrading("Client::Handle_OP_Trader: Null Client Pointer"); break; } case BazaarTrader_ShowItems: { Trader_ShowItems(); LogTrading("Client::Handle_OP_Trader: Show Trader Items"); break; } default: { LogTrading("Unhandled action code in OP_Trader ShowItems_Struct"); break; } } } else if (app->size == sizeof(ClickTrader_Struct)) { if (Buyer) { Trader_EndTrader(); Message(Chat::Red, "You cannot be a Trader and Buyer at the same time."); return; } ClickTrader_Struct* ints = (ClickTrader_Struct*)app->pBuffer; if (ints->Code == BazaarTrader_StartTraderMode) { GetItems_Struct* gis = GetTraderItems(); LogTrading("Client::Handle_OP_Trader: Start Trader Mode"); // Verify there are no NODROP or items with a zero price bool TradeItemsValid = true; for (uint32 i = 0; i < max_items; i++) { if (gis->Items[i] == 0) break; if (ints->ItemCost[i] == 0) { Message(Chat::Red, "Item in Trader Satchel with no price. Unable to start trader mode"); TradeItemsValid = false; break; } const EQ::ItemData *Item = database.GetItem(gis->Items[i]); if (!Item) { Message(Chat::Red, "Unexpected error. Unable to start trader mode"); TradeItemsValid = false; break; } if (Item->NoDrop == 0) { Message(Chat::Red, "NODROP Item in Trader Satchel. Unable to start trader mode"); TradeItemsValid = false; break; } } if (!TradeItemsValid) { Trader_EndTrader(); return; } for (uint32 i = 0; i < max_items; i++) { if (database.GetItem(gis->Items[i])) { database.SaveTraderItem(this->CharacterID(), gis->Items[i], gis->SerialNumber[i], gis->Charges[i], ints->ItemCost[i], i); auto inst = FindTraderItemBySerialNumber(gis->SerialNumber[i]); if (inst) inst->SetPrice(ints->ItemCost[i]); } else { //return; //sony doesnt memset so assume done on first bad item break; } } safe_delete(gis); this->Trader_StartTrader(); // This refreshes the Trader window to display the End Trader button if (ClientVersion() >= EQ::versions::ClientVersion::RoF) { auto outapp = new EQApplicationPacket(OP_Trader, sizeof(TraderStatus_Struct)); TraderStatus_Struct* tss = (TraderStatus_Struct*)outapp->pBuffer; tss->Code = BazaarTrader_StartTraderMode2; QueuePacket(outapp); safe_delete(outapp); } } else { LogTrading("Client::Handle_OP_Trader: Unknown TraderStruct code of: [{}]\n", ints->Code); LogError("Unknown TraderStruct code of: [{}]\n", ints->Code); } } else if (app->size == sizeof(TraderStatus_Struct)) { TraderStatus_Struct* tss = (TraderStatus_Struct*)app->pBuffer; LogTrading("Client::Handle_OP_Trader: Trader Status Code: [{}]", tss->Code); switch (tss->Code) { case BazaarTrader_EndTraderMode: { Trader_EndTrader(); LogTrading("Client::Handle_OP_Trader: End Trader Session"); break; } case BazaarTrader_ShowItems: { Trader_ShowItems(); LogTrading("Client::Handle_OP_Trader: Show Trader Items"); break; } default: { LogTrading("Unhandled action code in OP_Trader ShowItems_Struct"); break; } } } else if (app->size == sizeof(TraderPriceUpdate_Struct)) { LogTrading("Client::Handle_OP_Trader: Trader Price Update"); 
HandleTraderPriceUpdate(app); } else { LogTrading("Unknown size for OP_Trader: [{}]\n", app->size); LogError("Unknown size for OP_Trader: [{}]\n", app->size); DumpPacket(app); return; } return; } void Client::Handle_OP_TraderBuy(const EQApplicationPacket *app) { // Bazaar Trader: // // Client has elected to buy an item from a Trader // if (app->size != sizeof(TraderBuy_Struct)) { LogError("Wrong size: OP_TraderBuy, size=[{}], expected [{}]", app->size, sizeof(TraderBuy_Struct)); return; } TraderBuy_Struct* tbs = (TraderBuy_Struct*)app->pBuffer; if (Client* Trader = entity_list.GetClientByID(tbs->TraderID)) { BuyTraderItem(tbs, Trader, app); LogTrading("Client::Handle_OP_TraderBuy: Buy Trader Item "); } else { LogTrading("Client::Handle_OP_TraderBuy: Null Client Pointer"); } return; } void Client::Handle_OP_TradeRequest(const EQApplicationPacket *app) { if (app->size != sizeof(TradeRequest_Struct)) { LogError("Wrong size: OP_TradeRequest, size=[{}], expected [{}]", app->size, sizeof(TradeRequest_Struct)); return; } // Client requesting a trade session from an npc/client // Trade session not started until OP_TradeRequestAck is sent TradeRequest_Struct* msg = (TradeRequest_Struct*)app->pBuffer; Mob* tradee = entity_list.GetMob(msg->to_mob_id); // If the tradee is an untargettable mob - ignore // Helps in cases where servers use invisible_man, body type 11 for quests // and the client opens a trade by mistake. if (tradee && (tradee->GetBodyType() == 11)) { return; } CommonBreakInvisible(); // Pass trade request on to recipient if (tradee && tradee->IsClient()) { tradee->CastToClient()->QueuePacket(app); } #ifndef BOTS else if (tradee && tradee->IsNPC()) { #else else if (tradee && (tradee->IsNPC() || tradee->IsBot())) { #endif if (!tradee->IsEngaged()) { trade->Start(msg->to_mob_id); EQApplicationPacket *outapp = new EQApplicationPacket(OP_TradeRequestAck, sizeof(TradeRequest_Struct)); TradeRequest_Struct *acc = (TradeRequest_Struct *) outapp->pBuffer; acc->from_mob_id = msg->to_mob_id; acc->to_mob_id = msg->from_mob_id; FastQueuePacket(&outapp); safe_delete(outapp); } } return; } void Client::Handle_OP_TradeRequestAck(const EQApplicationPacket *app) { if (app->size != sizeof(TradeRequest_Struct)) { LogError("Wrong size: OP_TradeRequestAck, size=[{}], expected [{}]", app->size, sizeof(TradeRequest_Struct)); return; } // Trade request recipient is acknowledging they are able to trade // After this, the trade session has officially started // Send ack on to trade initiator if client TradeRequest_Struct* msg = (TradeRequest_Struct*)app->pBuffer; Mob* tradee = entity_list.GetMob(msg->to_mob_id); if (tradee && tradee->IsClient()) { trade->Start(msg->to_mob_id); tradee->CastToClient()->QueuePacket(app); } return; } void Client::Handle_OP_TraderShop(const EQApplicationPacket *app) { // Bazaar Trader: if (app->size == sizeof(TraderClick_Struct)) { TraderClick_Struct* tcs = (TraderClick_Struct*)app->pBuffer; LogTrading("Handle_OP_TraderShop: TraderClick_Struct TraderID [{}], Code [{}], Unknown008 [{}], Approval [{}]", tcs->TraderID, tcs->Code, tcs->Unknown008, tcs->Approval); if (tcs->Code == BazaarWelcome) { LogTrading("Client::Handle_OP_TraderShop: Sent Bazaar Welcome Info"); SendBazaarWelcome(); } else { // This is when a potential purchaser right clicks on this client who is in Trader mode to // browse their goods. 
auto outapp = new EQApplicationPacket(OP_TraderShop, sizeof(TraderClick_Struct)); TraderClick_Struct* outtcs = (TraderClick_Struct*)outapp->pBuffer; Client* Trader = entity_list.GetClientByID(tcs->TraderID); if (Trader) { outtcs->Approval = Trader->WithCustomer(GetID()); LogTrading("Client::Handle_OP_TraderShop: Shop Request ([{}]) to ([{}]) with Approval: [{}]", GetCleanName(), Trader->GetCleanName(), outtcs->Approval); } else { LogTrading("Client::Handle_OP_TraderShop: entity_list.GetClientByID(tcs->traderid)" " returned a nullptr pointer"); safe_delete(outapp); return; } outtcs->TraderID = tcs->TraderID; outtcs->Unknown008 = 0x3f800000; QueuePacket(outapp); if (outtcs->Approval) { this->BulkSendTraderInventory(Trader->CharacterID()); Trader->Trader_CustomerBrowsing(this); TraderID = tcs->TraderID; LogTrading("Client::Handle_OP_TraderShop: Trader Inventory Sent"); } else { MessageString(Chat::Yellow, TRADER_BUSY); LogTrading("Client::Handle_OP_TraderShop: Trader Busy"); } safe_delete(outapp); return; } } else if (app->size == sizeof(BazaarWelcome_Struct)) { // RoF+ // Client requested Bazaar Welcome Info (Trader and Item Total Counts) SendBazaarWelcome(); LogTrading("Client::Handle_OP_TraderShop: Sent Bazaar Welcome Info"); } else if (app->size == sizeof(TraderBuy_Struct)) { // RoF+ // Customer has purchased an item from the Trader TraderBuy_Struct* tbs = (TraderBuy_Struct*)app->pBuffer; if (Client* Trader = entity_list.GetClientByID(tbs->TraderID)) { BuyTraderItem(tbs, Trader, app); LogTrading("Handle_OP_TraderShop: Buy Action [{}], Price [{}], Trader [{}], ItemID [{}], Quantity [{}], ItemName, [{}]", tbs->Action, tbs->Price, tbs->TraderID, tbs->ItemID, tbs->Quantity, tbs->ItemName); } else { LogTrading("OP_TraderShop: Null Client Pointer"); } } else if (app->size == 4) { // RoF+ // Customer has closed the trade window uint32 Command = *((uint32 *)app->pBuffer); if (Command == 4) { Client* c = entity_list.GetClientByID(TraderID); TraderID = 0; if (c) { c->WithCustomer(0); LogTrading("Client::Handle_OP_Trader: End Transaction - Code [{}]", Command); } else { LogTrading("Client::Handle_OP_Trader: Null Client Pointer for Trader - Code [{}]", Command); } EQApplicationPacket empty(OP_ShopEndConfirm); QueuePacket(&empty); } else { LogTrading("Client::Handle_OP_Trader: Unhandled Code [{}]", Command); } } else { LogTrading("Unknown size for OP_TraderShop: [{}]\n", app->size); LogError("Unknown size for OP_TraderShop: [{}]\n", app->size); DumpPacket(app); return; } } void Client::Handle_OP_TradeSkillCombine(const EQApplicationPacket *app) { if (app->size != sizeof(NewCombine_Struct)) { LogError("Invalid size for NewCombine_Struct: Expected: [{}], Got: [{}]", sizeof(NewCombine_Struct), app->size); return; } /*if (m_tradeskill_object == nullptr) { Message(Chat::Red, "Error: Server is not aware of the tradeskill container you are attempting to use"); return; }*/ //fixed this to work for non-world objects // Delegate to tradeskill object to perform combine NewCombine_Struct* in_combine = (NewCombine_Struct*)app->pBuffer; Object::HandleCombine(this, in_combine, m_tradeskill_object); return; } void Client::Handle_OP_Translocate(const EQApplicationPacket *app) { if (app->size != sizeof(Translocate_Struct)) { LogDebug("Size mismatch in OP_Translocate expected [{}] got [{}]", sizeof(Translocate_Struct), app->size); DumpPacket(app); return; } Translocate_Struct *its = (Translocate_Struct*)app->pBuffer; if (!PendingTranslocate) return; if ((RuleI(Spells, TranslocateTimeLimit) > 0) && (time(nullptr) > 
(TranslocateTime + RuleI(Spells, TranslocateTimeLimit)))) { Message(Chat::Red, "You did not accept the Translocate within the required time limit."); PendingTranslocate = false; return; } if (its->Complete == 1) { int SpellID = PendingTranslocateData.spell_id; int i = parse->EventSpell(EVENT_SPELL_EFFECT_TRANSLOCATE_COMPLETE, nullptr, this, SpellID, 0); if (i == 0) { // If the spell has a translocate to bind effect, AND we are already in the zone the client // is bound in, use the GoToBind method. If we send OP_Translocate in this case, the client moves itself // to the bind coords it has from the PlayerProfile, but with the X and Y reversed. I suspect they are // reversed in the pp, and since spells like Gate are handled serverside, this has not mattered before. if (((SpellID == 1422) || (SpellID == 1334) || (SpellID == 3243)) && (zone->GetZoneID() == PendingTranslocateData.zone_id && zone->GetInstanceID() == PendingTranslocateData.instance_id)) { PendingTranslocate = false; GoToBind(); return; } ////Was sending the packet back to initiate client zone... ////but that could be abusable, so lets go through proper channels MovePC(PendingTranslocateData.zone_id, PendingTranslocateData.instance_id, PendingTranslocateData.x, PendingTranslocateData.y, PendingTranslocateData.z, PendingTranslocateData.heading, 0, ZoneSolicited); } } PendingTranslocate = false; } void Client::Handle_OP_TributeItem(const EQApplicationPacket *app) { LogTribute("Received OP_TributeItem of length [{}]", app->size); //player donates an item... if (app->size != sizeof(TributeItem_Struct)) printf("Error in OP_TributeItem. Expected size of: %zu, but got: %i\n", sizeof(StartTribute_Struct), app->size); else { TributeItem_Struct* t = (TributeItem_Struct*)app->pBuffer; tribute_master_id = t->tribute_master_id; //make sure they are dealing with a valid tribute master Mob* tribmast = entity_list.GetMob(t->tribute_master_id); if (!tribmast || !tribmast->IsNPC() || tribmast->GetClass() != TRIBUTE_MASTER) return; if (DistanceSquared(m_Position, tribmast->GetPosition()) > USE_NPC_RANGE2) return; t->tribute_points = TributeItem(t->slot, t->quantity); LogTribute("Sending tribute item reply with [{}] points", t->tribute_points); QueuePacket(app); } return; } void Client::Handle_OP_TributeMoney(const EQApplicationPacket *app) { LogTribute("Received OP_TributeMoney of length [{}]", app->size); //player donates money if (app->size != sizeof(TributeMoney_Struct)) printf("Error in OP_TributeMoney. Expected size of: %zu, but got: %i\n", sizeof(StartTribute_Struct), app->size); else { TributeMoney_Struct* t = (TributeMoney_Struct*)app->pBuffer; tribute_master_id = t->tribute_master_id; //make sure they are dealing with a valid tribute master Mob* tribmast = entity_list.GetMob(t->tribute_master_id); if (!tribmast || !tribmast->IsNPC() || tribmast->GetClass() != TRIBUTE_MASTER) return; if (DistanceSquared(m_Position, tribmast->GetPosition()) > USE_NPC_RANGE2) return; t->tribute_points = TributeMoney(t->platinum); LogTribute("Sending tribute money reply with [{}] points", t->tribute_points); QueuePacket(app); } return; } void Client::Handle_OP_TributeNPC(const EQApplicationPacket *app) { LogTribute("Received OP_TributeNPC of length [{}]", app->size); return; } void Client::Handle_OP_TributeToggle(const EQApplicationPacket *app) { LogTribute("Received OP_TributeToggle of length [{}]", app->size); if (app->size != sizeof(uint32)) LogError("Invalid size on OP_TributeToggle packet"); else { uint32 *val = (uint32 *)app->pBuffer; ToggleTribute(*val ? 
true : false); } return; } void Client::Handle_OP_TributeUpdate(const EQApplicationPacket *app) { LogTribute("Received OP_TributeUpdate of length [{}]", app->size); //sent when the client changes their tribute settings... if (app->size != sizeof(TributeInfo_Struct)) LogError("Invalid size on OP_TributeUpdate packet"); else { TributeInfo_Struct *t = (TributeInfo_Struct *)app->pBuffer; ChangeTributeSettings(t); } return; } void Client::Handle_OP_VetClaimRequest(const EQApplicationPacket *app) { if (app->size < sizeof(VeteranClaim)) { LogDebug("OP_VetClaimRequest size lower than expected: got [{}] expected at least [{}]", app->size, sizeof(VeteranClaim)); DumpPacket(app); return; } VeteranClaim *vcr = (VeteranClaim *)app->pBuffer; if (vcr->claim_id == 0xFFFFFFFF) { // request update packet SendRewards(); return; } // try to claim something! auto vetapp = new EQApplicationPacket(OP_VetClaimReply, sizeof(VeteranClaim)); VeteranClaim *cr = (VeteranClaim *)vetapp->pBuffer; strcpy(cr->name, GetName()); cr->claim_id = vcr->claim_id; if (!TryReward(vcr->claim_id)) cr->action = 1; else cr->action = 0; FastQueuePacket(&vetapp); } void Client::Handle_OP_VoiceMacroIn(const EQApplicationPacket *app) { if (app->size != sizeof(VoiceMacroIn_Struct)) { LogDebug("Size mismatch in OP_VoiceMacroIn expected [{}] got [{}]", sizeof(VoiceMacroIn_Struct), app->size); DumpPacket(app); return; } if (!RuleB(Chat, EnableVoiceMacros)) return; VoiceMacroIn_Struct* vmi = (VoiceMacroIn_Struct*)app->pBuffer; VoiceMacroReceived(vmi->Type, vmi->Target, vmi->MacroNumber); } void Client::Handle_OP_UpdateAura(const EQApplicationPacket *app) { if (app->size != sizeof(AuraDestory_Struct)) { LogDebug("Size mismatch in OP_UpdateAura expected [{}] got [{}]", sizeof(AuraDestory_Struct), app->size); return; } // client only sends this for removing auto aura = (AuraDestory_Struct *)app->pBuffer; if (aura->action != 1) return; // could log I guess, but should only ever get this action RemoveAura(aura->entity_id); QueuePacket(app); // if we don't resend this, the client gets confused return; } void Client::Handle_OP_WearChange(const EQApplicationPacket *app) { if (app->size != sizeof(WearChange_Struct)) { std::cout << "Wrong size: OP_WearChange, size=" << app->size << ", expected " << sizeof(WearChange_Struct) << std::endl; return; } WearChange_Struct* wc = (WearChange_Struct*)app->pBuffer; if (wc->spawn_id != GetID()) return; // Hero Forge ID needs to be fixed here as RoF2 appears to send an incorrect value. if (wc->hero_forge_model != 0 && wc->wear_slot_id >= 0 && wc->wear_slot_id < EQ::textures::weaponPrimary) wc->hero_forge_model = GetHerosForgeModel(wc->wear_slot_id); // we could maybe ignore this and just send our own from moveitem entity_list.QueueClients(this, app, true); } void Client::Handle_OP_WhoAllRequest(const EQApplicationPacket *app) { if (app->size != sizeof(Who_All_Struct)) { std::cout << "Wrong size on OP_WhoAll. 
Got: " << app->size << ", Expected: " << sizeof(Who_All_Struct) << std::endl; return; } Who_All_Struct* whoall = (Who_All_Struct*)app->pBuffer; if (whoall->type == 0) // SoF only, for regular /who entity_list.ZoneWho(this, whoall); else WhoAll(whoall); return; } void Client::Handle_OP_XTargetAutoAddHaters(const EQApplicationPacket *app) { if (app->size != 1) { LogDebug("Size mismatch in OP_XTargetAutoAddHaters, expected 1, got [{}]", app->size); DumpPacket(app); return; } XTargetAutoAddHaters = app->ReadUInt8(0); SetDirtyAutoHaters(); } void Client::Handle_OP_XTargetOpen(const EQApplicationPacket *app) { if (app->size != 4) { LogDebug("Size mismatch in OP_XTargetOpen, expected 1, got [{}]", app->size); DumpPacket(app); return; } auto outapp = new EQApplicationPacket(OP_XTargetOpenResponse, 0); FastQueuePacket(&outapp); } void Client::Handle_OP_XTargetRequest(const EQApplicationPacket *app) { if (app->size < 12) { LogDebug("Size mismatch in OP_XTargetRequest, expected at least 12, got [{}]", app->size); DumpPacket(app); return; } uint32 Unknown000 = app->ReadUInt32(0); if (Unknown000 != 1) return; uint32 Slot = app->ReadUInt32(4); if (Slot >= XTARGET_HARDCAP) return; XTargetType Type = (XTargetType)app->ReadUInt32(8); XTargets[Slot].Type = Type; XTargets[Slot].ID = 0; XTargets[Slot].Name[0] = 0; switch (Type) { case Empty: case Auto: { break; } case CurrentTargetPC: { char Name[65]; app->ReadString(Name, 12, 64); Client *c = entity_list.GetClientByName(Name); if (c) { XTargets[Slot].ID = c->GetID(); strncpy(XTargets[Slot].Name, c->GetName(), 64); } else { strncpy(XTargets[Slot].Name, Name, 64); } SendXTargetPacket(Slot, c); break; } case CurrentTargetNPC: { char Name[65]; app->ReadString(Name, 12, 64); Mob *m = entity_list.GetMob(Name); if (m) { XTargets[Slot].ID = m->GetID(); SendXTargetPacket(Slot, m); break; } } case TargetsTarget: { if (GetTarget()) UpdateXTargetType(TargetsTarget, GetTarget()->GetTarget()); else UpdateXTargetType(TargetsTarget, nullptr); break; } case GroupTank: { Group *g = GetGroup(); if (g) { Client *c = entity_list.GetClientByName(g->GetMainTankName()); if (c) { XTargets[Slot].ID = c->GetID(); strncpy(XTargets[Slot].Name, c->GetName(), 64); } else { strncpy(XTargets[Slot].Name, g->GetMainTankName(), 64); } SendXTargetPacket(Slot, c); } break; } case GroupTankTarget: { Group *g = GetGroup(); if (g) g->NotifyTankTarget(this); break; } case GroupAssist: { Group *g = GetGroup(); if (g) { Client *c = entity_list.GetClientByName(g->GetMainAssistName()); if (c) { XTargets[Slot].ID = c->GetID(); strncpy(XTargets[Slot].Name, c->GetName(), 64); } else { strncpy(XTargets[Slot].Name, g->GetMainAssistName(), 64); } SendXTargetPacket(Slot, c); } break; } case GroupAssistTarget: { Group *g = GetGroup(); if (g) g->NotifyAssistTarget(this); break; } case Puller: { Group *g = GetGroup(); if (g) { Client *c = entity_list.GetClientByName(g->GetPullerName()); if (c) { XTargets[Slot].ID = c->GetID(); strncpy(XTargets[Slot].Name, c->GetName(), 64); } else { strncpy(XTargets[Slot].Name, g->GetPullerName(), 64); } SendXTargetPacket(Slot, c); } break; } case PullerTarget: { Group *g = GetGroup(); if (g) g->NotifyPullerTarget(this); break; } case GroupMarkTarget1: case GroupMarkTarget2: case GroupMarkTarget3: { Group *g = GetGroup(); if (g) g->SendMarkedNPCsToMember(this); break; } case RaidAssist1: case RaidAssist2: case RaidAssist3: case RaidAssist1Target: case RaidAssist2Target: case RaidAssist3Target: case RaidMarkTarget1: case RaidMarkTarget2: case RaidMarkTarget3: { // Not implemented 
yet. break; } case MyPet: { Mob *m = GetPet(); if (m) { XTargets[Slot].ID = m->GetID(); SendXTargetPacket(Slot, m); } break; } case MyPetTarget: { Mob *m = GetPet(); if (m) m = m->GetTarget(); if (m) { XTargets[Slot].ID = m->GetID(); SendXTargetPacket(Slot, m); } break; } default: LogDebug("Unhandled XTarget Type [{}]", Type); break; } } void Client::Handle_OP_YellForHelp(const EQApplicationPacket *app) { auto outapp = new EQApplicationPacket(OP_YellForHelp, 4); *(uint32 *)outapp->pBuffer = GetID(); entity_list.QueueCloseClients(this, outapp, true, 100.0); safe_delete(outapp); return; } void Client::Handle_OP_ResetAA(const EQApplicationPacket *app) { if (Admin() >= 50) { Message(0, "Resetting AA points."); ResetAA(); } return; } void Client::Handle_OP_MovementHistoryList(const EQApplicationPacket *app) { cheat_manager.ProcessMovementHistory(app); } void Client::Handle_OP_UnderWorld(const EQApplicationPacket *app) { UnderWorld *m_UnderWorld = (UnderWorld *) app->pBuffer; if (app->size != sizeof(UnderWorld)) { LogDebug("Size mismatch in OP_UnderWorld, expected {}, got [{}]", sizeof(UnderWorld), app->size); DumpPacket(app); return; } auto dist = Distance( glm::vec3(m_UnderWorld->x, m_UnderWorld->y, zone->newzone_data.underworld), glm::vec3(m_UnderWorld->x, m_UnderWorld->y, m_UnderWorld->z)); cheat_manager.MovementCheck(glm::vec3(m_UnderWorld->x, m_UnderWorld->y, m_UnderWorld->z)); if (m_UnderWorld->spawn_id == GetID() && dist <= 5.0f && zone->newzone_data.underworld_teleport_index != 0) { cheat_manager.SetExemptStatus(Port, true); } } void Client::Handle_OP_SharedTaskRemovePlayer(const EQApplicationPacket *app) { if (app->size != sizeof(SharedTaskRemovePlayer_Struct)) { LogPacketClientServer( "Wrong size on Handle_OP_SharedTaskRemovePlayer | got [{}] expected [{}]", app->size, sizeof(SharedTaskRemovePlayer_Struct) ); return; } auto *r = (SharedTaskRemovePlayer_Struct *) app->pBuffer; LogTasks( "[Handle_OP_SharedTaskRemovePlayer] field1 [{}] field2 [{}] player_name [{}]", r->field1, r->field2, r->player_name ); // live no-ops this command if not in a shared task if (GetTaskState()->HasActiveSharedTask()) { // struct auto p = new ServerPacket( ServerOP_SharedTaskRemovePlayer, sizeof(ServerSharedTaskRemovePlayer_Struct) ); auto *rp = (ServerSharedTaskRemovePlayer_Struct *) p->pBuffer; // fill rp->source_character_id = CharacterID(); rp->task_id = GetTaskState()->GetActiveSharedTask().task_id; strn0cpy(rp->player_name, r->player_name, sizeof(r->player_name)); LogTasks( "[Handle_OP_SharedTaskRemovePlayer] source_character_id [{}] task_id [{}] player_name [{}]", rp->source_character_id, rp->task_id, rp->player_name ); // send worldserver.SendPacket(p); safe_delete(p); } } void Client::Handle_OP_SharedTaskAddPlayer(const EQApplicationPacket *app) { if (app->size != sizeof(SharedTaskAddPlayer_Struct)) { LogPacketClientServer( "Wrong size on Handle_OP_SharedTaskAddPlayer | got [{}] expected [{}]", app->size, sizeof(SharedTaskAddPlayer_Struct) ); return; } auto *r = (SharedTaskAddPlayer_Struct *) app->pBuffer; LogTasks( "[SharedTaskAddPlayer_Struct] field1 [{}] field2 [{}] player_name [{}]", r->field1, r->field2, r->player_name ); if (!GetTaskState()->HasActiveSharedTask()) { // this message is generated client-side in newer clients Message(Chat::System, SharedTaskMessage::GetEQStr(SharedTaskMessage::COULD_NOT_USE_COMMAND)); } else { // struct auto p = new ServerPacket( ServerOP_SharedTaskAddPlayer, sizeof(ServerSharedTaskAddPlayer_Struct) ); auto *rp = (ServerSharedTaskAddPlayer_Struct *) p->pBuffer; 
// fill rp->source_character_id = CharacterID(); rp->task_id = GetTaskState()->GetActiveSharedTask().task_id; strn0cpy(rp->player_name, r->player_name, sizeof(r->player_name)); LogTasks( "[Handle_OP_SharedTaskRemovePlayer] source_character_id [{}] task_id [{}] player_name [{}]", rp->source_character_id, rp->task_id, rp->player_name ); // send worldserver.SendPacket(p); safe_delete(p); } } void Client::Handle_OP_SharedTaskMakeLeader(const EQApplicationPacket *app) { if (app->size != sizeof(SharedTaskMakeLeader_Struct)) { LogPacketClientServer( "Wrong size on Handle_OP_SharedTaskMakeLeader | got [{}] expected [{}]", app->size, sizeof(SharedTaskMakeLeader_Struct) ); return; } auto *r = (SharedTaskMakeLeader_Struct *) app->pBuffer; LogTasks( "[SharedTaskMakeLeader_Struct] field1 [{}] field2 [{}] player_name [{}]", r->field1, r->field2, r->player_name ); // live no-ops this command if not in a shared task if (GetTaskState()->HasActiveSharedTask()) { // struct auto p = new ServerPacket( ServerOP_SharedTaskMakeLeader, sizeof(ServerSharedTaskMakeLeader_Struct) ); auto *rp = (ServerSharedTaskMakeLeader_Struct *) p->pBuffer; // fill rp->source_character_id = CharacterID(); rp->task_id = GetTaskState()->GetActiveSharedTask().task_id; strn0cpy(rp->player_name, r->player_name, sizeof(r->player_name)); LogTasks( "[Handle_OP_SharedTaskRemovePlayer] source_character_id [{}] task_id [{}] player_name [{}]", rp->source_character_id, rp->task_id, rp->player_name ); // send worldserver.SendPacket(p); safe_delete(p); } } void Client::Handle_OP_SharedTaskInviteResponse(const EQApplicationPacket *app) { if (app->size != sizeof(SharedTaskInviteResponse_Struct)) { LogPacketClientServer( "Wrong size on SharedTaskInviteResponse | got [{}] expected [{}]", app->size, sizeof(SharedTaskInviteResponse_Struct) ); return; } auto *r = (SharedTaskInviteResponse_Struct *) app->pBuffer; LogTasks( "[SharedTaskInviteResponse] unknown00 [{}] invite_id [{}] accepted [{}]", r->unknown00, r->invite_id, r->accepted ); // struct auto p = new ServerPacket( ServerOP_SharedTaskInviteAcceptedPlayer, sizeof(ServerSharedTaskInviteAccepted_Struct) ); auto *c = (ServerSharedTaskInviteAccepted_Struct *) p->pBuffer; // fill c->source_character_id = CharacterID(); c->shared_task_id = r->invite_id; c->accepted = r->accepted; strn0cpy(c->player_name, GetName(), sizeof(c->player_name)); LogTasks( "[ServerOP_SharedTaskInviteAcceptedPlayer] source_character_id [{}] shared_task_id [{}]", c->source_character_id, c->shared_task_id ); // send worldserver.SendPacket(p); safe_delete(p); } void Client::Handle_OP_SharedTaskAccept(const EQApplicationPacket* app) { auto buf = reinterpret_cast<SharedTaskAccept_Struct*>(app->pBuffer); LogTasksDetail( "[OP_SharedTaskAccept] unknown00 [{}] unknown04 [{}] npc_entity_id [{}] task_id [{}]", buf->unknown00, buf->unknown04, buf->npc_entity_id, buf->task_id ); if (buf->task_id > 0 && RuleB(TaskSystem, EnableTaskSystem) && task_state) { task_state->AcceptNewTask(this, buf->task_id, buf->npc_entity_id, std::time(nullptr)); } } void Client::Handle_OP_SharedTaskQuit(const EQApplicationPacket* app) { if (GetTaskState()->HasActiveSharedTask()) { CancelTask(TASKSLOTSHAREDTASK, TaskType::Shared); } } void Client::Handle_OP_TaskTimers(const EQApplicationPacket* app) { GetTaskState()->ListTaskTimers(this); } void Client::Handle_OP_SharedTaskPlayerList(const EQApplicationPacket* app) { if (GetTaskState()->HasActiveSharedTask()) { uint32_t size = sizeof(ServerSharedTaskPlayerList_Struct); auto pack = 
std::make_unique<ServerPacket>(ServerOP_SharedTaskPlayerList, size); auto buf = reinterpret_cast<ServerSharedTaskPlayerList_Struct*>(pack->pBuffer); buf->source_character_id = CharacterID(); buf->task_id = GetTaskState()->GetActiveSharedTask().task_id; worldserver.SendPacket(pack.get()); } } int64 Client::GetSharedTaskId() const { return m_shared_task_id; } void Client::SetSharedTaskId(int64 shared_task_id) { Client::m_shared_task_id = shared_task_id; }
1
11,024
You don't need the explicit this-> here; the member call works fine without it.
EQEmu-Server
cpp
@@ -20,6 +20,9 @@ The metadata server is only accessible on GCE. import httplib import socket +from google.auth.compute_engine import _metadata +from google.auth.transport import requests + from google.cloud.forseti.common.util import errors from google.cloud.forseti.common.util import logger
1
# Copyright 2017 The Forseti Security Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Metadata server utilities. The metadata server is only accessible on GCE. """ import httplib import socket from google.cloud.forseti.common.util import errors from google.cloud.forseti.common.util import logger METADATA_SERVER_HOSTNAME = 'metadata.google.internal' METADATA_SERVER_CONN_TIMEOUT = 2 REQUIRED_METADATA_HEADER = {'Metadata-Flavor': 'Google'} HTTP_SUCCESS = httplib.OK HTTP_GET = 'GET' LOGGER = logger.get_logger(__name__) def _obtain_http_client(hostname=METADATA_SERVER_HOSTNAME): """Get an HTTP client to the GCP metadata server. Args: hostname (str): A qualified hostname. Returns: HttpClient: A simple HTTP client to the GCP metadata server. """ return httplib.HTTPConnection(hostname, timeout=METADATA_SERVER_CONN_TIMEOUT) def _issue_http_request(method, path, headers): """Perform a request on a specified httplib connection object. Args: method (str): The http request method. path (str): The path on the server. headers (dict): A key-value pairs of headers. Returns: httplib.HTTPResponse: The HTTP response object. Raises: MetadataServerHttpError: When we can't reach the requested host. """ http_client = _obtain_http_client() try: http_client.request(method, path, headers=headers) return http_client.getresponse() except (socket.error, httplib.HTTPException) as e: LOGGER.error('Error with request: %s', e) raise errors.MetadataServerHttpError # TODO: Should use memoize or similar so that after the first check # the cached result is always returned, regardless of how often it is # called. def can_reach_metadata_server(): """Determine if we can reach the metadata server. Returns: bool: True if metadata server can be reached, False otherwise. """ path = '/computeMetadata/v1/instance/id' response = None try: response = _issue_http_request( HTTP_GET, path, REQUIRED_METADATA_HEADER) except errors.MetadataServerHttpError: pass return response and response.status == HTTP_SUCCESS def get_value_for_attribute(attribute): """For a given key return the value. Args: attribute (str): Some metadata key. Returns: str: The value of the requested key, if key isn't present then None. """ path = '/computeMetadata/v1/instance/attributes/%s' % attribute try: http_response = _issue_http_request( HTTP_GET, path, REQUIRED_METADATA_HEADER) return http_response.read() except (TypeError, ValueError, errors.MetadataServerHttpError) as e: LOGGER.error('Unable to read value for attribute key %s ' 'from metadata server: %s', attribute, e) return None def get_project_id(): """Get the project id from the metadata server. Returns: str: The of the project id, on error, returns None. """ path = '/computeMetadata/v1/project/project-id' try: http_response = _issue_http_request( HTTP_GET, path, REQUIRED_METADATA_HEADER) return http_response.read() except errors.MetadataServerHttpError as e: LOGGER.error('Unable to read project id from metadata server: %s', e) return None
1
30,240
I'm a little concerned about relying on a private module, since its implementation could change at some point. I'd like to have a test that validates this is working as intended (see the illustrative test sketch after this record).
forseti-security-forseti-security
py
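Following up on the review comment above: below is a minimal pytest-style sketch of the kind of test that could validate the google.auth fallback without touching a real metadata server. It is illustrative only; get_project_id_from_google_auth is a hypothetical stand-in (the patched Forseti function is not shown in the diff), and it assumes the new code ultimately delegates to google.auth.compute_engine._metadata.get_project_id(Request()).

# Illustrative pytest sketch only -- not part of the Forseti patch above.
# Assumption: the patched code ends up calling
# google.auth.compute_engine._metadata.get_project_id(Request()); the
# helper below is a hypothetical stand-in for that code path.
import unittest.mock as mock

from google.auth.compute_engine import _metadata
from google.auth.transport import requests


def get_project_id_from_google_auth():
    """Hypothetical stand-in for the patched Forseti helper."""
    return _metadata.get_project_id(requests.Request())


def test_get_project_id_from_google_auth():
    # Patch the private helper so the test never hits a real metadata
    # server; if google.auth renames get_project_id, patch.object raises
    # AttributeError here instead of the breakage surfacing in production.
    with mock.patch.object(
            _metadata, 'get_project_id', return_value='forseti-test-project'
    ) as mocked:
        assert get_project_id_from_google_auth() == 'forseti-test-project'
        mocked.assert_called_once()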
@@ -1095,7 +1095,7 @@ class Resource: send_alert(self.request, message, url) operator = COMPARISON.LT - if value == "": + if value == "" or isinstance(value, dict): raise_invalid(self.request, **error_details) filters.append(Filter(self.model.modified_field, value, operator))
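For context on the one-line kinto change above: the extra isinstance(value, dict) guard suggests that a dict can reach this point, presumably because querystring values that look like JSON are decoded before filter extraction, so something like ?_since={} previously slipped past the empty-string check and failed later instead of returning a clean 400. The snippet below is a stand-alone sketch under that assumption, not kinto's actual parsing code.

# Stand-alone illustration, not kinto's actual request parsing.
# Assumption: querystring values that look like JSON are decoded before
# reaching the filter extraction, so `?_since={}` arrives here as a dict.
import json


def decode_querystring_value(raw):
    """Loose stand-in for how a JSON-looking querystring value is decoded."""
    try:
        return json.loads(raw)
    except ValueError:
        return raw


def validate_since(value):
    # Before the patch only the empty string was rejected, so a dict made
    # it into the storage filter; the added isinstance check turns that
    # into an immediate validation error instead.
    if value == "" or isinstance(value, dict):
        raise ValueError("Invalid value for _since")
    return value


print(validate_since(decode_querystring_value("1523349594783")))  # -> 1523349594783
try:
    validate_since(decode_querystring_value("{}"))  # rejected with the patch applied
except ValueError as exc:
    print(exc)  # Invalid value for _since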
1
import functools import logging import re import warnings from uuid import uuid4 import colander import venusian from pyramid import exceptions as pyramid_exceptions from pyramid.decorator import reify from pyramid.httpexceptions import ( HTTPNotFound, HTTPNotModified, HTTPPreconditionFailed, HTTPServiceUnavailable, ) from pyramid.security import Everyone from kinto.core import Service from kinto.core.errors import ERRORS, http_error, raise_invalid, request_GET, send_alert from kinto.core.events import ACTIONS from kinto.core.storage import MISSING, Filter, Sort from kinto.core.storage import exceptions as storage_exceptions from kinto.core.utils import ( COMPARISON, apply_json_patch, classname, decode64, dict_subset, encode64, find_nested_value, json, recursive_update_dict, ) from .model import Model from .schema import JsonPatchRequestSchema, ResourceSchema from .viewset import ViewSet logger = logging.getLogger(__name__) def register(depth=1, **kwargs): """Ressource class decorator. Register the decorated class in the cornice registry. Pass all its keyword arguments to the register_resource function. """ def wrapped(resource): register_resource(resource, depth=depth + 1, **kwargs) return resource return wrapped def register_resource(resource_cls, settings=None, viewset=None, depth=1, **kwargs): """Register a resource in the cornice registry. :param resource_cls: The resource class to register. It should be a class or have a "name" attribute. :param viewset: A ViewSet object, which will be used to find out which arguments should be appended to the views, and where the views are. :param depth: A depth offset. It will be used to determine what is the level of depth in the call tree. (set to 1 by default.) Any additional keyword parameters will be used to override the viewset attributes. """ if viewset is None: viewset = resource_cls.default_viewset(**kwargs) else: viewset.update(**kwargs) resource_name = viewset.get_name(resource_cls) def register_service(endpoint_type, settings): """Registers a service in cornice, for the given type. """ path_pattern = getattr(viewset, f"{endpoint_type}_path") path_values = {"resource_name": resource_name} path = path_pattern.format_map(path_values) name = viewset.get_service_name(endpoint_type, resource_cls) service = Service(name, path, depth=depth, **viewset.get_service_arguments()) # Attach viewset and resource to the service for later reference. service.viewset = viewset service.resource = resource_cls service.type = endpoint_type # Attach plural and object paths. service.plural_path = viewset.plural_path.format_map(path_values) service.object_path = ( viewset.object_path.format_map(path_values) if viewset.object_path is not None else None ) methods = getattr(viewset, f"{endpoint_type}_methods") for method in methods: if not viewset.is_endpoint_enabled( endpoint_type, resource_name, method.lower(), settings ): continue argument_getter = getattr(viewset, f"{endpoint_type}_arguments") view_args = argument_getter(resource_cls, method) view = viewset.get_view(endpoint_type, method.lower()) service.add_view(method, view, klass=resource_cls, **view_args) # We support JSON-patch on PATCH views. Since the body payload # of JSON Patch is not a dict (mapping) but an array, we can't # use the same schema as for other PATCH protocols. We add another # dedicated view for PATCH, but targetting a different content_type # predicate. 
if method.lower() == "patch": view_args["content_type"] = "application/json-patch+json" view_args["schema"] = JsonPatchRequestSchema() service.add_view(method, view, klass=resource_cls, **view_args) return service def callback(context, name, ob): # get the callbacks registred by the inner services # and call them from here when the @resource classes are being # scanned by venusian. config = context.config.with_package(info.module) # Storage is mandatory for resources. if not hasattr(config.registry, "storage"): msg = "Mandatory storage backend is missing from configuration." raise pyramid_exceptions.ConfigurationError(msg) # A service for the list. service = register_service("plural", config.registry.settings) config.add_cornice_service(service) # An optional one for object endpoint. if getattr(viewset, "object_path") is not None: service = register_service("object", config.registry.settings) config.add_cornice_service(service) info = venusian.attach(resource_cls, callback, category="pyramid", depth=depth) return callback class Resource: """Resource class providing every HTTP endpoint. A resource provides all the necessary mechanism for: - storage and retrieval of objects according to HTTP verbs - permission checking and tracking - concurrency control - synchronization - OpenAPI metadata Permissions are verified in :class:`kinto.core.authorization.AuthorizationPolicy` based on the verb and context (eg. a put can create or update). The resulting context is passed in the `context` constructor parameter. """ default_viewset = ViewSet """Default :class:`kinto.core.resource.viewset.ViewSet` class to use when the resource is registered.""" default_model = Model """Default :class:`kinto.core.resource.model.Model` class to use for interacting the :mod:`kinto.core.storage` and :mod:`kinto.core.permission` backends.""" schema = ResourceSchema """Schema to validate objects.""" permissions = ("read", "write") """List of allowed permissions names.""" def __init__(self, request, context=None): """ :param request: The current request object. :param context: The resulting context obtained from :class:`kinto.core.authorization.AuthorizationPolicy`. """ self.request = request self.context = context content_type = str(self.request.headers.get("Content-Type")).lower() self._is_json_patch = content_type == "application/json-patch+json" self._is_merge_patch = content_type == "application/merge-patch+json" # Models are isolated by user. parent_id = self.get_parent_id(request) # The principal of an anonymous is system.Everyone current_principal = self.request.prefixed_userid or Everyone if not hasattr(self, "model"): self.model = self.default_model( storage=request.registry.storage, permission=request.registry.permission, id_generator=self.id_generator, resource_name=classname(self), parent_id=parent_id, current_principal=current_principal, prefixed_principals=request.prefixed_principals, ) # Initialize timestamp as soon as possible. self.timestamp if self.context: self.model.get_permission_object_id = functools.partial( self.context.get_permission_object_id, self.request ) @reify def id_generator(self): # ID generator by resource name in settings. default_id_generator = self.request.registry.id_generators[""] resource_name = self.request.current_resource_name id_generator = self.request.registry.id_generators.get(resource_name, default_id_generator) return id_generator @reify def timestamp(self): """Return the current resource timestamp. 
:rtype: int """ try: return self.model.timestamp() except storage_exceptions.BackendError as e: is_readonly = self.request.registry.settings["readonly"] if not is_readonly: raise e # If the instance is configured to be readonly, and if the # resource is empty, the backend will try to bump the timestamp. # It fails if the configured db user has not write privileges. logger.exception(e) error_msg = ( "Resource timestamp cannot be written. " "Plural endpoint must be hit at least once from a " "writable instance." ) raise http_error(HTTPServiceUnavailable(), errno=ERRORS.BACKEND, message=error_msg) @reify def object_id(self): """Return the object id for this request. It's either in the match dict or in the posted body. """ if self.request.method.lower() == "post": try: # Since ``id`` does not belong to schema, it is not in validated # data. Must look up in body directly instead of request.validated. _id = self.request.json["data"][self.model.id_field] self._raise_400_if_invalid_id(_id) return _id except (KeyError, ValueError): return None return self.request.matchdict.get("id") def get_parent_id(self, request): """Return the parent_id of the resource with regards to the current request. The resource will isolate the objects from one parent id to another. For example, in Kinto, the ``group``s and ``collection``s are isolated by ``bucket``. In order to obtain a resource where users can only see their own objects, just return the user id as the parent id: .. code-block:: python def get_parent_id(self, request): return request.prefixed_userid :param request: The request used to access the resource. :rtype: str """ return "" def _get_known_fields(self): """Return all the `field` defined in the ressource schema.""" known_fields = [c.name for c in self.schema().children] + [ self.model.id_field, self.model.modified_field, self.model.deleted_field, ] return known_fields def is_known_field(self, field): """Return ``True`` if `field` is defined in the resource schema. If the resource schema allows unknown fields, this will always return ``True``. :param str field: Field name :rtype: bool """ if self.schema.get_option("preserve_unknown"): return True known_fields = self._get_known_fields() # Test first level only: ``target.data.id`` -> ``target`` field = field.split(".", 1)[0] return field in known_fields # # End-points # def plural_head(self): """Model ``HEAD`` endpoint: empty reponse with a ``Total-Objects`` header. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if ``If-None-Match`` header is provided and collection not modified in the interim. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and collection modified in the iterim. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest` if filters or sorting are invalid. """ return self._plural_get(True) def plural_get(self): """Model ``GET`` endpoint: retrieve multiple objects. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if ``If-None-Match`` header is provided and the objects not modified in the interim. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and the objects modified in the iterim. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest` if filters or sorting are invalid. 
""" return self._plural_get(False) def _plural_get(self, head_request): self._add_timestamp_header(self.request.response) self._add_cache_header(self.request.response) self._raise_304_if_not_modified() # Plural endpoints are considered resources that always exist self._raise_412_if_modified(obj={}) headers = self.request.response.headers filters = self._extract_filters() limit = self._extract_limit() sorting = self._extract_sorting(limit) partial_fields = self._extract_partial_fields() filter_fields = [f.field for f in filters] include_deleted = self.model.modified_field in filter_fields pagination_rules, offset = self._extract_pagination_rules_from_token(limit, sorting) # The reason why we call self.model.get_objects() with `limit=limit + 1` is to avoid # having to count the total number of objects in the database just to be able # to *decide* whether or not to have a `Next-Page` header. # This way, we can quickly depend on the number of objects returned and compare that # with what the client requested. # For example, if there are 100 objects in the database and the client used limit=100, # it would, internally, ask for 101 objects. So if you retrieved 100 objects # it means we got less than we asked for and thus there is not another page. # Equally, if there are 200 objects in the database and the client used # limit=100 it would, internally, ask for 101 objects and actually get that. Then, # you know there is another page. if head_request: count = self.model.count_objects(filters=filters) headers["Total-Objects"] = headers["Total-Records"] = str(count) return self.postprocess([]) objects = self.model.get_objects( filters=filters, sorting=sorting, limit=limit + 1, # See bigger explanation above. pagination_rules=pagination_rules, include_deleted=include_deleted, ) offset = offset + len(objects) if limit and len(objects) == limit + 1: lastobject = objects[-2] next_page = self._next_page_url(sorting, limit, lastobject, offset) headers["Next-Page"] = next_page if partial_fields: objects = [dict_subset(obj, partial_fields) for obj in objects] # See bigger explanation above about the use of limits. The need for slicing # here is because we might have asked for 1 more object just to see if there's # a next page. But we have to honor the limit in our returned response. return self.postprocess(objects[:limit]) def plural_post(self): """Model ``POST`` endpoint: create an object. If the new object id conflicts against an existing one, the posted object is ignored, and the existing object is returned, with a ``200`` status. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and the objects modified in the iterim. .. seealso:: Add custom behaviour by overriding :meth:`kinto.core.resource.Resource.process_object` """ new_object = self.request.validated["body"].get("data", {}) existing = None # If id was specified, then add it to posted body and look-up # the existing object. 
if self.object_id is not None: new_object[self.model.id_field] = self.object_id try: existing = self._get_object_or_404(self.object_id) except HTTPNotFound: pass self._raise_412_if_modified(obj=existing) if existing: obj = existing action = ACTIONS.READ else: new_object = self.process_object(new_object) obj = self.model.create_object(new_object) self.request.response.status_code = 201 action = ACTIONS.CREATE timestamp = obj[self.model.modified_field] self._add_timestamp_header(self.request.response, timestamp=timestamp) return self.postprocess(obj, action=action) def plural_delete(self): """Model ``DELETE`` endpoint: delete multiple objects. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and the objects modified in the iterim. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest` if filters are invalid. """ # Plural endpoint are considered resources that always exist self._raise_412_if_modified(obj={}) filters = self._extract_filters() limit = self._extract_limit() sorting = self._extract_sorting(limit) pagination_rules, offset = self._extract_pagination_rules_from_token(limit, sorting) objects = self.model.get_objects( filters=filters, sorting=sorting, limit=limit + 1, pagination_rules=pagination_rules ) deleted = self.model.delete_objects( filters=filters, sorting=sorting, limit=limit, pagination_rules=pagination_rules ) if deleted: lastobject = deleted[-1] # Add pagination header, but only if there are more objects beyond the limit. if limit and len(objects) == limit + 1: next_page = self._next_page_url(sorting, limit, lastobject, offset) self.request.response.headers["Next-Page"] = next_page timestamp = max({d[self.model.modified_field] for d in deleted}) self._add_timestamp_header(self.request.response, timestamp=timestamp) else: self._add_timestamp_header(self.request.response) action = len(deleted) > 0 and ACTIONS.DELETE or ACTIONS.READ return self.postprocess(deleted, action=action, old=objects[:limit]) def get(self): """Object ``GET`` endpoint: retrieve an object. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if the object is not found. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if ``If-None-Match`` header is provided and object not modified in the interim. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and object modified in the iterim. """ self._raise_400_if_invalid_id(self.object_id) obj = self._get_object_or_404(self.object_id) timestamp = obj[self.model.modified_field] self._add_timestamp_header(self.request.response, timestamp=timestamp) self._add_cache_header(self.request.response) self._raise_304_if_not_modified(obj) self._raise_412_if_modified(obj) partial_fields = self._extract_partial_fields() if partial_fields: obj = dict_subset(obj, partial_fields) return self.postprocess(obj) def put(self): """Object ``PUT`` endpoint: create or replace the provided object and return it. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and object modified in the iterim. .. note:: If ``If-None-Match: *`` request header is provided, the ``PUT`` will succeed only if no object exists with this id. .. seealso:: Add custom behaviour by overriding :meth:`kinto.core.resource.Resource.process_object`. 
""" self._raise_400_if_invalid_id(self.object_id) try: existing = self._get_object_or_404(self.object_id) except HTTPNotFound: existing = None self._raise_412_if_modified(obj=existing) # If `data` is not provided, use existing object (or empty if creation) post_object = self.request.validated["body"].get("data", existing) or {} object_id = post_object.setdefault(self.model.id_field, self.object_id) self._raise_400_if_id_mismatch(object_id, self.object_id) new_object = self.process_object(post_object, old=existing) if existing: obj = self.model.update_object(new_object) else: obj = self.model.create_object(new_object) self.request.response.status_code = 201 timestamp = obj[self.model.modified_field] self._add_timestamp_header(self.request.response, timestamp=timestamp) action = existing and ACTIONS.UPDATE or ACTIONS.CREATE return self.postprocess(obj, action=action, old=existing) def patch(self): """Object ``PATCH`` endpoint: modify an object and return its new version. If a request header ``Response-Behavior`` is set to ``light``, only the fields whose value was changed are returned. If set to ``diff``, only the fields whose value became different than the one provided are returned. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if the object is not found. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and object modified in the iterim. .. seealso:: Add custom behaviour by overriding :meth:`kinto.core.resource.Resource.apply_changes` or :meth:`kinto.core.resource.Resource.process_object`. """ self._raise_400_if_invalid_id(self.object_id) existing = self._get_object_or_404(self.object_id) self._raise_412_if_modified(existing) # patch is specified as a list of of operations (RFC 6902) if self._is_json_patch: requested_changes = self.request.validated["body"] else: # `data` attribute may not be present if only perms are patched. body = self.request.validated["body"] if not body: # If no `data` nor `permissions` is provided in patch, reject! # XXX: This should happen in schema instead (c.f. ViewSet) error_details = { "name": "data", "description": "Provide at least one of data or permissions", } raise_invalid(self.request, **error_details) requested_changes = body.get("data", {}) updated, applied_changes = self.apply_changes( obj=existing, requested_changes=requested_changes ) object_id = updated.setdefault(self.model.id_field, self.object_id) self._raise_400_if_id_mismatch(object_id, self.object_id) new_object = self.process_object(updated, old=existing) changed_fields = [ k for k in applied_changes.keys() if existing.get(k) != new_object.get(k) ] new_object = self.model.update_object(new_object) # Adjust response according to ``Response-Behavior`` header body_behavior = self.request.validated["header"].get("Response-Behavior", "full") if body_behavior.lower() == "light": # Only fields that were changed. data = {k: new_object[k] for k in changed_fields} elif body_behavior.lower() == "diff": # Only fields that are different from those provided. data = { k: new_object[k] for k in changed_fields if applied_changes.get(k) != new_object.get(k) } else: data = new_object timestamp = new_object.get(self.model.modified_field, existing[self.model.modified_field]) self._add_timestamp_header(self.request.response, timestamp=timestamp) return self.postprocess(data, action=ACTIONS.UPDATE, old=existing) def delete(self): """Object ``DELETE`` endpoint: delete an object and return it. 
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if the object is not found. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if ``If-Match`` header is provided and object modified in the iterim. """ self._raise_400_if_invalid_id(self.object_id) obj = self._get_object_or_404(self.object_id) self._raise_412_if_modified(obj) # Retreive the last_modified information from a querystring if present. last_modified = self.request.validated["querystring"].get("last_modified") # If less or equal than current object. Ignore it. if last_modified and last_modified <= obj[self.model.modified_field]: last_modified = None try: deleted = self.model.delete_object(obj, last_modified=last_modified) except storage_exceptions.ObjectNotFoundError: # Delete might fail if the object was deleted since we # fetched it from the storage (ref Kinto/kinto#1407). This # is one of a larger class of issues where another request # could modify the object between our fetch and our # delete, which could e.g. invalidate our precondition # checking. Fixing this correctly is a larger # problem. However, let's punt on fixing it correctly and # just handle this one important case for now (see #1557). # # Raise a 404 vs. a 409 or 412 because that's what we # would have done if the other thread's delete had # happened a little earlier. (The client doesn't need to # know that we did a bunch of work fetching the existing # object for nothing.) raise self._404_for_object(self.object_id) timestamp = deleted[self.model.modified_field] self._add_timestamp_header(self.request.response, timestamp=timestamp) return self.postprocess(deleted, action=ACTIONS.DELETE, old=obj) # # Data processing # def process_object(self, new, old=None): """Hook for processing objects before they reach storage, to introduce specific logics on fields for example. .. code-block:: python def process_object(self, new, old=None): new = super().process_object(new, old) version = old['version'] if old else 0 new['version'] = version + 1 return new Or add extra validation based on request: .. code-block:: python from kinto.core.errors import raise_invalid def process_object(self, new, old=None): new = super().process_object(new, old) if new['browser'] not in request.headers['User-Agent']: raise_invalid(self.request, name='browser', error='Wrong') return new :param dict new: the validated object to be created or updated. :param dict old: the old object to be updated, ``None`` for creation endpoints. :returns: the processed object. :rtype: dict """ modified_field = self.model.modified_field new_last_modified = new.get(modified_field) # Drop the new last_modified if it is not an integer. is_integer = isinstance(new_last_modified, int) if not is_integer: new.pop(modified_field, None) new_last_modified = None # Drop the new last_modified if lesser or equal to the old one. 
is_less_or_equal = ( new_last_modified and old is not None and new_last_modified <= old[modified_field] ) if is_less_or_equal: new.pop(modified_field, None) # patch is specified as a list of of operations (RFC 6902) payload = self.request.validated["body"] if self._is_json_patch: permissions = apply_json_patch(old, payload)["permissions"] elif self._is_merge_patch: existing = old or {} permissions = existing.get("__permissions__", {}) recursive_update_dict(permissions, payload.get("permissions", {}), ignores=(None,)) else: permissions = { k: v for k, v in payload.get("permissions", {}).items() if v is not None } annotated = {**new} if permissions: is_put = self.request.method.lower() == "put" if is_put or self._is_merge_patch: # Remove every existing ACEs using empty lists. for perm in self.permissions: permissions.setdefault(perm, []) annotated[self.model.permissions_field] = permissions return annotated def apply_changes(self, obj, requested_changes): """Merge `changes` into `object` fields. .. note:: This is used in the context of PATCH only. Override this to control field changes at object level, for example: .. code-block:: python def apply_changes(self, obj, requested_changes): # Ignore value change if inferior if object['position'] > changes.get('position', -1): changes.pop('position', None) return super().apply_changes(obj, requested_changes) :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest` if result does not comply with resource schema. :returns: the new object with `changes` applied. :rtype: tuple """ if self._is_json_patch: try: applied_changes = apply_json_patch(obj, requested_changes)["data"] updated = {**applied_changes} except ValueError as e: error_details = { "location": "body", "description": f"JSON Patch operation failed: {e}", } raise_invalid(self.request, **error_details) else: applied_changes = {**requested_changes} updated = {**obj} # recursive patch and remove field if null attribute is passed (RFC 7396) if self._is_merge_patch: recursive_update_dict(updated, applied_changes, ignores=(None,)) else: updated.update(**applied_changes) for field, value in applied_changes.items(): has_changed = obj.get(field, value) != value if self.schema.is_readonly(field) and has_changed: error_details = {"name": field, "description": f"Cannot modify {field}"} raise_invalid(self.request, **error_details) try: validated = self.schema().deserialize(updated) except colander.Invalid as e: # Transform the errors we got from colander into Cornice errors. # We could not rely on Service schema because the object should be # validated only once the changes are applied for field, error in e.asdict().items(): # pragma: no branch raise_invalid(self.request, name=field, description=error) return validated, applied_changes def postprocess(self, result, action=ACTIONS.READ, old=None): body = {} if not isinstance(result, list): perms = result.pop(self.model.permissions_field, None) if perms is not None: body["permissions"] = {k: list(p) for k, p in perms.items()} if old: # Remove permissions from event payload. old.pop(self.model.permissions_field, None) body["data"] = result parent_id = self.get_parent_id(self.request) # Use self.model.timestamp() instead of self.timestamp because # self.timestamp is @reify'd relatively early in the request, # so doesn't correspond to any time that is relevant to the # event. See #1769. 
timestamp = self.model.timestamp() self.request.notify_resource_event( parent_id=parent_id, timestamp=timestamp, data=result, action=action, old=old ) return body # # Internals # def _404_for_object(self, object_id): details = {"id": object_id, "resource_name": self.request.current_resource_name} return http_error(HTTPNotFound(), errno=ERRORS.INVALID_RESOURCE_ID, details=details) def _get_object_or_404(self, object_id): """Retrieve object from storage and raise ``404 Not found`` if missing. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if the object is not found. """ if self.context and self.context.current_object: # Set during authorization. Save a storage hit. return self.context.current_object try: return self.model.get_object(object_id) except storage_exceptions.ObjectNotFoundError: raise self._404_for_object(object_id) def _add_timestamp_header(self, response, timestamp=None): """Add current timestamp in response headers, when request comes in. """ if timestamp is None: timestamp = self.timestamp # Pyramid takes care of converting. response.last_modified = timestamp / 1000.0 # Return timestamp as ETag. response.headers["ETag"] = f'"{timestamp}"' def _add_cache_header(self, response): """Add Cache-Control and Expire headers, based a on a setting for the current resource. Cache headers will be set with anonymous requests only. .. note:: The ``Cache-Control: no-cache`` response header does not prevent caching in client. It will indicate the client to revalidate the response content on each access. The client will send a conditional request to the server and check that a ``304 Not modified`` is returned before serving content from cache. """ resource_name = self.context.resource_name if self.context else "" setting_key = f"{resource_name}_cache_expires_seconds" cache_expires = self.request.registry.settings.get(setting_key) is_anonymous = self.request.prefixed_userid is None if cache_expires and is_anonymous: response.cache_expires(seconds=int(cache_expires)) else: # Since `Expires` response header provides an HTTP data with a # resolution in seconds, do not use Pyramid `cache_expires()` in # order to omit it. response.cache_control.no_cache = True response.cache_control.no_store = True def _raise_400_if_invalid_id(self, object_id): """Raise 400 if specified object id does not match the format excepted by storage backends. :raises: :class:`pyramid.httpexceptions.HTTPBadRequest` """ is_string = isinstance(object_id, str) if not is_string or not self.model.id_generator.match(object_id): error_details = {"location": "path", "description": "Invalid object id"} raise_invalid(self.request, **error_details) def _raise_304_if_not_modified(self, obj=None): """Raise 304 if current timestamp is inferior to the one specified in headers. :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` """ if_none_match = self.request.validated["header"].get("If-None-Match") if not if_none_match: return if if_none_match == "*": return if obj: current_timestamp = obj[self.model.modified_field] else: current_timestamp = self.model.timestamp() if current_timestamp == if_none_match: response = HTTPNotModified() self._add_timestamp_header(response, timestamp=current_timestamp) raise response def _raise_412_if_modified(self, obj=None): """Raise 412 if current timestamp is superior to the one specified in headers. 
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` """ if_match = self.request.validated["header"].get("If-Match") if_none_match = self.request.validated["header"].get("If-None-Match") # Check if object exists object_exists = obj is not None # If no precondition headers, just ignore if not if_match and not if_none_match: return # If-None-Match: * should always raise if an object exists if if_none_match == "*" and object_exists: modified_since = -1 # Always raise. # If-Match should always raise if an object doesn't exist elif if_match and not object_exists: modified_since = -1 # If-Match with ETag value on existing objects should compare ETag elif if_match and if_match != "*": modified_since = if_match # If none of the above applies, don't raise else: return if obj: current_timestamp = obj[self.model.modified_field] else: current_timestamp = self.model.timestamp() if current_timestamp != modified_since: error_msg = "Resource was modified meanwhile" # Do not provide the permissions among the object fields. # Ref: https://github.com/Kinto/kinto/issues/224 existing = {**obj} if obj else {} existing.pop(self.model.permissions_field, None) details = {"existing": existing} if obj else {} response = http_error( HTTPPreconditionFailed(), errno=ERRORS.MODIFIED_MEANWHILE, message=error_msg, details=details, ) self._add_timestamp_header(response, timestamp=current_timestamp) raise response def _raise_400_if_id_mismatch(self, new_id, object_id): """Raise 400 if the `new_id`, within the request body, does not match the `object_id`, obtained from request path. :raises: :class:`pyramid.httpexceptions.HTTPBadRequest` """ if new_id != object_id: error_msg = "Object id does not match existing object" error_details = {"name": self.model.id_field, "description": error_msg} raise_invalid(self.request, **error_details) def _extract_partial_fields(self): """Extract the fields to do the projection from QueryString parameters. """ fields = self.request.validated["querystring"].get("_fields") if fields: root_fields = [f.split(".")[0] for f in fields] known_fields = self._get_known_fields() invalid_fields = set(root_fields) - set(known_fields) preserve_unknown = self.schema.get_option("preserve_unknown") if not preserve_unknown and invalid_fields: error_msg = f"Fields {','.join(invalid_fields)} do not exist" error_details = {"name": "Invalid _fields parameter", "description": error_msg} raise_invalid(self.request, **error_details) # Since id and last_modified are part of the synchronisation # API, force their presence in payloads. fields = fields + [self.model.id_field, self.model.modified_field] return fields def _extract_limit(self): """Extract limit value from QueryString parameters.""" paginate_by = self.request.registry.settings["paginate_by"] max_fetch_size = self.request.registry.settings["storage_max_fetch_size"] limit = self.request.validated["querystring"].get("_limit", paginate_by) # If limit is higher than paginate_by setting, ignore it. if limit and paginate_by: limit = min(limit, paginate_by) # If limit is higher than what storage can retrieve, ignore it. 
limit = min(limit, max_fetch_size) if limit else max_fetch_size return limit def _extract_filters(self): """Extracts filters from QueryString parameters.""" queryparams = self.request.validated["querystring"] filters = [] for param, value in queryparams.items(): param = param.strip() error_details = { "name": param, "location": "querystring", "description": f"Invalid value for {param}", } # Ignore specific fields if param.startswith("_") and param not in ("_since", "_to", "_before"): continue # Handle the _since specific filter. if param in ("_since", "_to", "_before"): if param == "_since": operator = COMPARISON.GT else: if param == "_to": message = "_to is now deprecated, " "you should use _before instead" url = ( "https://kinto.readthedocs.io/en/2.4.0/api/" "resource.html#list-of-available-url-" "parameters" ) send_alert(self.request, message, url) operator = COMPARISON.LT if value == "": raise_invalid(self.request, **error_details) filters.append(Filter(self.model.modified_field, value, operator)) continue all_keywords = r"|".join([i.name.lower() for i in COMPARISON]) m = re.match(r"^(" + all_keywords + r")_([\w\.]+)$", param) if m: keyword, field = m.groups() operator = getattr(COMPARISON, keyword.upper()) else: operator, field = COMPARISON.EQ, param if not self.is_known_field(field): error_msg = f"Unknown filter field '{param}'" error_details["description"] = error_msg raise_invalid(self.request, **error_details) # Return 400 if _limit is not a string if operator == COMPARISON.LIKE: if not isinstance(value, str): raise_invalid(self.request, **error_details) if operator in (COMPARISON.IN, COMPARISON.EXCLUDE): all_integers = all([isinstance(v, int) for v in value]) all_strings = all([isinstance(v, str) for v in value]) has_invalid_value = (field == self.model.id_field and not all_strings) or ( field == self.model.modified_field and not all_integers ) if has_invalid_value: raise_invalid(self.request, **error_details) if "\x00" in field or "\x00" in str(value): error_details["description"] = "Invalid character 0x00" raise_invalid(self.request, **error_details) if field == self.model.modified_field and value == "": raise_invalid(self.request, **error_details) filters.append(Filter(field, value, operator)) # If a plural endpoint is reached, and if the user does not have the # permission to read/write the whole list, the set is filtered by ids, # based on the list of ids returned by the authorization policy. ids = self.context.shared_ids if ids is not None: filter_by_id = Filter(self.model.id_field, ids, COMPARISON.IN) filters.insert(0, filter_by_id) return filters def _extract_sorting(self, limit): """Extracts filters from QueryString parameters.""" specified = self.request.validated["querystring"].get("_sort", []) sorting = [] modified_field_used = self.model.modified_field in specified for field in specified: field = field.strip() m = re.match(r"^([\-+]?)([\w\.]+)$", field) if m: order, field = m.groups() if not self.is_known_field(field): error_details = { "location": "querystring", "description": f"Unknown sort field '{field}'", } raise_invalid(self.request, **error_details) direction = -1 if order == "-" else 1 sorting.append(Sort(field, direction)) if not modified_field_used: # Add a sort by the ``modified_field`` in descending order # useful for pagination sorting.append(Sort(self.model.modified_field, -1)) return sorting def _build_pagination_rules(self, sorting, last_object, rules=None): """Return the list of rules for a given sorting attribute and last_object. 
""" if rules is None: rules = [] rule = [] next_sorting = sorting[:-1] for field, _ in next_sorting: rule.append(Filter(field, last_object.get(field, MISSING), COMPARISON.EQ)) field, direction = sorting[-1] if direction == -1: rule.append(Filter(field, last_object.get(field, MISSING), COMPARISON.LT)) else: rule.append(Filter(field, last_object.get(field, MISSING), COMPARISON.GT)) rules.append(rule) if len(next_sorting) == 0: return rules return self._build_pagination_rules(next_sorting, last_object, rules) def _extract_pagination_rules_from_token(self, limit, sorting): """Get pagination params.""" token = self.request.validated["querystring"].get("_token", None) filters = [] offset = 0 if token: error_msg = None try: tokeninfo = json.loads(decode64(token)) if not isinstance(tokeninfo, dict): raise ValueError() last_object = tokeninfo["last_object"] offset = tokeninfo["offset"] nonce = tokeninfo["nonce"] except (ValueError, KeyError, TypeError): error_msg = "_token has invalid content" # We don't want pagination tokens to be reused several times (#1171). # The cache backend is used to keep track of "nonces". if self.request.method.lower() == "delete" and error_msg is None: registry = self.request.registry deleted = registry.cache.delete(nonce) if deleted is None: error_msg = "_token was already used or has expired." if error_msg: error_details = {"location": "querystring", "description": error_msg} raise_invalid(self.request, **error_details) filters = self._build_pagination_rules(sorting, last_object) return filters, offset def _next_page_url(self, sorting, limit, last_object, offset): """Build the Next-Page header from where we stopped.""" token = self._build_pagination_token(sorting, last_object, offset) params = {**request_GET(self.request), "_limit": limit, "_token": token} service = self.request.current_service next_page_url = self.request.route_url( service.name, _query=params, **self.request.matchdict ) return next_page_url def _build_pagination_token(self, sorting, last_object, offset): """Build a pagination token. It is a base64 JSON object with the sorting fields values of the last_object. """ nonce = f"pagination-token-{uuid4()}" if self.request.method.lower() == "delete": registry = self.request.registry validity = registry.settings["pagination_token_validity_seconds"] registry.cache.set(nonce, "", validity) token = {"last_object": {}, "offset": offset, "nonce": nonce} for field, _ in sorting: last_value = find_nested_value(last_object, field, MISSING) if last_value is not MISSING: token["last_object"][field] = last_value return encode64(json.dumps(token)) @property def record_id(self): message = "`record_id` is deprecated, use `object_id` instead." warnings.warn(message, DeprecationWarning) return self.object_id def process_record(self, *args, **kwargs): message = "`process_record()` is deprecated, use `process_object()` instead." warnings.warn(message, DeprecationWarning) return self.process_object(*args, **kwargs) def collection_get(self, *args, **kwargs): message = "`collection_get()` is deprecated, use `plural_get()` instead." warnings.warn(message, DeprecationWarning) return self.plural_get(*args, **kwargs) def collection_post(self, *args, **kwargs): message = "`collection_post()` is deprecated, use `plural_post()` instead." warnings.warn(message, DeprecationWarning) return self.plural_post(*args, **kwargs) def collection_delete(self, *args, **kwargs): message = "`collection_delete()` is deprecated, use `plural_delete()` instead." 
warnings.warn(message, DeprecationWarning) return self.plural_delete(*args, **kwargs) class ShareableResource(Resource): def __init__(self, *args, **kwargs): message = "`ShareableResource` is deprecated, use `Resource` instead." warnings.warn(message, DeprecationWarning) super().__init__(*args, **kwargs)
1
12,681
There might be other values that we don't support here (e.g. `[]`), so it might be safer to check for the supported types instead (string or number).
Kinto-kinto
py
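For context on the comment above: the `_since`/`_before` handling in `_extract_filters` rejects a couple of specific bad values, and the reviewer suggests an allow-list on type instead. A minimal sketch of that idea; the helper name is hypothetical and the real check lives in `_extract_filters()`:

```python
# Sketch only: an allow-list check on the value's type, as the reviewer
# suggests, rather than rejecting specific unsupported values one by one.
def is_supported_filter_value(value):
    # _since/_before values should be either a string or a number
    # (bool is excluded explicitly since it is an int subclass in Python).
    return isinstance(value, (str, int)) and not isinstance(value, bool)


assert is_supported_filter_value("1573086696000")
assert is_supported_filter_value(1573086696000)
assert not is_supported_filter_value([])    # e.g. `[]` would be rejected
assert not is_supported_filter_value(None)
```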
@@ -179,6 +179,18 @@ Status GoExecutor::prepareFrom() { status = Status::Error(); break; } + if (expr->isFunCallExpression()) { + auto *funcExpr = static_cast<FunctionCallExpression*>(expr); + if (*(funcExpr->name()) == "near") { + auto v = Expression::asString(value.value()); + std::vector<VertexID> result; + folly::split(",", v, result, true); + starts_.insert(starts_.end(), + std::make_move_iterator(result.begin()), + std::make_move_iterator(result.end())); + continue; + } + } auto v = value.value(); if (!Expression::isInt(v)) { status = Status::Error("Vertex ID should be of type integer");
1
/* Copyright (c) 2018 vesoft inc. All rights reserved. * * This source code is licensed under Apache 2.0 License, * attached with Common Clause Condition 1.0, found in the LICENSES directory. */ #include "base/Base.h" #include "graph/GoExecutor.h" #include "graph/SchemaHelper.h" #include "dataman/RowReader.h" #include "dataman/RowSetReader.h" #include "dataman/ResultSchemaProvider.h" namespace nebula { namespace graph { using SchemaProps = std::unordered_map<std::string, std::vector<std::string>>; using nebula::cpp2::SupportedType; GoExecutor::GoExecutor(Sentence *sentence, ExecutionContext *ectx) : TraverseExecutor(ectx) { // The RTTI is guaranteed by Sentence::Kind, // so we use `static_cast' instead of `dynamic_cast' for the sake of efficiency. sentence_ = static_cast<GoSentence*>(sentence); } Status GoExecutor::prepare() { return Status::OK(); } Status GoExecutor::prepareClauses() { DCHECK(sentence_ != nullptr); Status status; expCtx_ = std::make_unique<ExpressionContext>(); expCtx_->setStorageClient(ectx()->getStorageClient()); do { status = checkIfGraphSpaceChosen(); if (!status.ok()) { break; } status = prepareStep(); if (!status.ok()) { break; } status = prepareFrom(); if (!status.ok()) { break; } status = prepareOver(); if (!status.ok()) { break; } status = prepareWhere(); if (!status.ok()) { break; } status = prepareYield(); if (!status.ok()) { break; } status = prepareNeededProps(); if (!status.ok()) { break; } status = prepareDistinct(); if (!status.ok()) { break; } } while (false); if (!status.ok()) { LOG(ERROR) << "Preparing failed: " << status; return status; } return status; } void GoExecutor::execute() { FLOG_INFO("Executing Go: %s", sentence_->toString().c_str()); auto status = prepareClauses(); if (!status.ok()) { DCHECK(onError_); onError_(std::move(status)); return; } status = setupStarts(); if (!status.ok()) { onError_(std::move(status)); return; } if (starts_.empty()) { onEmptyInputs(); return; } if (distinct_) { std::unordered_set<VertexID> uniqID; for (auto id : starts_) { uniqID.emplace(id); } starts_ = std::vector<VertexID>(uniqID.begin(), uniqID.end()); } stepOut(); } void GoExecutor::feedResult(std::unique_ptr<InterimResult> result) { inputs_ = std::move(result); } Status GoExecutor::prepareStep() { auto *clause = sentence_->stepClause(); if (clause != nullptr) { steps_ = clause->steps(); upto_ = clause->isUpto(); } if (isUpto()) { return Status::Error("`UPTO' not supported yet"); } if (steps_ != 1) { backTracker_ = std::make_unique<VertexBackTracker>(); } return Status::OK(); } Status GoExecutor::prepareFrom() { Status status = Status::OK(); auto *clause = sentence_->fromClause(); do { if (clause == nullptr) { LOG(FATAL) << "From clause shall never be null"; } if (clause->isRef()) { auto *expr = clause->ref(); if (expr->isInputExpression()) { fromType_ = kPipe; auto *iexpr = static_cast<InputPropertyExpression*>(expr); colname_ = iexpr->prop(); } else if (expr->isVariableExpression()) { fromType_ = kVariable; auto *vexpr = static_cast<VariablePropertyExpression*>(expr); varname_ = vexpr->alias(); colname_ = vexpr->prop(); } else { // No way to happen except memory corruption LOG(FATAL) << "Unknown kind of expression"; } if (colname_ != nullptr && *colname_ == "*") { status = Status::Error("Can not use `*' to reference a vertex id column."); break; } break; } auto space = ectx()->rctx()->session()->space(); expCtx_->setSpace(space); auto vidList = clause->vidList(); for (auto *expr : vidList) { expr->setContext(expCtx_.get()); status = expr->prepare(); if 
(!status.ok()) { break; } auto value = expr->eval(); if (!value.ok()) { status = Status::Error(); break; } auto v = value.value(); if (!Expression::isInt(v)) { status = Status::Error("Vertex ID should be of type integer"); break; } starts_.push_back(Expression::asInt(v)); } fromType_ = kInstantExpr; if (!status.ok()) { break; } } while (false); return status; } Status GoExecutor::prepareOverAll() { auto spaceId = ectx()->rctx()->session()->space(); auto edgeAllStatus = ectx()->schemaManager()->getAllEdge(spaceId); if (!edgeAllStatus.ok()) { return edgeAllStatus.status(); } auto allEdge = edgeAllStatus.value(); for (auto &e : allEdge) { auto edgeStatus = ectx()->schemaManager()->toEdgeType(spaceId, e); if (!edgeStatus.ok()) { return edgeStatus.status(); } auto v = edgeStatus.value(); edgeTypes_.push_back(v); if (!expCtx_->addEdge(e, v)) { return Status::Error(folly::sformat("edge alias({}) was dup", e)); } } return Status::OK(); } Status GoExecutor::prepareOver() { Status status = Status::OK(); auto *clause = sentence_->overClause(); if (clause == nullptr) { LOG(FATAL) << "Over clause shall never be null"; } auto edges = clause->edges(); for (auto e : edges) { if (e->isOverAll()) { expCtx_->setOverAllEdge(); return prepareOverAll(); } auto spaceId = ectx()->rctx()->session()->space(); auto edgeStatus = ectx()->schemaManager()->toEdgeType(spaceId, *e->edge()); if (!edgeStatus.ok()) { return edgeStatus.status(); } if (e->isReversely()) { edgeTypes_.push_back(-edgeStatus.value()); return Status::Error("`REVERSELY' not supported yet"); } auto v = edgeStatus.value(); edgeTypes_.push_back(v); if (e->alias() != nullptr) { if (!expCtx_->addEdge(*e->alias(), v)) { return Status::Error(folly::sformat("edge alias({}) was dup", *e->alias())); } } else { if (!expCtx_->addEdge(*e->edge(), v)) { return Status::Error(folly::sformat("edge alias({}) was dup", *e->edge())); } } } return status; } Status GoExecutor::prepareWhere() { auto *clause = sentence_->whereClause(); if (clause != nullptr) { filter_ = clause->filter(); } return Status::OK(); } Status GoExecutor::prepareYield() { auto *clause = sentence_->yieldClause(); // this preparation depends on interim result, // it can only be called after getting results of the previous executor, // but if we can do the semantic analysis before execution, // then we can do the preparation before execution // TODO: make it possible that this preparation not depends on interim result if (clause != nullptr) { yieldClauseWrapper_ = std::make_unique<YieldClauseWrapper>(clause); auto *varHolder = ectx()->variableHolder(); auto status = yieldClauseWrapper_->prepare(inputs_.get(), varHolder, yields_); if (!status.ok()) { return status; } for (auto *col : yields_) { if (!col->getFunName().empty()) { return Status::SyntaxError("Do not support in aggregated query without group by"); } } } return Status::OK(); } Status GoExecutor::prepareNeededProps() { auto status = Status::OK(); do { if (filter_ != nullptr) { filter_->setContext(expCtx_.get()); status = filter_->prepare(); if (!status.ok()) { break; } } for (auto *col : yields_) { col->expr()->setContext(expCtx_.get()); status = col->expr()->prepare(); if (!status.ok()) { break; } } if (!status.ok()) { break; } if (expCtx_->hasVariableProp()) { if (fromType_ != kVariable) { status = Status::Error("A variable must be referred in FROM " "before used in WHERE or YIELD"); break; } auto &variables = expCtx_->variables(); if (variables.size() > 1) { status = Status::Error("Only one variable allowed to use"); break; } auto &var = 
*variables.begin(); if (var != *varname_) { status = Status::Error("Variable name not match: `%s' vs. `%s'", var.c_str(), varname_->c_str()); break; } } if (expCtx_->hasInputProp()) { if (fromType_ != kPipe) { status = Status::Error("`$-' must be referred in FROM " "before used in WHERE or YIELD"); break; } } } while (false); return status; } Status GoExecutor::prepareDistinct() { auto *clause = sentence_->yieldClause(); if (clause != nullptr) { distinct_ = clause->isDistinct(); // TODO Consider distinct pushdown later, depends on filter and some other clause pushdown. distinctPushDown_ = !((expCtx_->hasSrcTagProp() || expCtx_->hasEdgeProp()) && expCtx_->hasDstTagProp()); } return Status::OK(); } Status GoExecutor::setupStarts() { // Literal vertex ids if (!starts_.empty()) { return Status::OK(); } const auto *inputs = inputs_.get(); // Take one column from a variable if (varname_ != nullptr) { bool existing = false; auto *varInputs = ectx()->variableHolder()->get(*varname_, &existing); if (varInputs == nullptr && !existing) { return Status::Error("Variable `%s' not defined", varname_->c_str()); } DCHECK(inputs == nullptr); inputs = varInputs; } // No error happened, but we are having empty inputs if (inputs == nullptr || !inputs->hasData()) { return Status::OK(); } auto result = inputs->getVIDs(*colname_); if (!result.ok()) { LOG(ERROR) << "Get vid fail: " << *colname_; return std::move(result).status(); } starts_ = std::move(result).value(); auto indexResult = inputs->buildIndex(*colname_); if (!indexResult.ok()) { return std::move(indexResult).status(); } index_ = std::move(indexResult).value(); return Status::OK(); } void GoExecutor::setupResponse(cpp2::ExecutionResponse &resp) { if (resp_ == nullptr) { resp_ = std::make_unique<cpp2::ExecutionResponse>(); } resp = std::move(*resp_); } void GoExecutor::stepOut() { auto spaceId = ectx()->rctx()->session()->space(); auto status = getStepOutProps(); if (!status.ok()) { DCHECK(onError_); onError_(Status::Error("Get step out props failed")); return; } auto returns = status.value(); auto future = ectx()->getStorageClient()->getNeighbors(spaceId, starts_, edgeTypes_, "", std::move(returns)); auto *runner = ectx()->rctx()->runner(); auto cb = [this] (auto &&result) { auto completeness = result.completeness(); if (completeness == 0) { DCHECK(onError_); onError_(Status::Error("Get neighbors failed")); return; } else if (completeness != 100) { // TODO(dutor) We ought to let the user know that the execution was partially // performed, even in the case that this happened in the intermediate process. // Or, make this case configurable at runtime. // For now, we just do some logging and keep going. 
LOG(INFO) << "Get neighbors partially failed: " << completeness << "%"; for (auto &error : result.failedParts()) { LOG(ERROR) << "part: " << error.first << "error code: " << static_cast<int>(error.second); } } onStepOutResponse(std::move(result)); }; auto error = [this] (auto &&e) { LOG(ERROR) << "Exception caught: " << e.what(); onError_(Status::Error("Internal error")); }; std::move(future).via(runner).thenValue(cb).thenError(error); } void GoExecutor::onStepOutResponse(RpcResponse &&rpcResp) { if (isFinalStep()) { if (expCtx_->hasDstTagProp()) { auto dstids = getDstIdsFromResp(rpcResp); if (dstids.empty()) { onEmptyInputs(); return; } fetchVertexProps(std::move(dstids), std::move(rpcResp)); return; } finishExecution(std::move(rpcResp)); return; } else { starts_ = getDstIdsFromResp(rpcResp); if (starts_.empty()) { onEmptyInputs(); return; } curStep_++; stepOut(); } } void GoExecutor::onVertexProps(RpcResponse &&rpcResp) { UNUSED(rpcResp); } std::vector<std::string> GoExecutor::getEdgeNamesFromResp(RpcResponse &rpcResp) const { std::vector<std::string> names; auto spaceId = ectx()->rctx()->session()->space(); auto &resp = rpcResp.responses(); auto *edgeSchema = resp[0].get_edge_schema(); if (edgeSchema == nullptr) { return names; } for (auto &schema : *edgeSchema) { auto edgeType = schema.first; auto status = ectx()->schemaManager()->toEdgeName(spaceId, edgeType); DCHECK(status.ok()); auto edgeName = status.value(); names.emplace_back(std::move(edgeName)); } return names; } std::vector<VertexID> GoExecutor::getDstIdsFromResp(RpcResponse &rpcResp) const { std::unordered_set<VertexID> set; for (auto &resp : rpcResp.responses()) { auto *vertices = resp.get_vertices(); if (vertices == nullptr) { continue; } auto *eschema = resp.get_edge_schema(); if (eschema == nullptr) { continue; } std::unordered_map<EdgeType, std::shared_ptr<ResultSchemaProvider>> schema; std::transform(eschema->cbegin(), eschema->cend(), std::inserter(schema, schema.begin()), [](auto &s) { return std::make_pair( s.first, std::make_shared<ResultSchemaProvider>(s.second)); }); for (auto &vdata : *vertices) { for (auto &edata : vdata.edge_data) { auto it = schema.find(edata.type); DCHECK(it != schema.end()); RowSetReader rsReader(it->second, edata.data); auto iter = rsReader.begin(); while (iter) { VertexID dst; auto rc = iter->getVid("_dst", dst); CHECK(rc == ResultType::SUCCEEDED); if (!isFinalStep() && backTracker_ != nullptr) { backTracker_->add(vdata.get_vertex_id(), dst); } set.emplace(dst); ++iter; } } } } return std::vector<VertexID>(set.begin(), set.end()); } void GoExecutor::finishExecution(RpcResponse &&rpcResp) { // MayBe we can do better. 
std::vector<std::unique_ptr<YieldColumn>> yc; if (expCtx_->isOverAllEdge() && yields_.empty()) { auto edgeNames = getEdgeNamesFromResp(rpcResp); if (edgeNames.empty()) { DCHECK(onError_); onError_(Status::Error("get edge name failed")); return; } for (const auto &name : edgeNames) { auto dummy = new std::string(name); auto dummy_exp = new EdgeDstIdExpression(dummy); auto ptr = std::make_unique<YieldColumn>(dummy_exp); dummy_exp->setContext(expCtx_.get()); yields_.emplace_back(ptr.get()); yc.emplace_back(std::move(ptr)); } } std::unique_ptr<InterimResult> outputs; if (!setupInterimResult(std::move(rpcResp), outputs)) { return; } if (onResult_) { onResult_(std::move(outputs)); } else { resp_ = std::make_unique<cpp2::ExecutionResponse>(); resp_->set_column_names(getResultColumnNames()); if (outputs != nullptr && outputs->hasData()) { auto ret = outputs->getRows(); if (!ret.ok()) { LOG(ERROR) << "Get rows failed: " << ret.status(); onError_(std::move(ret).status()); return; } resp_->set_rows(std::move(ret).value()); } } DCHECK(onFinish_); onFinish_(); } StatusOr<std::vector<storage::cpp2::PropDef>> GoExecutor::getStepOutProps() { std::vector<storage::cpp2::PropDef> props; for (auto &e : edgeTypes_) { storage::cpp2::PropDef pd; pd.owner = storage::cpp2::PropOwner::EDGE; pd.name = "_dst"; pd.id.set_edge_type(e); props.emplace_back(std::move(pd)); } if (!isFinalStep()) { return props; } auto spaceId = ectx()->rctx()->session()->space(); for (auto &tagProp : expCtx_->srcTagProps()) { storage::cpp2::PropDef pd; pd.owner = storage::cpp2::PropOwner::SOURCE; pd.name = tagProp.second; auto status = ectx()->schemaManager()->toTagID(spaceId, tagProp.first); if (!status.ok()) { return Status::Error("No schema found for '%s'", tagProp.first.c_str()); } auto tagId = status.value(); pd.id.set_tag_id(tagId); props.emplace_back(std::move(pd)); } for (auto &prop : expCtx_->aliasProps()) { storage::cpp2::PropDef pd; pd.owner = storage::cpp2::PropOwner::EDGE; pd.name = prop.second; EdgeType edgeType; if (!expCtx_->getEdgeType(prop.first, edgeType)) { return Status::Error("the edge was not found '%s'", prop.first.c_str()); } pd.id.set_edge_type(edgeType); props.emplace_back(std::move(pd)); } return props; } StatusOr<std::vector<storage::cpp2::PropDef>> GoExecutor::getDstProps() { std::vector<storage::cpp2::PropDef> props; auto spaceId = ectx()->rctx()->session()->space(); for (auto &tagProp : expCtx_->dstTagProps()) { storage::cpp2::PropDef pd; pd.owner = storage::cpp2::PropOwner::DEST; pd.name = tagProp.second; auto status = ectx()->schemaManager()->toTagID(spaceId, tagProp.first); if (!status.ok()) { return Status::Error("No schema found for '%s'", tagProp.first.c_str()); } auto tagId = status.value(); pd.id.set_tag_id(tagId); props.emplace_back(std::move(pd)); } return props; } void GoExecutor::fetchVertexProps(std::vector<VertexID> ids, RpcResponse &&rpcResp) { auto spaceId = ectx()->rctx()->session()->space(); auto status = getDstProps(); if (!status.ok()) { DCHECK(onError_); onError_(Status::Error("Get dest props failed")); return; } auto returns = status.value(); auto future = ectx()->getStorageClient()->getVertexProps(spaceId, ids, returns); auto *runner = ectx()->rctx()->runner(); auto cb = [this, stepOutResp = std::move(rpcResp)] (auto &&result) mutable { auto completeness = result.completeness(); if (completeness == 0) { DCHECK(onError_); onError_(Status::Error("Get dest props failed")); return; } else if (completeness != 100) { LOG(INFO) << "Get neighbors partially failed: " << completeness << "%"; for 
(auto &error : result.failedParts()) { LOG(ERROR) << "part: " << error.first << "error code: " << static_cast<int>(error.second); } } if (vertexHolder_ == nullptr) { vertexHolder_ = std::make_unique<VertexHolder>(); } for (auto &resp : result.responses()) { vertexHolder_->add(resp); } finishExecution(std::move(stepOutResp)); return; }; auto error = [this] (auto &&e) { LOG(ERROR) << "Exception caught: " << e.what(); onError_(Status::Error("Internal error")); }; std::move(future).via(runner).thenValue(cb).thenError(error); } std::vector<std::string> GoExecutor::getResultColumnNames() const { std::vector<std::string> result; result.reserve(yields_.size()); for (auto *col : yields_) { if (col->alias() == nullptr) { result.emplace_back(col->expr()->toString()); } else { result.emplace_back(*col->alias()); } } return result; } bool GoExecutor::setupInterimResult(RpcResponse &&rpcResp, std::unique_ptr<InterimResult> &result) { // Generic results result = std::make_unique<InterimResult>(getResultColumnNames()); std::shared_ptr<SchemaWriter> schema; std::unique_ptr<RowSetWriter> rsWriter; auto uniqResult = std::make_unique<std::unordered_set<std::string>>(); auto cb = [&] (std::vector<VariantType> record, std::vector<nebula::cpp2::SupportedType> colTypes) { if (schema == nullptr) { schema = std::make_shared<SchemaWriter>(); auto colnames = getResultColumnNames(); if (record.size() != colTypes.size()) { LOG(FATAL) << "data nums: " << record.size() << " != type nums: " << colTypes.size(); } for (auto i = 0u; i < record.size(); i++) { SupportedType type; if (colTypes[i] == SupportedType::UNKNOWN) { switch (record[i].which()) { case VAR_INT64: // all integers in InterimResult are regarded as type of INT type = SupportedType::INT; break; case VAR_DOUBLE: type = SupportedType::DOUBLE; break; case VAR_BOOL: type = SupportedType::BOOL; break; case VAR_STR: type = SupportedType::STRING; break; default: LOG(FATAL) << "Unknown VariantType: " << record[i].which(); } } else { type = colTypes[i]; } schema->appendCol(colnames[i], type); } // for rsWriter = std::make_unique<RowSetWriter>(schema); } // if RowWriter writer(schema); for (auto &column : record) { switch (column.which()) { case VAR_INT64: writer << boost::get<int64_t>(column); break; case VAR_DOUBLE: writer << boost::get<double>(column); break; case VAR_BOOL: writer << boost::get<bool>(column); break; case VAR_STR: writer << boost::get<std::string>(column); break; default: LOG(FATAL) << "Unknown VariantType: " << column.which(); } } // TODO Consider float/double, and need to reduce mem copy. 
std::string encode = writer.encode(); if (distinct_) { auto ret = uniqResult->emplace(encode); if (ret.second) { rsWriter->addRow(std::move(encode)); } } else { rsWriter->addRow(std::move(encode)); } }; // cb if (!processFinalResult(rpcResp, cb)) { return false; } if (rsWriter != nullptr) { result->setInterim(std::move(rsWriter)); } return true; } void GoExecutor::onEmptyInputs() { auto resultColNames = getResultColumnNames(); auto outputs = std::make_unique<InterimResult>(std::move(resultColNames)); if (onResult_) { onResult_(std::move(outputs)); } else if (resp_ == nullptr) { resp_ = std::make_unique<cpp2::ExecutionResponse>(); } onFinish_(); } bool GoExecutor::processFinalResult(RpcResponse &rpcResp, Callback cb) const { auto all = rpcResp.responses(); auto spaceId = ectx()->rctx()->session()->space(); for (auto &resp : all) { if (resp.get_vertices() == nullptr) { continue; } std::unordered_map<TagID, std::shared_ptr<ResultSchemaProvider>> tagSchema; auto *vschema = resp.get_vertex_schema(); if (vschema != nullptr) { std::transform(vschema->cbegin(), vschema->cend(), std::inserter(tagSchema, tagSchema.begin()), [](auto &schema) { return std::make_pair( schema.first, std::make_shared<ResultSchemaProvider>(schema.second)); }); } std::unordered_map<EdgeType, std::shared_ptr<ResultSchemaProvider>> edgeSchema; auto *eschema = resp.get_edge_schema(); if (eschema != nullptr) { std::transform(eschema->cbegin(), eschema->cend(), std::inserter(edgeSchema, edgeSchema.begin()), [](auto &schema) { return std::make_pair( schema.first, std::make_shared<ResultSchemaProvider>(schema.second)); }); } if (tagSchema.empty() && edgeSchema.empty()) { continue; } for (auto &vdata : resp.vertices) { DCHECK(vdata.__isset.edge_data); auto tagData = vdata.get_tag_data(); for (auto &edata : vdata.edge_data) { auto it = edgeSchema.find(edata.type); DCHECK(it != edgeSchema.end()); RowSetReader rsReader(it->second, edata.data); auto iter = rsReader.begin(); auto edgeType = edata.type; while (iter) { std::vector<SupportedType> colTypes; bool saveTypeFlag = false; auto &getters = expCtx_->getters(); getters.getAliasProp = [&iter, &spaceId, &edgeType, &saveTypeFlag, &colTypes, &edgeSchema, this]( const std::string &edgeName, const std::string &prop) -> OptVariantType { auto edgeStatus = ectx()->schemaManager()->toEdgeType(spaceId, edgeName); if (!edgeStatus.ok()) { return edgeStatus.status(); } if (saveTypeFlag) { colTypes.back() = iter->getSchema()->getFieldType(prop).type; } if (edgeType != edgeStatus.value()) { auto sit = edgeSchema.find(edgeStatus.value()); if (sit == edgeSchema.end()) { return Status::Error("get schema failed"); } return RowReader::getDefaultProp(sit->second.get(), prop); } auto res = RowReader::getPropByName(&*iter, prop); if (!ok(res)) { return Status::Error( folly::sformat("get prop({}.{}) failed", edgeName, prop)); } return value(std::move(res)); }; getters.getSrcTagProp = [&iter, &spaceId, &tagData, &tagSchema, &saveTypeFlag, &colTypes, this]( const std::string &tag, const std::string &prop) -> OptVariantType { auto status = ectx()->schemaManager()->toTagID(spaceId, tag); if (!status.ok()) { return status.status(); } auto tagId = status.value(); auto it2 = std::find_if(tagData.cbegin(), tagData.cend(), [&tagId](auto &td) { if (td.tag_id == tagId) { return true; } return false; }); if (it2 == tagData.cend()) { return RowReader::getDefaultProp(iter->getSchema().get(), prop); } if (saveTypeFlag) { colTypes.back() = tagSchema[tagId]->getFieldType(prop).type; } DCHECK(it2->__isset.data); auto 
vreader = RowReader::getRowReader(it2->data, tagSchema[tagId]); auto res = RowReader::getPropByName(vreader.get(), prop); if (!ok(res)) { return Status::Error( folly::sformat("get prop({}.{}) failed", tag, prop)); } return value(res); }; getters.getDstTagProp = [&iter, &spaceId, &saveTypeFlag, &colTypes, this]( const std::string &tag, const std::string &prop) -> OptVariantType { auto dst = RowReader::getPropByName(&*iter, "_dst"); if (!ok(dst)) { return Status::Error( folly::sformat("get prop({}.{}) failed", tag, prop)); } auto vid = boost::get<int64_t>(value(std::move(dst))); auto status = ectx()->schemaManager()->toTagID(spaceId, tag); if (!status.ok()) { return status.status(); } auto tagId = status.value(); if (saveTypeFlag) { SupportedType type = vertexHolder_->getType(vid, tagId, prop); colTypes.back() = type; } return vertexHolder_->get(vid, tagId, prop); }; getters.getVariableProp = [&saveTypeFlag, &colTypes, &vdata, this](const std::string &prop) { if (saveTypeFlag) { colTypes.back() = getPropTypeFromInterim(prop); } return getPropFromInterim(vdata.get_vertex_id(), prop); }; getters.getInputProp = [&saveTypeFlag, &colTypes, &vdata, this](const std::string &prop) { if (saveTypeFlag) { colTypes.back() = getPropTypeFromInterim(prop); } return getPropFromInterim(vdata.get_vertex_id(), prop); }; // Evaluate filter if (filter_ != nullptr) { auto value = filter_->eval(); if (!value.ok()) { onError_(value.status()); return false; } if (!Expression::asBool(value.value())) { ++iter; continue; } } std::vector<VariantType> record; record.reserve(yields_.size()); saveTypeFlag = true; for (auto *column : yields_) { colTypes.emplace_back(SupportedType::UNKNOWN); auto *expr = column->expr(); auto value = expr->eval(); if (!value.ok()) { onError_(value.status()); return false; } if (column->expr()->isTypeCastingExpression()) { auto exprPtr = static_cast<TypeCastingExpression *>(column->expr()); colTypes.back() = SchemaHelper::columnTypeToSupportedType( exprPtr->getType()); } record.emplace_back(std::move(value.value())); } cb(std::move(record), std::move(colTypes)); ++iter; } // while `iter' } } // for `vdata' } // for `resp' return true; } OptVariantType GoExecutor::VertexHolder::getDefaultProp(TagID tid, const std::string &prop) const { for (auto it = data_.cbegin(); it != data_.cend(); ++it) { auto it2 = it->second.find(tid); if (it2 != it->second.cend()) { return RowReader::getDefaultProp(std::get<0>(it2->second).get(), prop); } } return Status::Error("Unknown Vertex"); } SupportedType GoExecutor::VertexHolder::getDefaultPropType(TagID tid, const std::string &prop) const { for (auto it = data_.cbegin(); it != data_.cend(); ++it) { auto it2 = it->second.find(tid); if (it2 != it->second.cend()) { return std::get<0>(it2->second)->getFieldType(prop).type; } } return nebula::cpp2::SupportedType::UNKNOWN; } OptVariantType GoExecutor::VertexHolder::get(VertexID id, TagID tid, const std::string &prop) const { auto iter = data_.find(id); if (iter == data_.end()) { return getDefaultProp(tid, prop); } auto iter2 = iter->second.find(tid); if (iter2 == iter->second.end()) { return getDefaultProp(tid, prop); } auto reader = RowReader::getRowReader(std::get<1>(iter2->second), std::get<0>(iter2->second)); auto res = RowReader::getPropByName(reader.get(), prop); if (!ok(res)) { return Status::Error(folly::sformat("get prop({}) failed", prop)); } return value(std::move(res)); } SupportedType GoExecutor::VertexHolder::getType(VertexID id, TagID tid, const std::string &prop) { auto iter = data_.find(id); if (iter 
== data_.end()) { return getDefaultPropType(tid, prop); } auto iter2 = iter->second.find(tid); if (iter2 == iter->second.end()) { return getDefaultPropType(tid, prop); } return std::get<0>(iter2->second)->getFieldType(prop).type; } void GoExecutor::VertexHolder::add(const storage::cpp2::QueryResponse &resp) { auto *vertices = resp.get_vertices(); if (vertices == nullptr) { return; } auto *vertexSchema = resp.get_vertex_schema(); if (vertexSchema == nullptr) { return; } for (auto &vdata : *vertices) { std::unordered_map<TagID, VData> m; for (auto &td : vdata.tag_data) { DCHECK(td.__isset.data); auto it = vertexSchema->find(td.tag_id); DCHECK(it != vertexSchema->end()); m[td.tag_id] = {std::make_shared<ResultSchemaProvider>(it->second), td.data}; } data_[vdata.vertex_id] = std::move(m); } } OptVariantType GoExecutor::getPropFromInterim(VertexID id, const std::string &prop) const { auto rootId = id; if (backTracker_ != nullptr) { DCHECK_NE(steps_ , 1u); rootId = backTracker_->get(id); } DCHECK(index_ != nullptr); return index_->getColumnWithVID(rootId, prop); } SupportedType GoExecutor::getPropTypeFromInterim(const std::string &prop) const { DCHECK(index_ != nullptr); return index_->getColumnType(prop); } } // namespace graph } // namespace nebula
1
23,057
FYI, it won't have any benefit to move from trivial types.
vesoft-inc-nebula
cpp
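On the `make_move_iterator` point above: `VertexID` is a trivially copyable integer type, so moving the parsed ids buys nothing over copying them. A minimal standalone sketch of the simplified insertion (the alias and function are assumptions for illustration, not the project's code):

```cpp
// Sketch only: appending parsed vertex ids with plain iterators.
// For trivially copyable element types, move-insertion degenerates to a copy,
// so std::make_move_iterator adds noise without a performance benefit.
#include <cstdint>
#include <vector>

using VertexID = int64_t;  // assumption: VertexID is an integer alias

void appendStarts(std::vector<VertexID> &starts, const std::vector<VertexID> &parsed) {
    starts.insert(starts.end(), parsed.begin(), parsed.end());
}
```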
@@ -37,6 +37,7 @@ const ( capabilityDockerPluginInfix = "docker-plugin." attributeSeparator = "." capabilityPrivateRegistryAuthASM = "private-registry-authentication.secretsmanager" + capabilitySecretEnvSSM = "secrets-ssm-environment-variables" ) // capabilities returns the supported capabilities of this agent / docker-client pair.
1
// Copyright 2014-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"). You may // not use this file except in compliance with the License. A copy of the // License is located at // // http://aws.amazon.com/apache2.0/ // // or in the "license" file accompanying this file. This file is distributed // on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language governing // permissions and limitations under the License. package app import ( "github.com/aws/amazon-ecs-agent/agent/config" "github.com/aws/amazon-ecs-agent/agent/dockerclient" "github.com/aws/amazon-ecs-agent/agent/ecs_client/model/ecs" "github.com/aws/amazon-ecs-agent/agent/ecscni" "github.com/aws/aws-sdk-go/aws" "github.com/cihub/seelog" "github.com/pkg/errors" ) const ( // capabilityPrefix is deprecated. For new capabilities, use attributePrefix. capabilityPrefix = "com.amazonaws.ecs.capability." attributePrefix = "ecs.capability." capabilityTaskIAMRole = "task-iam-role" capabilityTaskIAMRoleNetHost = "task-iam-role-network-host" taskENIAttributeSuffix = "task-eni" taskENIBlockInstanceMetadataAttributeSuffix = "task-eni-block-instance-metadata" cniPluginVersionSuffix = "cni-plugin-version" capabilityTaskCPUMemLimit = "task-cpu-mem-limit" capabilityDockerPluginInfix = "docker-plugin." attributeSeparator = "." capabilityPrivateRegistryAuthASM = "private-registry-authentication.secretsmanager" ) // capabilities returns the supported capabilities of this agent / docker-client pair. // Currently, the following capabilities are possible: // // com.amazonaws.ecs.capability.privileged-container // com.amazonaws.ecs.capability.docker-remote-api.1.17 // com.amazonaws.ecs.capability.docker-remote-api.1.18 // com.amazonaws.ecs.capability.docker-remote-api.1.19 // com.amazonaws.ecs.capability.docker-remote-api.1.20 // com.amazonaws.ecs.capability.logging-driver.json-file // com.amazonaws.ecs.capability.logging-driver.syslog // com.amazonaws.ecs.capability.logging-driver.fluentd // com.amazonaws.ecs.capability.logging-driver.journald // com.amazonaws.ecs.capability.logging-driver.gelf // com.amazonaws.ecs.capability.logging-driver.none // com.amazonaws.ecs.capability.selinux // com.amazonaws.ecs.capability.apparmor // com.amazonaws.ecs.capability.ecr-auth // com.amazonaws.ecs.capability.task-iam-role // com.amazonaws.ecs.capability.task-iam-role-network-host // ecs.capability.docker-volume-driver.${driverName} // ecs.capability.task-eni // ecs.capability.task-eni-block-instance-metadata // ecs.capability.execution-role-ecr-pull // ecs.capability.execution-role-awslogs // ecs.capability.container-health-check // ecs.capability.private-registry-authentication.secretsmanager func (agent *ecsAgent) capabilities() ([]*ecs.Attribute, error) { var capabilities []*ecs.Attribute if !agent.cfg.PrivilegedDisabled { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+"privileged-container") } supportedVersions := make(map[dockerclient.DockerVersion]bool) // Determine API versions to report as supported. Supported versions are also used for capability-enablement, except // logging drivers. 
for _, version := range agent.dockerClient.SupportedVersions() { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+"docker-remote-api."+string(version)) supportedVersions[version] = true } capabilities = agent.appendLoggingDriverCapabilities(capabilities) if agent.cfg.SELinuxCapable { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+"selinux") } if agent.cfg.AppArmorCapable { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+"apparmor") } capabilities = agent.appendTaskIamRoleCapabilities(capabilities, supportedVersions) capabilities, err := agent.appendTaskCPUMemLimitCapabilities(capabilities, supportedVersions) if err != nil { return nil, err } capabilities = agent.appendTaskENICapabilities(capabilities) capabilities = agent.appendDockerDependentCapabilities(capabilities, supportedVersions) // TODO: gate this on docker api version when ecs supported docker includes // credentials endpoint feature from upstream docker if agent.cfg.OverrideAWSLogsExecutionRole { capabilities = appendNameOnlyAttribute(capabilities, attributePrefix+"execution-role-awslogs") } capabilities = agent.appendVolumeDriverCapabilities(capabilities) // ecs agent version 1.19.0 supports private registry authentication using // aws secrets manager capabilities = appendNameOnlyAttribute(capabilities, attributePrefix+capabilityPrivateRegistryAuthASM) return capabilities, nil } func (agent *ecsAgent) appendDockerDependentCapabilities(capabilities []*ecs.Attribute, supportedVersions map[dockerclient.DockerVersion]bool) []*ecs.Attribute { if _, ok := supportedVersions[dockerclient.Version_1_19]; ok { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+"ecr-auth") capabilities = appendNameOnlyAttribute(capabilities, attributePrefix+"execution-role-ecr-pull") } if _, ok := supportedVersions[dockerclient.Version_1_24]; ok { // Docker health check was added in API 1.24 capabilities = appendNameOnlyAttribute(capabilities, attributePrefix+"container-health-check") } return capabilities } func (agent *ecsAgent) appendLoggingDriverCapabilities(capabilities []*ecs.Attribute) []*ecs.Attribute { knownVersions := make(map[dockerclient.DockerVersion]struct{}) // Determine known API versions. Known versions are used exclusively for logging-driver enablement, since none of // the structural API elements change. 
for _, version := range agent.dockerClient.KnownVersions() { knownVersions[version] = struct{}{} } for _, loggingDriver := range agent.cfg.AvailableLoggingDrivers { requiredVersion := dockerclient.LoggingDriverMinimumVersion[loggingDriver] if _, ok := knownVersions[requiredVersion]; ok { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+"logging-driver."+string(loggingDriver)) } } return capabilities } func (agent *ecsAgent) appendTaskIamRoleCapabilities(capabilities []*ecs.Attribute, supportedVersions map[dockerclient.DockerVersion]bool) []*ecs.Attribute { if agent.cfg.TaskIAMRoleEnabled { // The "task-iam-role" capability is supported for docker v1.7.x onwards // Refer https://github.com/docker/docker/blob/master/docs/reference/api/docker_remote_api.md // to lookup the table of docker supportedVersions to API supportedVersions if _, ok := supportedVersions[dockerclient.Version_1_19]; ok { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+capabilityTaskIAMRole) } else { seelog.Warn("Task IAM Role not enabled due to unsuppported Docker version") } } if agent.cfg.TaskIAMRoleEnabledForNetworkHost { // The "task-iam-role-network-host" capability is supported for docker v1.7.x onwards if _, ok := supportedVersions[dockerclient.Version_1_19]; ok { capabilities = appendNameOnlyAttribute(capabilities, capabilityPrefix+capabilityTaskIAMRoleNetHost) } else { seelog.Warn("Task IAM Role for Host Network not enabled due to unsuppported Docker version") } } return capabilities } func (agent *ecsAgent) appendTaskCPUMemLimitCapabilities(capabilities []*ecs.Attribute, supportedVersions map[dockerclient.DockerVersion]bool) ([]*ecs.Attribute, error) { if agent.cfg.TaskCPUMemLimit.Enabled() { if _, ok := supportedVersions[dockerclient.Version_1_22]; ok { capabilities = appendNameOnlyAttribute(capabilities, attributePrefix+capabilityTaskCPUMemLimit) } else if agent.cfg.TaskCPUMemLimit == config.ExplicitlyEnabled { // explicitly enabled -- return an error because we cannot fulfil an explicit request return nil, errors.New("engine: Task CPU + Mem limit cannot be enabled due to unsupported Docker version") } else { // implicitly enabled -- don't register the capability, but degrade gracefully seelog.Warn("Task CPU + Mem Limit disabled due to unsupported Docker version. API version 1.22 or greater is required.") agent.cfg.TaskCPUMemLimit = config.ExplicitlyDisabled } } return capabilities, nil } func (agent *ecsAgent) appendTaskENICapabilities(capabilities []*ecs.Attribute) []*ecs.Attribute { if agent.cfg.TaskENIEnabled { // The assumption here is that all of the dependecies for supporting the // Task ENI in the Agent have already been validated prior to the invocation of // the `agent.capabilities()` call capabilities = append(capabilities, &ecs.Attribute{ Name: aws.String(attributePrefix + taskENIAttributeSuffix), }) taskENIVersionAttribute, err := agent.getTaskENIPluginVersionAttribute() if err != nil { return capabilities } capabilities = append(capabilities, taskENIVersionAttribute) // We only care about AWSVPCBlockInstanceMetdata if Task ENI is enabled if agent.cfg.AWSVPCBlockInstanceMetdata { // If the Block Instance Metadata flag is set for AWS VPC networking mode, register a capability // indicating the same capabilities = append(capabilities, &ecs.Attribute{ Name: aws.String(attributePrefix + taskENIBlockInstanceMetadataAttributeSuffix), }) } } return capabilities } // getTaskENIPluginVersionAttribute returns the version information of the ECS // CNI plugins. 
It just executes the ENI plugin as the assumption is that these // plugins are packaged with the ECS Agent, which means all of the other plugins // should also emit the same version information. Also, the version information // doesn't contribute to placement decisions and just serves as additional // debugging information func (agent *ecsAgent) getTaskENIPluginVersionAttribute() (*ecs.Attribute, error) { version, err := agent.cniClient.Version(ecscni.ECSENIPluginName) if err != nil { seelog.Warnf( "Unable to determine the version of the plugin '%s': %v", ecscni.ECSENIPluginName, err) return nil, err } return &ecs.Attribute{ Name: aws.String(attributePrefix + cniPluginVersionSuffix), Value: aws.String(version), }, nil } func appendNameOnlyAttribute(attributes []*ecs.Attribute, name string) []*ecs.Attribute { return append(attributes, &ecs.Attribute{Name: aws.String(name)}) }
1
21,012
I missed these discussions, but is `"secrets-ssm-environment-variables"` what was agreed upon with cp?
aws-amazon-ecs-agent
go
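To make the string in question concrete: combined with `attributePrefix`, the new constant would advertise the attribute shown below. This is a standalone sketch; the registration call mentioned in the comment is an assumption based on how the other attribute-prefixed capabilities in this file are appended.

```go
// Sketch only: what the new capability name expands to once prefixed.
package main

import "fmt"

const (
	attributePrefix        = "ecs.capability."
	capabilitySecretEnvSSM = "secrets-ssm-environment-variables"
)

func main() {
	// Presumably registered via appendNameOnlyAttribute(capabilities,
	// attributePrefix+capabilitySecretEnvSSM), yielding:
	fmt.Println(attributePrefix + capabilitySecretEnvSSM)
	// Output: ecs.capability.secrets-ssm-environment-variables
}
```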
@@ -20,6 +20,7 @@ import ( "google.golang.org/protobuf/types/known/anypb" "gopkg.in/yaml.v3" + "github.com/gogo/protobuf/jsonpb" gatewayv1 "github.com/lyft/clutch/backend/api/config/gateway/v1" "github.com/lyft/clutch/backend/middleware/timeouts" )
1
package gateway import ( "bytes" "encoding/json" "flag" "fmt" "io/ioutil" "os" "path/filepath" "strconv" "strings" "text/template" "time" "go.uber.org/zap" "go.uber.org/zap/zapcore" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/anypb" "gopkg.in/yaml.v3" gatewayv1 "github.com/lyft/clutch/backend/api/config/gateway/v1" "github.com/lyft/clutch/backend/middleware/timeouts" ) type envFiles []string func (f *envFiles) String() string { return strings.Join(*f, ",") } func (f *envFiles) Set(value string) error { *f = append(*f, value) return nil } type Flags struct { ConfigPath string Template bool Validate bool EnvFiles envFiles } // Link register the struct vars globally for parsing by the flag library. func (f *Flags) Link() { flag.StringVar(&f.ConfigPath, "c", "clutch-config.yaml", "path to YAML configuration") flag.BoolVar(&f.Template, "template", false, "executes go templates on the configuration file") flag.BoolVar(&f.Validate, "validate", false, "validates the configuration file and exits") flag.Var(&f.EnvFiles, "env", "path to additional .env files to load") } // Parse command line arguments. func ParseFlags() *Flags { f := &Flags{} f.Link() flag.Parse() return f } func MustReadOrValidateConfig(f *Flags) *gatewayv1.Config { // Use a temporary logger to parse the configuration and output. tmpLogger := newTmpLogger().With(zap.String("file", f.ConfigPath)) var cfg gatewayv1.Config var seenCfgs []string consolidateConfigs(f.ConfigPath, &cfg, f, &seenCfgs) if err := cfg.Validate(); err != nil { tmpLogger.Fatal("validating configuration failed", zap.Error(err)) } if f.Validate { tmpLogger.Info("configuration validation was successful") os.Exit(0) } return &cfg } func contains(s *[]string, str string) bool { for _, v := range *s { if v == str { return true } } return false } func consolidateConfigs(cfgPath string, cfg *gatewayv1.Config, f *Flags, seen *[]string) { // Use a temporary logger to parse the configuration and output. tmpLogger := newTmpLogger().With(zap.String("file", cfgPath)) if contains(seen, cfgPath) { tmpLogger.Warn("ignoring duplicate extended config") return } var curCfg gatewayv1.Config if err := parseFile(cfgPath, &curCfg, f.Template); err != nil { tmpLogger.Fatal("parsing configuration failed", zap.Error(err)) } *seen = append(*seen, cfgPath) if len(curCfg.Extends) == 0 { proto.Merge(cfg, &curCfg) return } for _, c := range curCfg.Extends { if c == cfgPath { continue } consolidateConfigs(c, cfg, f, seen) } proto.Merge(cfg, &curCfg) } func executeTemplate(contents []byte) ([]byte, error) { tmpl := template.New("config").Funcs(map[string]interface{}{ "getenv": os.Getenv, "getboolenv": func(key string) bool { b, _ := strconv.ParseBool(os.Getenv(key)) return b }, }) tmpl, err := tmpl.Parse(string(contents)) if err != nil { return nil, err } var b bytes.Buffer if err := tmpl.Execute(&b, nil); err != nil { return nil, err } return b.Bytes(), nil } func parseFile(path string, pb proto.Message, template bool) error { // Get absolute path representation for better error message in case file not found. path, err := filepath.Abs(path) if err != nil { return err } // Read file. contents, err := ioutil.ReadFile(path) if err != nil { return err } // Execute templates if enabled. if template { contents, err = executeTemplate(contents) if err != nil { return err } } /* We want to support defining Go templates in the clutch-config that can be executed at a later time. 
Two issues that needed to be addressed: 1) os.ExpandEnv removes the $ and there isn't a way to provide an escape (open upstream issue: https://github.com/golang/go/issues/43482) 2) the config itself can be executed as a template, and so the nested templates would also be executed. Solution is to use Clutch-specific templating tokens in the config that are then replaced with the Go Template syntax 1) $$ in lieu of $ 2) [[ ]] in lieu of {{ }} */ tokenContent := bulkReplaceTemplateTokens(string(contents)) // Interpolate environment variables expandedContent := os.ExpandEnv(tokenContent) contents = []byte(replaceVarTemplateToken(expandedContent)) return parseYAML(contents, pb) } // swaps the Clutch "Action" tokens for the Go Template "Action" tokens // swaps the dollar signs with other characters, otherwise os.ExpandEnv // would remove the dollar signs func bulkReplaceTemplateTokens(data string) string { sanitize := strings.NewReplacer( "[[", "{{", "]]", "}}", "$$", "@#@", ) return sanitize.Replace(data) } // swaps the Clutch variable token with the Go Template variable token func replaceVarTemplateToken(data string) string { return strings.ReplaceAll(data, "@#@", "$") } func parseYAML(contents []byte, pb proto.Message) error { // Decode YAML. var rawConfig map[string]interface{} if err := yaml.Unmarshal(contents, &rawConfig); err != nil { return err } // Encode YAML to JSON. rawJSON, err := json.Marshal(rawConfig) if err != nil { return err } // Unmarshal JSON to proto object. if err := protojson.Unmarshal(rawJSON, pb); err != nil { return err } // All good! return nil } func newLogger(msg *gatewayv1.Logger) (*zap.Logger, error) { return newLoggerWithCore(msg, nil) } func newLoggerWithCore(msg *gatewayv1.Logger, zapCore zapcore.Core) (*zap.Logger, error) { var c zap.Config var opts []zap.Option if msg.GetPretty() { c = zap.NewDevelopmentConfig() opts = append(opts, zap.AddStacktrace(zap.ErrorLevel)) } else { c = zap.NewProductionConfig() } level := zap.NewAtomicLevel() levelName := "INFO" if msg.Level != gatewayv1.Logger_UNSPECIFIED { levelName = msg.Level.String() } if err := level.UnmarshalText([]byte(levelName)); err != nil { return nil, fmt.Errorf("could not parse log level %s", msg.Level.String()) } c.Level = level logger, err := c.Build(opts...) if err != nil { return nil, err } // If zapCore is set, create a new logger, this is currently only used in tests. if zapCore != nil { logger = zap.New(zapCore, opts...) } if len(msg.Namespace) > 0 { logger = logger.With(zap.Namespace(msg.Namespace)) } return logger, nil } func newTmpLogger() *zap.Logger { c := zap.NewProductionConfig() c.DisableStacktrace = true l, err := c.Build() if err != nil { panic(err) } return l } type validator interface { Validate() error } // Returns maximum timeout, where 0 is considered maximum (i.e. no timeout). func computeMaximumTimeout(cfg *gatewayv1.Timeouts) time.Duration { if cfg == nil { return timeouts.DefaultTimeout } ret := cfg.Default.AsDuration() for _, e := range cfg.Overrides { override := e.Timeout.AsDuration() if ret == 0 || override == 0 { return 0 } if override > ret { ret = override } } return ret } func validateAny(a *anypb.Any) error { if a == nil { return nil } m, err := a.UnmarshalNew() if err != nil { return err } if v, ok := m.(validator); ok { return v.Validate() } return nil }
1
11,998
What's the difference between this package and `github.com/golang/protobuf`? Or did VS Code just decide this was the package it wanted to use?
lyft-clutch
go
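On the question above: `github.com/gogo/protobuf` is a community fork of `github.com/golang/protobuf` with its own runtime and generated code; its `jsonpb` package keeps the older reader-based API, whereas the `protojson` package already imported in this file belongs to the newer `google.golang.org/protobuf` module and takes a byte slice. A hedged sketch of the two call shapes; the wrapper functions are hypothetical:

```go
// Sketch only: the two JSON-to-proto unmarshal APIs side by side.
package example

import (
	"bytes"

	gogojsonpb "github.com/gogo/protobuf/jsonpb"
	gogoproto "github.com/gogo/protobuf/proto"
	"google.golang.org/protobuf/encoding/protojson"
	"google.golang.org/protobuf/proto"
)

// Newer API (already used in parseYAML): takes a []byte.
func unmarshalProtojson(rawJSON []byte, pb proto.Message) error {
	return protojson.Unmarshal(rawJSON, pb)
}

// gogo fork: reads from an io.Reader and expects gogo-generated messages.
func unmarshalGogo(rawJSON []byte, pb gogoproto.Message) error {
	return gogojsonpb.Unmarshal(bytes.NewReader(rawJSON), pb)
}
```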
@@ -127,6 +127,7 @@ public class RepositoriesPanel extends StackPane { this.addButton.setText(tr("Add")); this.addButton.setOnAction((ActionEvent event) -> { AddRepositoryDialog dialog = new AddRepositoryDialog(); + dialog.initOwner(this.getParent().getScene().getWindow()); Optional<RepositoryLocation<? extends Repository>> successResult = dialog.showAndWait();
1
package org.phoenicis.javafx.views.mainwindow.settings; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.event.ActionEvent; import javafx.geometry.Insets; import javafx.geometry.Pos; import javafx.geometry.VPos; import javafx.scene.control.*; import javafx.scene.layout.*; import javafx.scene.text.Text; import org.phoenicis.javafx.views.common.TextWithStyle; import org.phoenicis.javafx.views.mainwindow.settings.addrepository.AddRepositoryDialog; import org.phoenicis.repository.RepositoryManager; import org.phoenicis.repository.location.RepositoryLocation; import org.phoenicis.repository.repositoryTypes.Repository; import org.phoenicis.settings.SettingsManager; import java.util.Optional; import static org.phoenicis.configuration.localisation.Localisation.tr; /** * This class represents the "Repositories" settings category * * @author marc * @since 23.04.17 */ public class RepositoriesPanel extends StackPane { private SettingsManager settingsManager; private RepositoryManager repositoryManager; private VBox vBox; private Text title; private GridPane repositoryGrid; private Text repositoryText; private VBox repositoryLayout; private ListView<RepositoryLocation<? extends Repository>> repositoryListView; private HBox repositoryButtonLayout; private Button addButton; private Button removeButton; private Label priorityHint; private GridPane refreshLayout; private Label refreshRepositoriesLabel; private Button refreshRepositoriesButton; private VBox overlay; private ObservableList<RepositoryLocation<? extends Repository>> repositories; /** * Constructor * * @param settingsManager The settings manager * @param repositoryManager The repository manager */ public RepositoriesPanel(SettingsManager settingsManager, RepositoryManager repositoryManager) { super(); this.settingsManager = settingsManager; this.repositoryManager = repositoryManager; this.repositories = FXCollections.observableArrayList(settingsManager.loadRepositoryLocations()); this.getStyleClass().add("containerConfigurationPane"); this.vBox = new VBox(); this.populateRepositoryGrid(); this.populateRepositoryLegend(); this.populateRepositoryRefresh(); VBox.setVgrow(repositoryGrid, Priority.ALWAYS); this.initializeRefreshCallback(); this.vBox.getChildren().setAll(title, repositoryGrid, priorityHint, refreshLayout); // overlay which is shown when repository is refreshed ProgressIndicator progressIndicator = new ProgressIndicator(); this.overlay = new VBox(progressIndicator); this.overlay.setAlignment(Pos.CENTER); this.getChildren().setAll(this.overlay, this.vBox); } private void initializeRefreshCallback() { repositoryManager.addCallbacks(categories -> { Platform.runLater(() -> { this.overlay.toBack(); this.vBox.setDisable(false); }); }, error -> { }); } private void populateRepositoryGrid() { this.title = new TextWithStyle(tr("Repositories Settings"), "title"); this.repositoryGrid = new GridPane(); this.repositoryGrid.getStyleClass().add("grid"); this.repositoryText = new TextWithStyle(tr("Repository:"), "captionTitle"); this.repositoryLayout = new VBox(); this.repositoryLayout.setSpacing(5); this.repositoryListView = new ListView<>(repositories); this.repositoryListView.setPrefHeight(0); this.repositoryListView.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE); this.repositoryListView.setEditable(true); this.repositoryListView.setCellFactory(param -> new DragableRepositoryListCell((repositoryUrl, toIndex) -> { 
this.repositoryManager.moveRepository(repositoryUrl, toIndex.intValue()); this.save(); })); this.repositoryButtonLayout = new HBox(); this.repositoryButtonLayout.setSpacing(5); this.addButton = new Button(); this.addButton.setText(tr("Add")); this.addButton.setOnAction((ActionEvent event) -> { AddRepositoryDialog dialog = new AddRepositoryDialog(); Optional<RepositoryLocation<? extends Repository>> successResult = dialog.showAndWait(); successResult.ifPresent(repositoryLocation -> { repositories.add(repositoryLocation); this.save(); repositoryManager.addRepositories(0, repositoryLocation); }); }); this.removeButton = new Button(); this.removeButton.setText(tr("Remove")); this.removeButton.setOnAction((ActionEvent event) -> { RepositoryLocation<? extends Repository>[] toRemove = repositoryListView.getSelectionModel() .getSelectedItems().toArray(new RepositoryLocation[0]); repositories.removeAll(toRemove); this.save(); repositoryManager.removeRepositories(toRemove); }); this.repositoryButtonLayout.getChildren().addAll(addButton, removeButton); this.repositoryLayout.getChildren().addAll(repositoryListView, repositoryButtonLayout); VBox.setVgrow(repositoryListView, Priority.ALWAYS); this.repositoryGrid.add(repositoryText, 0, 0); this.repositoryGrid.add(repositoryLayout, 1, 0); GridPane.setHgrow(repositoryLayout, Priority.ALWAYS); GridPane.setVgrow(repositoryLayout, Priority.ALWAYS); GridPane.setValignment(repositoryText, VPos.TOP); } private void populateRepositoryLegend() { this.priorityHint = new Label(tr( "The value in front of each repository is its priority. The higher the priority is, the more important the scripts inside the repository are.")); this.priorityHint.setWrapText(true); this.priorityHint.setPadding(new Insets(10)); } private void populateRepositoryRefresh() { // Refresh Repositories this.refreshLayout = new GridPane(); this.refreshLayout.setHgap(20); this.refreshLayout.setVgap(10); this.refreshRepositoriesLabel = new Label( tr("Fetch updates for the repositories to retrieve the newest script versions")); this.refreshRepositoriesLabel.setWrapText(true); this.refreshRepositoriesButton = new Button(tr("Refresh Repositories")); this.refreshRepositoriesButton.setOnAction(event -> { this.vBox.setDisable(true); this.overlay.toFront(); repositoryManager.triggerRepositoryChange(); }); this.refreshLayout.add(refreshRepositoriesLabel, 0, 0); this.refreshLayout.add(refreshRepositoriesButton, 1, 0); GridPane.setHgrow(refreshRepositoriesLabel, Priority.ALWAYS); } private void save() { settingsManager.saveRepositories(repositories); } }
1
10,755
Just asking: What does this line do? Does it add the stylesheet from the parent to the dialog?
PhoenicisOrg-phoenicis
java
@@ -108,6 +108,7 @@ if __name__ == "__main__": "pytest-xdist==2.1.0", "pytest==6.1.1", "responses==0.10.*", + "scikit-learn<1.0.0", # scikit-learn 1.0 requires python 3.7 "snapshottest==0.6.0", "tox==3.14.2", "tox-pip-version==0.0.7",
1
from typing import Dict from setuptools import find_packages, setup # type: ignore def long_description() -> str: return """ ## Dagster Dagster is a data orchestrator for machine learning, analytics, and ETL. Dagster lets you define pipelines in terms of the data flow between reusable, logical components, then test locally and run anywhere. With a unified view of pipelines and the assets they produce, Dagster can schedule and orchestrate Pandas, Spark, SQL, or anything else that Python can invoke. Dagster is designed for data platform engineers, data engineers, and full-stack data scientists. Building a data platform with Dagster makes your stakeholders more independent and your systems more robust. Developing data pipelines with Dagster makes testing easier and deploying faster. """.strip() def get_version() -> str: version: Dict[str, str] = {} with open("dagster/version.py") as fp: exec(fp.read(), version) # pylint: disable=W0122 return version["__version__"] if __name__ == "__main__": setup( name="dagster", version=get_version(), author="Elementl", author_email="[email protected]", license="Apache-2.0", description="A data orchestrator for machine learning, analytics, and ETL.", long_description=long_description(), long_description_content_type="text/markdown", url="https://github.com/dagster-io/dagster", classifiers=[ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", ], packages=find_packages(exclude=["dagster_tests"]), package_data={ "dagster": [ "dagster/core/storage/event_log/sqlite/alembic/*", "dagster/core/storage/runs/sqlite/alembic/*", "dagster/core/storage/schedules/sqlite/alembic/*", "dagster/generate/new_project/*", "dagster/grpc/protos/*", ] }, include_package_data=True, install_requires=[ # cli "click>=5.0", "coloredlogs>=6.1, <=14.0", # https://github.com/dagster-io/dagster/issues/4167 "Jinja2<3.0", "PyYAML>=5.1", # core (not explicitly expressed atm) # alembic 1.6.3 broke our migrations: https://github.com/sqlalchemy/alembic/issues/848 # alembic 1.7.0 is a breaking change "alembic>=1.2.1,!=1.6.3,<1.7.0", "croniter>=0.3.34", "grpcio>=1.32.0", # ensure version we require is >= that with which we generated the grpc code (set in dev-requirements) "grpcio-health-checking>=1.32.0", "packaging>=20.9", "pendulum", "protobuf>=3.13.0", # ensure version we require is >= that with which we generated the proto code (set in dev-requirements) "python-dateutil", "pytz", "rx>=1.6,<2", # https://github.com/dagster-io/dagster/issues/4089 "tabulate", "tqdm", "typing_compat", "sqlalchemy>=1.0", "toposort>=1.0", "watchdog>=0.8.3", 'psutil >= 1.0; platform_system=="Windows"', # https://github.com/mhammond/pywin32/issues/1439 'pywin32 != 226; platform_system=="Windows"', "docstring-parser", ], extras_require={ "docker": ["docker"], "test": [ "astroid>=2.3.3,<2.5", "black==20.8b1", "coverage==5.3", "docker", "flake8>=3.7.8", "freezegun>=0.3.15", "grpcio-tools==1.32.0", "isort>=4.3.21,<5", "mock==3.0.5", "protobuf==3.13.0", # without this, pip will install the most up-to-date protobuf "pylint==2.6.0", "pytest-cov==2.10.1", "pytest-dependency==0.5.1", "pytest-mock==3.3.1", "pytest-rerunfailures==10.0", "pytest-runner==5.2", "pytest-xdist==2.1.0", "pytest==6.1.1", "responses==0.10.*", "snapshottest==0.6.0", "tox==3.14.2", "tox-pip-version==0.0.7", "tqdm==4.48.0", # pylint crash 48.1+ "yamllint", ], }, entry_points={ "console_scripts": [ 
"dagster = dagster.cli:main", "dagster-daemon = dagster.daemon.cli:main", ] }, )
1
17,134
Including the scikit-learn dependency here would pull in scikit-learn for everyone who depends on Dagster. If you put it in the setup.py under docs_snippets, we'd avoid that problem (although I think it's already there).
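A minimal sketch of the alternative this reviewer describes: keeping the `scikit-learn<1.0.0` pin in the docs-snippets package's own `setup.py` rather than in dagster's core dependency list, so only users of that package pull in scikit-learn. The file path, package name, and surrounding fields below are assumptions for illustration, not taken from the record above.

```python
# Hypothetical examples/docs_snippets/setup.py -- illustrative only.
from setuptools import find_packages, setup

setup(
    name="docs_snippets",
    packages=find_packages(exclude=["docs_snippets_tests"]),
    install_requires=[
        "dagster",
        # Pinned here so the constraint only affects the docs snippets;
        # scikit-learn 1.0 requires Python >= 3.7.
        "scikit-learn<1.0.0",
    ],
)
```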
dagster-io-dagster
py
@@ -274,7 +274,6 @@ Status DataCollectExecutor::collectMultiplePairShortestPath(const std::vector<st Status DataCollectExecutor::collectPathProp(const std::vector<std::string>& vars) { DataSet ds; ds.colNames = colNames_; - DCHECK(!ds.colNames.empty()); // 0: vertices's props, 1: Edges's props 2: paths without prop DCHECK_EQ(vars.size(), 3);
1
/* Copyright (c) 2020 vesoft inc. All rights reserved. * * This source code is licensed under Apache 2.0 License, * attached with Common Clause Condition 1.0, found in the LICENSES directory. */ #include "graph/executor/query/DataCollectExecutor.h" #include "graph/planner/plan/Query.h" #include "graph/util/ScopedTimer.h" namespace nebula { namespace graph { folly::Future<Status> DataCollectExecutor::execute() { return doCollect().ensure([this]() { result_ = Value::kEmpty; colNames_.clear(); }); } folly::Future<Status> DataCollectExecutor::doCollect() { SCOPED_TIMER(&execTime_); auto* dc = asNode<DataCollect>(node()); colNames_ = dc->colNames(); auto vars = dc->vars(); switch (dc->kind()) { case DataCollect::DCKind::kSubgraph: { NG_RETURN_IF_ERROR(collectSubgraph(vars)); break; } case DataCollect::DCKind::kRowBasedMove: { NG_RETURN_IF_ERROR(rowBasedMove(vars)); break; } case DataCollect::DCKind::kMToN: { NG_RETURN_IF_ERROR(collectMToN(vars, dc->step(), dc->distinct())); break; } case DataCollect::DCKind::kBFSShortest: { NG_RETURN_IF_ERROR(collectBFSShortest(vars)); break; } case DataCollect::DCKind::kAllPaths: { NG_RETURN_IF_ERROR(collectAllPaths(vars)); break; } case DataCollect::DCKind::kMultiplePairShortest: { NG_RETURN_IF_ERROR(collectMultiplePairShortestPath(vars)); break; } case DataCollect::DCKind::kPathProp: { NG_RETURN_IF_ERROR(collectPathProp(vars)); break; } default: LOG(FATAL) << "Unknown data collect type: " << static_cast<int64_t>(dc->kind()); } ResultBuilder builder; builder.value(Value(std::move(result_))).iter(Iterator::Kind::kSequential); return finish(builder.build()); } Status DataCollectExecutor::collectSubgraph(const std::vector<std::string>& vars) { DataSet ds; ds.colNames = std::move(colNames_); std::unordered_set<std::tuple<Value, EdgeType, EdgeRanking, Value>> uniqueEdges; for (auto i = vars.begin(); i != vars.end(); ++i) { const auto& hist = ectx_->getHistory(*i); for (auto j = hist.begin(); j != hist.end(); ++j) { if (i == vars.begin() && j == hist.end() - 1) { continue; } auto iter = (*j).iter(); if (!iter->isGetNeighborsIter()) { std::stringstream msg; msg << "Iterator should be kind of GetNeighborIter, but was: " << iter->kind(); return Status::Error(msg.str()); } List vertices; List edges; auto* gnIter = static_cast<GetNeighborsIter*>(iter.get()); auto originVertices = gnIter->getVertices(); for (auto& v : originVertices.values) { if (UNLIKELY(!v.isVertex())) { continue; } vertices.emplace_back(std::move(v)); } auto originEdges = gnIter->getEdges(); for (auto& edge : originEdges.values) { if (UNLIKELY(!edge.isEdge())) { continue; } const auto& e = edge.getEdge(); auto edgeKey = std::make_tuple(e.src, e.type, e.ranking, e.dst); if (uniqueEdges.emplace(std::move(edgeKey)).second) { edges.emplace_back(std::move(edge)); } } ds.rows.emplace_back(Row({std::move(vertices), std::move(edges)})); } } result_.setDataSet(std::move(ds)); return Status::OK(); } Status DataCollectExecutor::rowBasedMove(const std::vector<std::string>& vars) { DataSet ds; ds.colNames = std::move(colNames_); DCHECK(!ds.colNames.empty()); size_t cap = 0; for (auto& var : vars) { auto& result = ectx_->getResult(var); auto iter = result.iter(); cap += iter->size(); } ds.rows.reserve(cap); for (auto& var : vars) { auto& result = ectx_->getResult(var); auto iter = result.iter(); if (iter->isSequentialIter() || iter->isPropIter()) { auto* seqIter = static_cast<SequentialIter*>(iter.get()); for (; seqIter->valid(); seqIter->next()) { ds.rows.emplace_back(seqIter->moveRow()); } } else { return 
Status::Error("Iterator should be kind of SequentialIter."); } } result_.setDataSet(std::move(ds)); return Status::OK(); } Status DataCollectExecutor::collectMToN(const std::vector<std::string>& vars, const StepClause& mToN, bool distinct) { DataSet ds; ds.colNames = std::move(colNames_); DCHECK(!ds.colNames.empty()); std::unordered_set<const Row*> unique; // itersHolder keep life cycle of iters util this method return. std::vector<std::unique_ptr<Iterator>> itersHolder; for (auto& var : vars) { auto& hist = ectx_->getHistory(var); std::size_t histSize = hist.size(); DCHECK_GE(mToN.mSteps(), 1); std::size_t n = mToN.nSteps() > histSize ? histSize : mToN.nSteps(); for (auto i = mToN.mSteps() - 1; i < n; ++i) { auto iter = hist[i].iter(); if (iter->isSequentialIter()) { auto* seqIter = static_cast<SequentialIter*>(iter.get()); while (seqIter->valid()) { if (distinct && !unique.emplace(seqIter->row()).second) { seqIter->unstableErase(); } else { seqIter->next(); } } } else { std::stringstream msg; msg << "Iterator should be kind of SequentialIter, but was: " << iter->kind(); return Status::Error(msg.str()); } itersHolder.emplace_back(std::move(iter)); } } for (auto& iter : itersHolder) { if (iter->isSequentialIter()) { auto* seqIter = static_cast<SequentialIter*>(iter.get()); for (seqIter->reset(); seqIter->valid(); seqIter->next()) { ds.rows.emplace_back(seqIter->moveRow()); } } } result_.setDataSet(std::move(ds)); return Status::OK(); } Status DataCollectExecutor::collectBFSShortest(const std::vector<std::string>& vars) { // Will rewrite this method once we implement returning the props for the // path. return rowBasedMove(vars); } Status DataCollectExecutor::collectAllPaths(const std::vector<std::string>& vars) { DataSet ds; ds.colNames = std::move(colNames_); DCHECK(!ds.colNames.empty()); for (auto& var : vars) { auto& hist = ectx_->getHistory(var); for (auto& result : hist) { auto iter = result.iter(); if (iter->isSequentialIter()) { auto* seqIter = static_cast<SequentialIter*>(iter.get()); for (; seqIter->valid(); seqIter->next()) { ds.rows.emplace_back(seqIter->moveRow()); } } else { std::stringstream msg; msg << "Iterator should be kind of SequentialIter, but was: " << iter->kind(); return Status::Error(msg.str()); } } } result_.setDataSet(std::move(ds)); return Status::OK(); } Status DataCollectExecutor::collectMultiplePairShortestPath(const std::vector<std::string>& vars) { DataSet ds; ds.colNames = std::move(colNames_); DCHECK(!ds.colNames.empty()); // src : {dst : <cost, {path}>} std::unordered_map<Value, std::unordered_map<Value, std::pair<Value, std::vector<Path>>>> shortestPath; for (auto& var : vars) { auto& hist = ectx_->getHistory(var); for (auto& result : hist) { auto iter = result.iter(); if (!iter->isSequentialIter()) { std::stringstream msg; msg << "Iterator should be kind of SequentialIter, but was: " << iter->kind(); return Status::Error(msg.str()); } auto* seqIter = static_cast<SequentialIter*>(iter.get()); for (; seqIter->valid(); seqIter->next()) { auto& pathVal = seqIter->getColumn(kPathStr); auto cost = seqIter->getColumn(kCostStr); if (!pathVal.isPath()) { return Status::Error("Type error `%s', should be PATH", pathVal.typeName().c_str()); } auto& path = pathVal.getPath(); auto& src = path.src.vid; auto& dst = path.steps.back().dst.vid; if (shortestPath.find(src) == shortestPath.end() || shortestPath[src].find(dst) == shortestPath[src].end()) { auto& dstHist = shortestPath[src]; std::vector<Path> tempPaths = {std::move(path)}; dstHist.emplace(dst, 
std::make_pair(cost, std::move(tempPaths))); } else { auto oldCost = shortestPath[src][dst].first; if (cost < oldCost) { std::vector<Path> tempPaths = {std::move(path)}; shortestPath[src][dst].second.swap(tempPaths); } else if (cost == oldCost) { shortestPath[src][dst].second.emplace_back(std::move(path)); } else { continue; } } } } } // collect result for (auto& srcPath : shortestPath) { for (auto& dstPath : srcPath.second) { for (auto& path : dstPath.second.second) { Row row; row.values.emplace_back(std::move(path)); ds.rows.emplace_back(std::move(row)); } } } result_.setDataSet(std::move(ds)); return Status::OK(); } Status DataCollectExecutor::collectPathProp(const std::vector<std::string>& vars) { DataSet ds; ds.colNames = colNames_; DCHECK(!ds.colNames.empty()); // 0: vertices's props, 1: Edges's props 2: paths without prop DCHECK_EQ(vars.size(), 3); auto vIter = ectx_->getResult(vars[0]).iter(); std::unordered_map<Value, Vertex> vertexMap; vertexMap.reserve(vIter->size()); DCHECK(vIter->isPropIter()); for (; vIter->valid(); vIter->next()) { const auto& vertexVal = vIter->getVertex(); if (!vertexVal.isVertex()) { continue; } const auto& vertex = vertexVal.getVertex(); vertexMap.insert(std::make_pair(vertex.vid, std::move(vertex))); } auto eIter = ectx_->getResult(vars[1]).iter(); std::unordered_map<std::tuple<Value, EdgeType, EdgeRanking, Value>, Edge> edgeMap; edgeMap.reserve(eIter->size()); DCHECK(eIter->isPropIter()); for (; eIter->valid(); eIter->next()) { auto edgeVal = eIter->getEdge(); if (!edgeVal.isEdge()) { continue; } auto& edge = edgeVal.getEdge(); auto edgeKey = std::make_tuple(edge.src, edge.type, edge.ranking, edge.dst); edgeMap.insert(std::make_pair(std::move(edgeKey), std::move(edge))); } auto pIter = ectx_->getResult(vars[2]).iter(); DCHECK(pIter->isSequentialIter()); for (; pIter->valid(); pIter->next()) { auto& pathVal = pIter->getColumn(0); if (!pathVal.isPath()) { continue; } auto path = pathVal.getPath(); auto src = path.src.vid; auto found = vertexMap.find(src); if (found != vertexMap.end()) { path.src = found->second; } for (auto& step : path.steps) { auto dst = step.dst.vid; step.dst = vertexMap[dst]; auto type = step.type; auto ranking = step.ranking; if (type < 0) { dst = src; src = step.dst.vid; type = -type; } auto edgeKey = std::make_tuple(src, type, ranking, dst); auto edge = edgeMap[edgeKey]; step.props = edge.props; src = step.dst.vid; } ds.rows.emplace_back(Row({std::move(path)})); } VLOG(2) << "Path with props : \n" << ds; result_.setDataSet(std::move(ds)); return Status::OK(); } } // namespace graph } // namespace nebula
1
31,408
Why remove this?
vesoft-inc-nebula
cpp
@@ -145,13 +145,3 @@ def test_completion_item_focus(tree, count, expected, completionview): completionview.completion_item_focus(direction) idx = completionview.selectionModel().currentIndex() assert filtermodel.data(idx) == expected - - -def test_completion_item_focus_no_model(completionview): - """Test that next/prev won't crash with no model set. - - This can happen if completion.show and completion.auto-open are False. - Regression test for issue #1722. - """ - completionview.completion_item_focus('prev') - completionview.completion_item_focus('next')
1
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2016 Ryan Roden-Corrent (rcorre) <[email protected]> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Tests for the CompletionView Object.""" import unittest.mock import pytest from PyQt5.QtGui import QStandardItem, QColor from qutebrowser.completion import completionwidget from qutebrowser.completion.models import base, sortfilter @pytest.fixture def completionview(qtbot, status_command_stub, config_stub, win_registry, mocker): """Create the CompletionView used for testing.""" config_stub.data = { 'completion': { 'show': True, 'auto-open': True, 'scrollbar-width': 12, 'scrollbar-padding': 2, 'shrink': False, }, 'colors': { 'completion.fg': QColor(), 'completion.bg': QColor(), 'completion.alternate-bg': QColor(), 'completion.category.fg': QColor(), 'completion.category.bg': QColor(), 'completion.category.border.top': QColor(), 'completion.category.border.bottom': QColor(), 'completion.item.selected.fg': QColor(), 'completion.item.selected.bg': QColor(), 'completion.item.selected.border.top': QColor(), 'completion.item.selected.border.bottom': QColor(), 'completion.match.fg': QColor(), 'completion.scrollbar.fg': QColor(), 'completion.scrollbar.bg': QColor(), }, 'fonts': { 'completion': 'Comic Sans Monospace', 'completion.category': 'Comic Sans Monospace bold', } } # mock the Completer that the widget creates in its constructor mocker.patch('qutebrowser.completion.completer.Completer', autospec=True) view = completionwidget.CompletionView(win_id=0) qtbot.addWidget(view) return view def test_set_model(completionview): """Ensure set_model actually sets the model and expands all categories.""" model = base.BaseCompletionModel() filtermodel = sortfilter.CompletionFilterModel(model) for i in range(3): model.appendRow(QStandardItem(str(i))) completionview.set_model(filtermodel) assert completionview.model() is filtermodel for i in range(model.rowCount()): assert completionview.isExpanded(filtermodel.index(i, 0)) def test_set_pattern(completionview): model = sortfilter.CompletionFilterModel(base.BaseCompletionModel()) model.set_pattern = unittest.mock.Mock() completionview.set_model(model) completionview.set_pattern('foo') model.set_pattern.assert_called_with('foo') def test_maybe_resize_completion(completionview, config_stub, qtbot): """Ensure completion is resized only if shrink is True.""" with qtbot.assertNotEmitted(completionview.resize_completion): completionview.maybe_resize_completion() config_stub.data = {'completion': {'shrink': True}} with qtbot.waitSignal(completionview.resize_completion): completionview.maybe_resize_completion() @pytest.mark.parametrize('tree, count, expected', [ ([['Aa']], 1, 'Aa'), ([['Aa']], -1, 'Aa'), ([['Aa'], ['Ba']], 1, 'Aa'), ([['Aa'], ['Ba']], -1, 'Ba'), ([['Aa'], ['Ba']], 2, 'Ba'), ([['Aa'], ['Ba']], -2, 'Aa'), ([['Aa', 'Ab', 'Ac'], ['Ba', 'Bb'], ['Ca']], 3, 'Ac'), ([['Aa', 'Ab', 'Ac'], 
['Ba', 'Bb'], ['Ca']], 4, 'Ba'), ([['Aa', 'Ab', 'Ac'], ['Ba', 'Bb'], ['Ca']], 6, 'Ca'), ([['Aa', 'Ab', 'Ac'], ['Ba', 'Bb'], ['Ca']], 7, 'Aa'), ([['Aa', 'Ab', 'Ac'], ['Ba', 'Bb'], ['Ca']], -1, 'Ca'), ([['Aa', 'Ab', 'Ac'], ['Ba', 'Bb'], ['Ca']], -2, 'Bb'), ([['Aa', 'Ab', 'Ac'], ['Ba', 'Bb'], ['Ca']], -4, 'Ac'), ([[], ['Ba', 'Bb']], 1, 'Ba'), ([[], ['Ba', 'Bb']], -1, 'Bb'), ([[], [], ['Ca', 'Cb']], 1, 'Ca'), ([[], [], ['Ca', 'Cb']], -1, 'Cb'), ([['Aa'], []], 1, 'Aa'), ([['Aa'], []], -1, 'Aa'), ([['Aa'], [], []], 1, 'Aa'), ([['Aa'], [], []], -1, 'Aa'), ([[]], 1, None), ([[]], -1, None), ]) def test_completion_item_focus(tree, count, expected, completionview): """Test that on_next_prev_item moves the selection properly. Args: tree: Each list represents a completion category, with each string being an item under that category. count: Number of times to go forward (or back if negative). expected: item data that should be selected after going back/forward. """ model = base.BaseCompletionModel() for catdata in tree: cat = QStandardItem() model.appendRow(cat) for name in catdata: cat.appendRow(QStandardItem(name)) filtermodel = sortfilter.CompletionFilterModel(model, parent=completionview) completionview.set_model(filtermodel) direction = 'prev' if count < 0 else 'next' for _ in range(abs(count)): completionview.completion_item_focus(direction) idx = completionview.selectionModel().currentIndex() assert filtermodel.data(idx) == expected def test_completion_item_focus_no_model(completionview): """Test that next/prev won't crash with no model set. This can happen if completion.show and completion.auto-open are False. Regression test for issue #1722. """ completionview.completion_item_focus('prev') completionview.completion_item_focus('next')
1
15,738
Why remove this? It seems like we should keep this around as a regression test, unless we can guarantee this will never be called without a model set (does your new code guarantee that?)
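If the answer is that the new code does guarantee a model is always set before focus moves, the guard would presumably look something like the sketch below. This is a hypothetical illustration of such a guarantee, not code from the pull request under review.

```python
from PyQt5.QtWidgets import QTreeView


class CompletionView(QTreeView):
    """Sketch only: the kind of guard that would make the removed
    regression test redundant (see issue #1722)."""

    def completion_item_focus(self, which):
        if self.model() is None:
            # No completion model was ever set (completion.show and
            # completion.auto-open both False) -- nothing to select.
            return
        # ... normal 'next' / 'prev' selection handling would follow here ...
```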
qutebrowser-qutebrowser
py
@@ -27,6 +27,7 @@ const SET_PERMISSION_SCOPE_ERROR = 'SET_PERMISSION_SCOPE_ERROR'; export const INITIAL_STATE = { permissionError: null, + capabilities: global._googlesitekitUserData?.permissions || {}, }; export const actions = {
1
/** * core/user Data store: permission scopes. * * Site Kit by Google, Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * External dependencies */ import invariant from 'invariant'; // Actions const CLEAR_PERMISSION_SCOPE_ERROR = 'CLEAR_PERMISSION_SCOPE_ERROR'; const SET_PERMISSION_SCOPE_ERROR = 'SET_PERMISSION_SCOPE_ERROR'; export const INITIAL_STATE = { permissionError: null, }; export const actions = { /** * Clears the permission scope error, if one was previously set. * * @since 1.9.0 * @private * * @return {Object} Redux-style action. */ clearPermissionScopeError() { return { payload: {}, type: CLEAR_PERMISSION_SCOPE_ERROR, }; }, /** * Sets a permission error encountered by an API request. * * @since 1.9.0 * @private * * @param {Object} permissionError Permissions error object. * @return {Object} Redux-style action. */ setPermissionScopeError( permissionError ) { invariant( permissionError, 'permissionError is required.' ); return { payload: { permissionError }, type: SET_PERMISSION_SCOPE_ERROR, }; }, }; export const controls = {}; export const reducer = ( state, { type, payload } ) => { switch ( type ) { case CLEAR_PERMISSION_SCOPE_ERROR: { return { ...state, permissionError: null, }; } case SET_PERMISSION_SCOPE_ERROR: { const { permissionError } = payload; return { ...state, permissionError, }; } default: { return { ...state }; } } }; export const resolvers = {}; export const selectors = { /** * Gets the most recent permission error encountered by this user. * * @since 1.9.0 * @private * * @param {Object} state Data store's state. * @return {(Object|undefined)} Permission scope errors. Returns `null` if no error exists. */ getPermissionScopeError( state ) { const { permissionError } = state; return permissionError; }, }; export default { INITIAL_STATE, actions, controls, reducer, resolvers, selectors, };
1
30,366
Because this defaults to `{}` if `global._googlesitekitUserData?.permissions` is falsy, the checks below around `state.capabilities` always take the `!! capabilities === true` path. This shouldn't have a default value of `{}` if there's the possibility that `global._googlesitekitUserData?.permissions` can be `undefined`/`false`/`null` legitimately.
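A small illustration of the distinction the reviewer is drawing, written as a Python stand-in rather than the actual Site Kit JavaScript: in JS, `!!{}` is `true`, so an unconditional `{}` default makes "permissions never provided" indistinguishable from "permissions provided"; without the default, the missing state stays observable.

```python
# Illustrative only -- not Site Kit code.
raw = None  # pretend the page injected no permissions data at all

# With an unconditional default, "missing" collapses into "empty":
capabilities = raw if raw is not None else {}
print(capabilities)          # {}  -- was the data missing, or genuinely empty?

# Without the default, callers can still tell the two states apart:
capabilities = raw
print(capabilities is None)  # True -- "not loaded yet" remains observable
```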
google-site-kit-wp
js
@@ -234,3 +234,17 @@ def repeat_command(win_id, count=None): cmd = runners.last_command[mode_manager.mode] commandrunner = runners.CommandRunner(win_id) commandrunner.run(cmd[0], count if count is not None else cmd[1]) + [email protected](debug=True,name='debug-log-capacity') +def log_capacity(capacity: int): + """Choose number of lines for your log. + + Args: + capacity: Number of lines for the log. + """ + if capacity < 0: + raise cmdexc.CommandError("Please choose log capacity above 0.") + else: + log.ram_handler.change_log_capacity(capacity) + +
1
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2016 Florian Bruhin (The Compiler) <[email protected]> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Misc. utility commands exposed to the user.""" import functools import types import traceback try: import hunter except ImportError: hunter = None from qutebrowser.browser.webkit.network import qutescheme from qutebrowser.utils import log, objreg, usertypes, message, debug, utils from qutebrowser.commands import cmdutils, runners, cmdexc from qutebrowser.config import style from qutebrowser.misc import consolewidget from PyQt5.QtCore import QUrl # so it's available for :debug-pyeval from PyQt5.QtWidgets import QApplication # pylint: disable=unused-import @cmdutils.register(maxsplit=1, no_cmd_split=True) @cmdutils.argument('win_id', win_id=True) def later(ms: int, command, win_id): """Execute a command after some time. Args: ms: How many milliseconds to wait. command: The command to run, with optional args. """ if ms < 0: raise cmdexc.CommandError("I can't run something in the past!") commandrunner = runners.CommandRunner(win_id) app = objreg.get('app') timer = usertypes.Timer(name='later', parent=app) try: timer.setSingleShot(True) try: timer.setInterval(ms) except OverflowError: raise cmdexc.CommandError("Numeric argument is too large for " "internal int representation.") timer.timeout.connect( functools.partial(commandrunner.run_safely, command)) timer.timeout.connect(timer.deleteLater) timer.start() except: timer.deleteLater() raise @cmdutils.register(maxsplit=1, no_cmd_split=True) @cmdutils.argument('win_id', win_id=True) def repeat(times: int, command, win_id): """Repeat a given command. Args: times: How many times to repeat. command: The command to run, with optional args. """ if times < 0: raise cmdexc.CommandError("A negative count doesn't make sense.") commandrunner = runners.CommandRunner(win_id) for _ in range(times): commandrunner.run_safely(command) @cmdutils.register(hide=True) @cmdutils.argument('win_id', win_id=True) def message_error(win_id, text): """Show an error message in the statusbar. Args: text: The text to show. """ message.error(win_id, text) @cmdutils.register(hide=True) @cmdutils.argument('win_id', win_id=True) def message_info(win_id, text): """Show an info message in the statusbar. Args: text: The text to show. """ message.info(win_id, text) @cmdutils.register(hide=True) @cmdutils.argument('win_id', win_id=True) def message_warning(win_id, text): """Show a warning message in the statusbar. Args: text: The text to show. """ message.warning(win_id, text) @cmdutils.register(debug=True) @cmdutils.argument('typ', choices=['exception', 'segfault']) def debug_crash(typ='exception'): """Crash for debugging purposes. Args: typ: either 'exception' or 'segfault'. 
""" if typ == 'segfault': # From python's Lib/test/crashers/bogus_code_obj.py co = types.CodeType(0, 0, 0, 0, 0, b'\x04\x71\x00\x00', (), (), (), '', '', 1, b'') exec(co) raise Exception("Segfault failed (wat.)") else: raise Exception("Forced crash") @cmdutils.register(debug=True) def debug_all_objects(): """Print a list of all objects to the debug log.""" s = debug.get_all_objects() log.misc.debug(s) @cmdutils.register(debug=True) def debug_cache_stats(): """Print LRU cache stats.""" config_info = objreg.get('config').get.cache_info() style_info = style.get_stylesheet.cache_info() log.misc.debug('config: {}'.format(config_info)) log.misc.debug('style: {}'.format(style_info)) @cmdutils.register(debug=True) def debug_console(): """Show the debugging console.""" try: con_widget = objreg.get('debug-console') except KeyError: con_widget = consolewidget.ConsoleWidget() objreg.register('debug-console', con_widget) if con_widget.isVisible(): con_widget.hide() else: con_widget.show() @cmdutils.register(debug=True, maxsplit=0, no_cmd_split=True) def debug_trace(expr=""): """Trace executed code via hunter. Args: expr: What to trace, passed to hunter. """ if hunter is None: raise cmdexc.CommandError("You need to install 'hunter' to use this " "command!") try: eval('hunter.trace({})'.format(expr)) except Exception as e: raise cmdexc.CommandError("{}: {}".format(e.__class__.__name__, e)) @cmdutils.register(maxsplit=0, debug=True, no_cmd_split=True) def debug_pyeval(s, quiet=False): """Evaluate a python string and display the results as a web page. Args: s: The string to evaluate. quiet: Don't show the output in a new tab. """ try: r = eval(s) out = repr(r) except Exception: out = traceback.format_exc() qutescheme.pyeval_output = out if quiet: log.misc.debug("pyeval output: {}".format(out)) else: tabbed_browser = objreg.get('tabbed-browser', scope='window', window='last-focused') tabbed_browser.openurl(QUrl('qute:pyeval'), newtab=True) @cmdutils.register(debug=True) def debug_set_fake_clipboard(s=None): """Put data into the fake clipboard and enable logging, used for tests. Args: s: The text to put into the fake clipboard, or unset to enable logging. """ if s is None: utils.log_clipboard = True else: utils.fake_clipboard = s @cmdutils.register(hide=True) @cmdutils.argument('win_id', win_id=True) @cmdutils.argument('count', count=True) def repeat_command(win_id, count=None): """Repeat the last executed command. Args: count: Which count to pass the command. """ mode_manager = objreg.get('mode-manager', scope='window', window=win_id) if mode_manager.mode not in runners.last_command: raise cmdexc.CommandError("You didn't do anything yet.") cmd = runners.last_command[mode_manager.mode] commandrunner = runners.CommandRunner(win_id) commandrunner.run(cmd[0], count if count is not None else cmd[1])
1
15,574
Please add a space after the comma here (generally, with arguments there's always a space after commas).
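For reference, the decorator line from the diff with the requested spacing applied (only the space after the comma changes); the rest of the command is unchanged from the patch above.

```python
# cmdutils is already imported at the top of the patched module.
@cmdutils.register(debug=True, name='debug-log-capacity')
def log_capacity(capacity: int):
    """Choose number of lines for your log."""
```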
qutebrowser-qutebrowser
py
@@ -17,7 +17,7 @@ package org.hyperledger.besu.config.experimental; import picocli.CommandLine.Option; /** - * Flags defined in those class must be used with cautious, and strictly reserved to experimental + * Flags defined in those class must be used with caution, and strictly reserved to experimental * EIPs. */ public class ExperimentalEIPs {
1
/* * Copyright ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ package org.hyperledger.besu.config.experimental; import picocli.CommandLine.Option; /** * Flags defined in those class must be used with cautious, and strictly reserved to experimental * EIPs. */ public class ExperimentalEIPs { // To make it easier for tests to reset the value to default public static final boolean EIP1559_ENABLED_DEFAULT_VALUE = false; @Option( hidden = true, names = {"--Xeip1559-enabled"}, description = "Enable experimental EIP-1559 fee market change (default: ${DEFAULT-VALUE})", arity = "1") public static boolean eip1559Enabled = EIP1559_ENABLED_DEFAULT_VALUE; // To make it easier for tests to reset the value to default public static final boolean BERLIN_ENABLED_DEFAULT_VALUE = false; @Option( hidden = true, names = {"--Xberlin-enabled"}, description = "Enable non-finalized Berlin features (default: ${DEFAULT-VALUE})", arity = "1") public static boolean berlinEnabled = BERLIN_ENABLED_DEFAULT_VALUE; public static void eip1559MustBeEnabled() { if (!eip1559Enabled) { throw new RuntimeException("EIP-1559 feature flag must be enabled --Xeip1559-enabled"); } } @Option( hidden = true, names = {"--Xeip1559-basefee-max-change-denominator"}, arity = "1") public static Long basefeeMaxChangeDenominator = 8L; @Option( hidden = true, names = {"--Xeip1559-initial-base-fee"}, arity = "1") public static Long initialBasefee = 1000000000L; @Option( hidden = true, names = {"--Xeip1559-slack-coefficient"}, arity = "1") public static Long slackCoefficient = 2L; }
1
23,941
From the context, should it say "Flags defined in this class must be used with caution..." ?
hyperledger-besu
java
@@ -0,0 +1,19 @@ +module.exports = { + roots: [ + "<rootDir>/javascript/grid-ui/src" + ], + testMatch: [ + "<rootDir>/javascript/grid-ui/src/tests/**/*.test.tsx" + ], + transform: { + "^.+\\.(ts|tsx)$": "ts-jest" + }, + moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"], + snapshotSerializers: ["enzyme-to-json/serializer"], + setupFilesAfterEnv: ["<rootDir>/setupTests.ts"], + testEnvironment: "jsdom", + moduleNameMapper: { + ".+\\.(svg|png|jpg|css)$": "identity-obj-proxy", + "selenium/javascript/grid-ui/(.*)": "<rootDir>/$1" + } +}
1
1
18,309
We don't need this file; we can use the one that is in the grid-ui directory.
SeleniumHQ-selenium
py
@@ -28,6 +28,7 @@ const ( infoNoAccounts = "Did not find any account. Please import or create a new one." infoRenamedAccount = "Renamed account '%s' to '%s'" infoImportedKey = "Imported %s" + infoExportedKey = "Imported key for account %s: \"%s\"" infoImportedNKeys = "Imported %d key%s" infoCreatedNewAccount = "Created new account with address %s" errorNameAlreadyTaken = "The account name '%s' is already taken, please choose another."
1
// Copyright (C) 2019 Algorand, Inc. // This file is part of go-algorand // // go-algorand is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as // published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // go-algorand is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with go-algorand. If not, see <https://www.gnu.org/licenses/>. package main const ( // General errorNoDataDirectory = "Data directory not specified. Please use -d or set $ALGORAND_DATA in your environment. Exiting." errorOneDataDirSupported = "One one data directory can be specified for this command." errorRequestFail = "Error processing command: %s" errorGenesisIDFail = "Error determining kmd folder (%s). Ensure the node is running in %s." errorDirectoryNotExist = "Specified directory '%s' does not exist." // Account infoNoAccounts = "Did not find any account. Please import or create a new one." infoRenamedAccount = "Renamed account '%s' to '%s'" infoImportedKey = "Imported %s" infoImportedNKeys = "Imported %d key%s" infoCreatedNewAccount = "Created new account with address %s" errorNameAlreadyTaken = "The account name '%s' is already taken, please choose another." errorNameDoesntExist = "An account named '%s' does not exist." infoSetAccountToDefault = "Set account '%s' to be the default account" errorSigningTX = "Couldn't sign tx with kmd: %s" errorOnlineTX = "Couldn't sign tx: %s (for multisig accounts, write tx to file and sign manually)" errorConstructingTX = "Couldn't construct tx: %s" errorBroadcastingTX = "Couldn't broadcast tx with algod: %s" warnMultisigDuplicatesDetected = "Warning: one or more duplicate addresses detected in multisig account creation. This will effectively give the duplicated address(es) extra signature weight. Continuing multisig account creation." errLastRoundInvalid = "roundLastValid needs to be well after the current round (%d)" errExistingPartKey = "Account already has a participation key valid at least until roundLastValid (%d) - current is %d" // KMD infoKMDStopped = "Stopped kmd" infoKMDAlreadyStarted = "kmd is already running" infoKMDAlreadyStopped = "kmd doesn't appear to be running" infoKMDStarted = "Successfully started kmd" errorKMDFailedToStart = "Failed to start kmd: %s" errorKMDFailedToStop = "Failed to stop kmd: %s" // Node infoNodeStart = "Algorand node successfully started!" infoNodeAlreadyStarted = "Algorand node was already started!" infoTryingToStopNode = "Trying to stop the node..." infoNodeSuccessfullyStopped = "The node was successfully stopped." infoNodeStatus = "Last committed block: %d\nTime since last block: %s\nSync Time: %s\nLast consensus protocol: %s\nNext consensus protocol: %s\nRound for next consensus protocol: %d\nNext consensus protocol supported: %v" errorNodeNotDetected = "Algorand node does not appear to be running: %s" errorNodeStatus = "Cannot contact Algorand node: %s." 
errorNodeFailedToStart = "Algorand node failed to start: %s" errorNodeRunning = "Node must be stopped before writing APIToken" errorNodeFailGenToken = "Cannot generate API token: %s" errorKill = "Cannot kill node: %s" errorCloningNode = "Error cloning the node: %s" infoNodeCloned = "Node cloned successfully to: %s" infoNodeWroteToken = "Successfully wrote new API token: %s" infoNodePendingTxnsDescription = "Pending Transactions (Truncated max=%d, Total in pool=%d): " infoNodeNoPendingTxnsDescription = "None" infoDataDir = "[Data Directory: %s]" errLoadingConfig = "Error loading Config file from '%s': %v" // Clerk infoTxIssued = "Sent %d MicroAlgos from account %s to address %s, transaction ID: %s. Fee set to %d" infoTxCommitted = "Transaction %s committed in round %d" infoTxPending = "Transaction %s still pending as of round %d" malformedNote = "Cannot base64-decode note %s: %s" fileReadError = "Cannot read file %s: %s" fileWriteError = "Cannot write file %s: %s" txDecodeError = "Cannot decode transactions from %s: %s" txDupError = "Duplicate transaction %s in %s" txLengthError = "Transaction list length mismatch" txMergeMismatch = "Cannot merge transactions: transaction IDs differ" txMergeError = "Cannot merge signatures: %v" txNoFilesError = "No input filenames specified" soFlagError = "-s is not meaningful without -o" infoRawTxIssued = "Raw transaction ID %s issued" txPoolError = "Transaction %s kicked out of local node pool: %s" infoAutoFeeSet = "Automatically set fee to %d MicroAlgos" loggingNotConfigured = "Remote logging is not currently configured and won't be enabled" loggingNotEnabled = "Remote logging is current disabled" loggingEnabled = "Remote logging is enabled. Node = %s, Guid = %s" infoNetworkAlreadyExists = "Network Root Directory '%s' already exists" errorCreateNetwork = "Error creating private network: %s" infoNetworkCreated = "Network %s created under %s" errorLoadingNetwork = "Error loading deployed network: %s" errorStartingNetwork = "Error starting deployed network: %s" infoNetworkStarted = "Network Started under %s" infoNetworkStopped = "Network Stopped under %s" infoNetworkDeleted = "Network Deleted under %s" // Wallet infoRecoveryPrompt = "Please type your recovery mnemonic below, and hit return when you are done: " infoChoosePasswordPrompt = "Please choose a password for wallet '%s': " infoPasswordConfirmation = "Please confirm the password: " infoCreatingWallet = "Creating wallet..." infoCreatedWallet = "Created wallet '%s'" infoBackupExplanation = "Your new wallet has a backup phrase that can be used for recovery.\nKeeping this backup phrase safe is extremely important.\nWould you like to see it now? (Y/n): " infoPrintedBackupPhrase = "Your backup phrase is printed below.\nKeep this information safe -- never share it with anyone!" infoBackupPhrase = "\n\x1B[32m%s\033[0m" infoNoWallets = "No wallets found. 
You can create a wallet with `goal wallet new`" errorCouldntCreateWallet = "Couldn't create wallet: %s" errorCouldntInitializeWallet = "Couldn't initialize wallet: %s" errorCouldntExportMDK = "Couldn't export master derivation key: %s" errorCouldntMakeMnemonic = "Couldn't make mnemonic: %s" errorCouldntListWallets = "Couldn't list wallets: %s" errorPasswordConfirmation = "Password confirmation did not match" errorBadMnemonic = "Problem with mnemonic: %s" errorBadRecoveredKey = "Recovered invalid key" errorFailedToReadResponse = "Couldn't read response: %s" errorFailedToReadPassword = "Couldn't read password: %s" // Commands infoPasswordPrompt = "Please enter the password for wallet '%s': " infoSetWalletToDefault = "Set wallet '%s' to be the default wallet" errCouldNotListWallets = "Couldn't list wallets: %s" errNoWallets = "No wallets found. Create a new wallet with `goal wallet new [wallet name]`" errNoDefaultWallet = "No default wallet found. Specify a wallet by name with -w, or set a default with `goal wallet -f [wallet name]" errFindingWallet = "Couldn't find wallet: %s" errWalletNameAmbiguous = "More than one wallet named '%s' exists. Please remove any wallets with the same name from the kmd wallet directory" errWalletIDDuplicate = "More than one wallet with ID '%s' exists. Please remove any wallets with the samd ID from the kmd wallet directory" errGettingWalletName = "Couldn't get wallet name from ID '%s': %s" errWalletNotFound = "Wallet '%s' not found" errDefaultWalletNotFound = "Wallet with ID '%s' not found. Was the default wallet deleted?" errGettingToken = "Couldn't get token for wallet '%s' (ID: %s): %s" )
1
35,239
"Exported key for account"?
algorand-go-algorand
go
@@ -37,6 +37,7 @@ namespace pwiz.Skyline.Util.Extensions { public const string EXT_CSV = ".csv"; // Not L10N public const string EXT_TSV = ".tsv"; // Not L10N + public const string CRLF = "\r\n"; // Not L10N public static string FILTER_CSV {
1
/* * Original author: Brendan MacLean <brendanx .at. u.washington.edu>, * MacCoss Lab, Department of Genome Sciences, UW * * Copyright 2009 University of Washington - Seattle, WA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Linq; using System.Security.Cryptography; using System.Text; using System.Text.RegularExpressions; using pwiz.Common.SystemUtil; using pwiz.Skyline.Properties; namespace pwiz.Skyline.Util.Extensions { /// <summary> /// Extension functions for reading and writing text /// </summary> public static class TextUtil { public const string EXT_CSV = ".csv"; // Not L10N public const string EXT_TSV = ".tsv"; // Not L10N public static string FILTER_CSV { get { return FileDialogFilter(Resources.TextUtil_DESCRIPTION_CSV_CSV__Comma_delimited_, EXT_CSV); } } public static string FILTER_TSV { get { return FileDialogFilter(Resources.TextUtil_DESCRIPTION_TSV_TSV__Tab_delimited_, EXT_TSV); } } public const char SEPARATOR_CSV = ','; // Not L10N public const char SEPARATOR_CSV_INTL = ';'; // International CSV for comma-decimal locales // Not L10N public const char SEPARATOR_TSV = '\t'; // Not L10N public static readonly string SEPARATOR_TSV_STR = SEPARATOR_TSV.ToString(); public const char SEPARATOR_SPACE = ' '; // Not L10N public const string EXCEL_NA = "#N/A"; // Not L10N /// <summary> /// The CSV separator character for the current culture. Like Excel, a comma /// is used unless the decimal separator is a comma. This allows exported CSV /// files to be imported directly into Excel on the same system. /// </summary> public static char CsvSeparator { get { return GetCsvSeparator(LocalizationHelper.CurrentCulture); } } /// <summary> /// The CSV separator character for a given culture. Like Excel, a comma /// is used unless the decimal separator is a comma. This allows exported CSV /// files to be imported directly into Excel on the same system. /// <param name="cultureInfo">The culture for which the separator is requested.</param> /// </summary> public static char GetCsvSeparator(CultureInfo cultureInfo) { return (Equals(SEPARATOR_CSV.ToString(CultureInfo.InvariantCulture), cultureInfo.NumberFormat.NumberDecimalSeparator) ? SEPARATOR_CSV_INTL : SEPARATOR_CSV); } /// <summary> /// Writes a text string as a value in a delimiter-separated value file, ensuring /// that characters are properly escaped. /// </summary> /// <param name="writer">The writer to use for output</param> /// <param name="text">The text value to output</param> /// <param name="separator">The separator being used</param> /// <param name="replace">Optional value for replacing unwanted characters instead of quoting string</param> public static void WriteDsvField(this TextWriter writer, string text, char separator, string replace = null) { writer.Write(text.ToDsvField(separator, replace)); } /// <summary> /// Converts a string to a field that can be safely written to a delimiter-separated value file. 
/// </summary> /// <param name="text">The text value of the field</param> /// <param name="separator">The separator being used</param> /// <param name="replace">Optional value for replacing unwanted characters instead of quoting string</param> public static string ToDsvField(this string text, char separator, string replace = null) { if (text == null) return string.Empty; var unwanted = new[] { '"', separator, '\r', '\n' }; // Not L10N if (text.IndexOfAny(unwanted) == -1) return text; if (!string.IsNullOrEmpty(replace)) return string.Join(replace, text.Split(unwanted)); return '"' + text.Replace("\"", "\"\"") + '"'; // Not L10N } /// <summary> /// Converts a list of strings to the fields in a comma-separated line that can be safely written to a comma-separated value file. /// </summary> /// <param name="fields">List of fields to be written in the comma-separated line</param> public static string ToCsvLine(this IEnumerable<string> fields) { return fields.ToDsvLine(CsvSeparator); } /// <summary> /// Converts a list of strings to the fields in a delimiter-separated line that can be safely writted to a delimiter-separated value file. /// </summary> /// <param name="fields">List of fields to be written in the delimiter-separated line</param> /// <param name="separator">The separator being used</param> public static string ToDsvLine(this IEnumerable<string> fields, char separator) { var sb = new StringBuilder(); foreach (string field in fields) { if (sb.Length > 0) sb.Append(separator); sb.Append(field.ToDsvField(separator)); } return sb.ToString(); } /// <summary> /// Splits a line of text in comma-separated value format into an array of fields. /// The function correctly handles quotation marks. /// </summary> /// <param name="line">The line to be split into fields</param> /// <returns>An array of field strings</returns> public static string[] ParseCsvFields(this string line) { return line.ParseDsvFields(SEPARATOR_CSV); } /// <summary> /// Splits a line of text in delimiter-separated value format into an array of fields. /// The function correctly handles quotation marks. /// (N.B. our quotation mark handling now differs from the (March 2018) behavior of Excel and Google Spreadsheets /// when dealing with somewhat absurd uses of quotes as found in our tests, but that seems to be OK for general use. /// </summary> /// <param name="line">The line to be split into fields</param> /// <param name="separator">The separator being used</param> /// <returns>An array of field strings</returns> public static string[] ParseDsvFields(this string line, char separator) { var listFields = new List<string>(); var sbField = new StringBuilder(); bool inQuotes = false; for (var chIndex = 0; chIndex < line.Length; chIndex++) { var ch = line[chIndex]; if (inQuotes) { if (ch == '"') // Not L10N { // Is this the closing quote, or is this an escaped quote? if (chIndex + 1 < line.Length && line[chIndex + 1] == '"') { sbField.Append(ch); // Treat "" as an escaped quote chIndex++; // Consume both quotes } else { inQuotes = false; } } else { sbField.Append(ch); } } else if (ch == '"') // Not L10N { if (sbField.Length == 0) // Quote at start of field is special case { inQuotes = true; } else { if (chIndex + 1 < line.Length && line[chIndex + 1] == '"') { sbField.Append(ch); // Treat "" as an escaped quote chIndex++; // Consume both quotes } else { // N.B. we effectively ignore a bare quote in an unquoted string. // This is technically an undefined behavior, so that's probably OK. 
// Excel and Google sheets treat it as a literal quote, but that // would be a change in our established behavior inQuotes = true; } } } else if (ch == separator) { listFields.Add(sbField.ToString()); sbField.Remove(0, sbField.Length); } else { sbField.Append(ch); } } listFields.Add(sbField.ToString()); return listFields.ToArray(); } /// <summary> /// Converts an invariant format DSV file to a locale-specific DSV file /// </summary> /// <param name="filePath">Path to the original file</param> /// <param name="outPath">Path to write the locale-specific file if necessary</param> /// <param name="headerLine">True if the input file has a header line</param> /// <returns>True if conversion was necessary and the output file was written</returns> public static bool WriteDsvToCsvLocal(string filePath, string outPath, bool headerLine) { if (CsvSeparator == SEPARATOR_CSV) return false; string[] fileLines = File.ReadAllLines(filePath); for (int i = 0; i < fileLines.Length; i++) { string line = fileLines[i]; bool tsv = line.Contains(SEPARATOR_TSV); if (!tsv) line = line.Replace(SEPARATOR_CSV, SEPARATOR_CSV_INTL); if (!headerLine || i > 0) line = ReplaceDecimalPoint(line, tsv); fileLines[i] = line; } File.WriteAllLines(outPath, fileLines); return true; } private static string ReplaceDecimalPoint(string line, bool tsv) { char separator = tsv ? SEPARATOR_TSV : SEPARATOR_CSV_INTL; var fields = line.Split(separator); for (int i = 0; i < fields.Length; i++) { string field = fields[i]; string fieldConverted = field .Replace(CultureInfo.InvariantCulture.NumberFormat.NumberDecimalSeparator, CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator); double fieldValue; // Convert if the field is numeric or contains modifications if (double.TryParse(fieldConverted, out fieldValue) || new Regex(@"\[[+-]\d+\.\d\]").IsMatch(field)) // Not L10N fields[i] = fieldConverted; } return string.Join(separator.ToString(), fields); } /// <summary> /// Parse a list of comma separated integers, as saved to XML. /// </summary> public static int[] ParseInts(string s) { return ArrayUtil.Parse(s, Convert.ToInt32, SEPARATOR_CSV); } /// <summary> /// Puts quotation marks before and after the text passed in /// </summary> public static string Quote(this string text) { return '"' + text + '"'; // Not L10N } /// <summary> /// This function can be used as a replacement for String.Join("\n", ...) /// </summary> /// <param name="lines">A set of strings to be on separate lines</param> /// <returns>A single string containing the original set separated by new lines</returns> public static string LineSeparate(IEnumerable<string> lines) { var sb = new StringBuilder(); foreach (string line in lines) { if (sb.Length > 0) sb.AppendLine(); sb.Append(line); } return sb.ToString(); } /// <summary> /// This function can be used as a replacement for String.Join("\n", ...) /// </summary> /// <param name="lines">A set of strings to be on separate lines</param> /// <returns>A single string containing the original set separated by new lines</returns> public static string LineSeparate(params string[] lines) { return LineSeparate(lines.AsEnumerable()); } /// <summary> /// This function can be used as a replacement for String.Join(" ", ...) 
/// </summary> /// <param name="values">A set of strings to be separated by spaces</param> /// <returns>A single string containing the original set separated by spaces</returns> public static string SpaceSeparate(IEnumerable<string> values) { var sb = new StringBuilder(); foreach (string value in values) { if (sb.Length > 0) sb.Append(SEPARATOR_SPACE); sb.Append(value); } return sb.ToString(); } /// <summary> /// This function can be used as a replacement for String.Join(" ", ...) /// </summary> /// <param name="values">A set of strings to be separated by spaces</param> /// <returns>A single string containing the original set separated by spaces</returns> public static string SpaceSeparate(params string[] values) { return SpaceSeparate(values.AsEnumerable()); } /// <summary> /// Convert a collection of strings to a TSV line for serialization purposes, /// watching out for tabs, CRLF, and existing escapes /// </summary> public static string ToEscapedTSV(IEnumerable<string> strings) { return string.Join(SEPARATOR_TSV_STR, strings.Select(s => s.EscapeTabAndCrLf())); } /// <summary> /// Create a collection of strings from a TSV line for deserialization purposes, /// watching out for tabs, CRLF, and existing escapes /// </summary> public static string[] FromEscapedTSV(this string str) { var strings = str.Split(SEPARATOR_TSV).Select(s => s.UnescapeTabAndCrLf()); return strings.ToArray(); } /// <summary> /// Convert tab and/or CRLF characters to printable form for serialization purposes /// </summary> public static string EscapeTabAndCrLf(this string str) { var sb = new StringBuilder(); var len = str.Length; for (int pos = 0; pos < len; pos++) { var c = str[pos]; switch (c) { case '\\': // Take care to preserve "c:\tmp" as "c:\\tmp" so it roundtrips properly sb.Append(c); sb.Append(c); break; case SEPARATOR_TSV: sb.Append('\\'); sb.Append('t'); break; case '\n': sb.Append('\\'); sb.Append('n'); break; case '\r': sb.Append('\\'); sb.Append('r'); break; default: sb.Append(c); break; } } return sb.ToString(); } /// <summary> /// Convert tab and/or CRLF characters from printable form for deserialization purposes /// </summary> public static string UnescapeTabAndCrLf(this string str) { var sb = new StringBuilder(); var len = str.Length; for (int pos = 0; pos < len; pos++) { var c = str[pos]; if (c == '\\' && pos < (len-1)) { var cc = str[pos+1]; switch (cc) { case '\\': sb.Append(c); pos++; break; case 't': sb.Append(SEPARATOR_TSV); pos++; break; case 'n': sb.Append('\n'); pos++; break; case 'r': sb.Append('\r'); pos++; break; default: sb.Append(c); break; } } else { sb.Append(c); } } return sb.ToString(); } /// <summary> /// Returns a filter string suitable for a common file dialog (e.g. "CSV (Comma delimited) (*.csv)|*.csv") /// </summary> /// <param name="description">The description of the filter</param> /// <param name="exts">The file extention(s), beginning with the period (e.g. ".csv")</param> public static string FileDialogFilter(string description, params string[] exts) { var sb = new StringBuilder(); foreach (var ext in exts) { if (sb.Length > 0) sb.Append(';'); // Not L10N sb.Append('*').Append(ext); // Not L10N } return string.Format("{0} ({1})|{1}", description, sb); // Not L10N } /// <summary> /// Returns a filter string suitable for a common file dialog (e.g. "CSV (Comma delimited) (*.csv)|*.csv") /// with the All Files filter appended. 
/// </summary> /// <param name="description">The description of the filter</param> /// <param name="ext">The file extention, beginning with the period (e.g. ".csv")</param> public static string FileDialogFilterAll(string description, string ext) { return FileDialogFiltersAll(FileDialogFilter(description, ext)); } /// <summary> /// Converts a set of file dialog filter strings into a single string containing all filters /// suitable for the Filter property on a common file dialog. /// </summary> /// <param name="filters">Filters to be joined</param> public static string FileDialogFilters(params string[] filters) { return string.Join("|", filters); // Not L10N } /// <summary> /// Converts a set of file dialog filter strings into a single string containing all filters, /// with an "All Files" filter appended, suitable for the Filter property on a common file dialog. /// </summary> /// <param name="filters">Filters to be joined</param> public static string FileDialogFiltersAll(params string[] filters) { var listFilters = filters.ToList(); listFilters.Add(FileDialogFilter(Resources.TextUtil_FileDialogFiltersAll_All_Files, ".*")); // Not L10N return string.Join("|", listFilters); // Not L10N } /// <summary> /// Encrypts a string. This encryption uses the user's (i.e. not machine) key, so it is /// appropriate for strings that are marked with the [UserScopedSetting]. /// It is not appropriate for any setting marked [ApplicationScopedSetting] /// </summary> public static string EncryptString(string str) { return Convert.ToBase64String(ProtectedData.Protect(Encoding.UTF8.GetBytes(str), null, DataProtectionScope.CurrentUser)); } public static string DecryptString(string str) { return Encoding.UTF8.GetString(ProtectedData.Unprotect(Convert.FromBase64String(str), null, DataProtectionScope.CurrentUser)); } /// <summary> /// Get a common prefix, if any, among a set of strings. /// </summary> /// <param name="values">The set of strings to test for a common prefix</param> /// <param name="minLen">Minimum length of the prefix below which empty string will be returned</param> /// <returns>The common prefix or empty string if none is found</returns> public static string GetCommonPrefix(this IEnumerable<string> values, int minLen = 1) { return values.GetCommonFix(minLen, (s, i) => s[i], (s, i) => s.Substring(0, i)); } /// <summary> /// Get a common suffix, if any, among a set of strings. /// </summary> /// <param name="values">The set of strings to test for a common suffix</param> /// <param name="minLen">Minimum length of the suffix below which empty string will be returned</param> /// <returns>The common suffix or empty string if none is found</returns> public static string GetCommonSuffix(this IEnumerable<string> values, int minLen = 1) { return values.GetCommonFix(minLen, (s, i) => s[s.Length - i - 1], (s, i) => s.Substring(s.Length - i)); } private static string GetCommonFix(this IEnumerable<string> values, int minLen, Func<string, int, char> getChar, Func<string, int, string> getSubString) { string commonFix = null; foreach (string value in values) { if (commonFix == null) { commonFix = value; continue; } if (commonFix == string.Empty) { break; } for (int i = 0; i < commonFix.Length; i++) { if (i >= value.Length || getChar(commonFix, i) != getChar(value, i)) { commonFix = getSubString(commonFix, i); break; } } } return commonFix != null && commonFix.Length >= minLen ? 
commonFix : String.Empty; } } /// <summary> /// Reads a comma-separated variable file, normally assuming the first line contains /// the names of the columns, and all following lines contain data for each column /// When ctor's optional hasHeaders arg == false, then columns are named "0", "1","2","3" etc. /// </summary> public class CsvFileReader : DsvFileReader { public CsvFileReader(string fileName, bool hasHeaders = true) : base(fileName, TextUtil.CsvSeparator, hasHeaders) { } public CsvFileReader(TextReader reader, bool hasHeaders = true) : base(reader, TextUtil.CsvSeparator, hasHeaders) { } } /// <summary> /// Reads a delimiter-separated variable file, normally assuming the first line contains /// the names of the columns, and all following lines contain data for each column. /// When ctor's optional hasHeaders arg == false, then columns are named "0", "1","2","3" etc. /// </summary> public class DsvFileReader { private char _separator; private string[] _currentFields; private string _titleLine; private bool _rereadTitleLine; // set true for first readline if the file didn't actually have a header line private TextReader _reader; public int NumberOfFields { get; private set; } public Dictionary<string, int> FieldDict { get; private set; } public List<string> FieldNames { get; private set; } public DsvFileReader(string fileName, char separator, bool hasHeaders=true) : this(new StreamReader(fileName), separator, hasHeaders) { } public DsvFileReader(TextReader reader, char separator, bool hasHeaders = true) { Initialize(reader, separator, hasHeaders); } public DsvFileReader(TextReader reader, char separator, IReadOnlyDictionary<string, string> headerSynonyms) { Initialize(reader, separator, true, headerSynonyms); } public void Initialize(TextReader reader, char separator, bool hasHeaders = true, IReadOnlyDictionary<string, string> headerSynonyms = null) { _separator = separator; _reader = reader; FieldNames = new List<string>(); FieldDict = new Dictionary<string, int>(); _titleLine = _reader.ReadLine(); // we will re-use this if it's not actually a header line _rereadTitleLine = !hasHeaders; // tells us whether or not to reuse the supposed header line on first read var fields = _titleLine.ParseDsvFields(separator); NumberOfFields = fields.Length; if (!hasHeaders) { // that wasn't really the header line, we just used it to get column count // replace with made up column names for (int i = 0; i < fields.Length; ++i) { fields[i] = string.Format("{0}", i ); // Not L10N } } for (int i = 0; i < fields.Length; ++i) { FieldNames.Add(fields[i]); FieldDict[fields[i]] = i; // Check to see if the given column name is actually a synonym for the internal canonical (no spaces, serialized) name string syn; if (headerSynonyms != null && headerSynonyms.TryGetValue(fields[i], out syn)) { if (!FieldDict.ContainsKey(syn)) { // Note the internal name for this field FieldDict.Add(syn, i); } } } } /// <summary> /// Read a line of text, storing the fields by name for retrieval using GetFieldByName. 
/// Outputs a list of fields (not indexed by name) /// </summary> /// <returns>Array of fields for the next line</returns> public string[] ReadLine() { var line = _rereadTitleLine?_titleLine:_reader.ReadLine(); // re-use title line on first read if it wasn't actually header info _rereadTitleLine = false; // we no longer need to re-use that first line if (line == null) return null; _currentFields = line.ParseDsvFields(_separator); if (_currentFields.Length != NumberOfFields) { throw new IOException(string.Format(Resources.DsvFileReader_ReadLine_Line__0__has__1__fields_when__2__expected_, line, _currentFields.Length, NumberOfFields)); } return _currentFields; } /// <summary> /// For the current line, outputs the field corresponding to the column name fieldName, or null if /// there is no such field name. /// </summary> /// <param name="fieldName">Title of the column for which to get current line data</param> /// <returns>Field value</returns> public string GetFieldByName(string fieldName) { int fieldIndex = GetFieldIndex(fieldName); return GetFieldByIndex(fieldIndex); } /// <summary> /// For the current line, outputs the field numbered fieldIndex /// </summary> /// <param name="fieldIndex">Index of the field on the current line to be output</param> /// <returns>Field value</returns> public string GetFieldByIndex(int fieldIndex) { return -1 < fieldIndex && fieldIndex < _currentFields.Length ?_currentFields[fieldIndex] : null; } /// <summary> /// Get the index of the field corresponding to the column title fieldName /// </summary> /// <param name="fieldName">Column title.</param> /// <returns>Field index</returns> public int GetFieldIndex(string fieldName) { if (!FieldDict.ContainsKey(fieldName)) return -1; return FieldDict[fieldName]; } /// <summary> /// If loading from a file, use this to dispose the text reader. /// </summary> public void Dispose() { _reader.Dispose(); } } }
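As a rough illustration of how the reader classes above are meant to be used, here is a minimal, hypothetical usage sketch (the file name "peptides.csv" and the column name "Sequence" are invented for the example, not taken from the original code); it opens a CSV file, iterates the rows, and looks fields up by header name, as the doc comments describe:

using System;

// Minimal usage sketch (illustrative only). CsvFileReader picks the
// locale-aware separator via TextUtil.CsvSeparator, reads the header line
// in its constructor, and then exposes each data row by index or by name.
var reader = new CsvFileReader("peptides.csv");   // hasHeaders defaults to true
try
{
    string[] fields;
    while ((fields = reader.ReadLine()) != null)  // null signals end of file
    {
        // Access by column title from the header line...
        string sequence = reader.GetFieldByName("Sequence");
        // ...or positionally by index.
        string firstField = reader.GetFieldByIndex(0);
        Console.WriteLine(sequence ?? firstField);
    }
}
finally
{
    // DsvFileReader does not implement IDisposable, so call Dispose explicitly.
    reader.Dispose();
}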
1
12,219
Normally we use Environment.NewLine unless you really want the separator to always be the literal "\r\n".
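To make the reviewer's point concrete, here is a small illustrative sketch (not code from the change under review) contrasting the platform-aware separator with a hard-coded one:

using System;
using System.Text;

// Environment.NewLine is "\r\n" on Windows but "\n" on Unix-like platforms,
// so it adapts to where the code runs; a hard-coded "\r\n" does not.
string[] lines = { "first", "second", "third" };

string platformJoined = string.Join(Environment.NewLine, lines); // usual choice
string windowsOnly    = string.Join("\r\n", lines);              // only if \r\n is truly required

// StringBuilder.AppendLine() also appends Environment.NewLine, which is why
// the LineSeparate helper above behaves like the platform-aware join.
var sb = new StringBuilder();
foreach (string line in lines)
{
    if (sb.Length > 0)
        sb.AppendLine();
    sb.Append(line);
}
string fromBuilder = sb.ToString(); // equals platformJoined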
ProteoWizard-pwiz
.cs
@@ -697,13 +697,6 @@ func (c *AuRa) verifyFamily(chain consensus.ChainHeaderReader, e consensus.Epoch return nil } -// VerifyHeaders is similar to VerifyHeader, but verifies a batch of headers. The -// method returns a quit channel to abort the operations and a results channel to -// retrieve the async verifications (the order is that of the input slice). -func (c *AuRa) VerifyHeaders(chain consensus.ChainHeaderReader, headers []*types.Header, _ []bool) error { - return nil -} - // VerifyUncles implements consensus.Engine, always returning an error for any // uncles as this consensus mechanism doesn't permit uncles. func (c *AuRa) VerifyUncles(chain consensus.ChainReader, header *types.Header, uncles []*types.Header) error {
1
// Copyright 2017 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package aura import ( "bytes" "container/list" "encoding/json" "fmt" "math/big" "sort" "sync" "time" lru "github.com/hashicorp/golang-lru" "github.com/holiman/uint256" libcommon "github.com/ledgerwatch/erigon-lib/common" "github.com/ledgerwatch/erigon-lib/kv" "github.com/ledgerwatch/erigon/accounts/abi" "github.com/ledgerwatch/erigon/common" "github.com/ledgerwatch/erigon/consensus" "github.com/ledgerwatch/erigon/consensus/aura/aurainterfaces" "github.com/ledgerwatch/erigon/consensus/aura/contracts" "github.com/ledgerwatch/erigon/consensus/clique" "github.com/ledgerwatch/erigon/core/state" "github.com/ledgerwatch/erigon/core/types" "github.com/ledgerwatch/erigon/crypto" "github.com/ledgerwatch/erigon/params" "github.com/ledgerwatch/erigon/rlp" "github.com/ledgerwatch/erigon/rpc" "github.com/ledgerwatch/log/v3" "github.com/ledgerwatch/secp256k1" "go.uber.org/atomic" ) const DEBUG_LOG_FROM = 999_999_999 /* Not implemented features from OS: - two_thirds_majority_transition - because no chains in OE where this is != MaxUint64 - means 1/2 majority used everywhere - emptyStepsTransition - same Repo with solidity sources: https://github.com/poanetwork/posdao-contracts */ type StepDurationInfo struct { TransitionStep uint64 TransitionTimestamp uint64 StepDuration uint64 } // EpochTransitionProof - Holds 2 proofs inside: ValidatorSetProof and FinalityProof type EpochTransitionProof struct { SignalNumber uint64 SetProof []byte FinalityProof []byte } // ValidatorSetProof - validator set proof type ValidatorSetProof struct { Header *types.Header Receipts types.Receipts } // FirstValidatorSetProof state-dependent proofs for the safe contract: // only "first" proofs are such. type FirstValidatorSetProof struct { // TODO: whaaat? here is no state! ContractAddress common.Address Header *types.Header } type EpochTransition struct { /// Block hash at which the transition occurred. BlockHash common.Hash /// Block number at which the transition occurred. BlockNumber uint64 /// "transition/epoch" proof from the engine combined with a finality proof. ProofRlp []byte } type Step struct { calibrate bool // whether calibration is enabled. inner *atomic.Uint64 // Planned durations of steps. durations []StepDurationInfo } func (s *Step) doCalibrate() { if s.calibrate { if !s.optCalibrate() { ctr := s.inner.Load() panic(fmt.Errorf("step counter under- or overflow: %d", ctr)) } } } // optCalibrate Calibrates the AuRa step number according to the current time. 
func (s *Step) optCalibrate() bool { now := time.Now().Second() var info StepDurationInfo i := 0 for _, d := range s.durations { if d.TransitionTimestamp >= uint64(now) { break } info = d i++ } if i == 0 { panic("durations cannot be empty") } if uint64(now) < info.TransitionTimestamp { return false } newStep := (uint64(now)-info.TransitionTimestamp)/info.StepDuration + info.TransitionStep s.inner.Store(newStep) return true } type PermissionedStep struct { inner *Step canPropose *atomic.Bool } type ReceivedStepHashes map[uint64]map[common.Address]common.Hash //BTreeMap<(u64, Address), H256> //nolint func (r ReceivedStepHashes) get(step uint64, author common.Address) (common.Hash, bool) { res, ok := r[step] if !ok { return common.Hash{}, false } result, ok := res[author] return result, ok } //nolint func (r ReceivedStepHashes) insert(step uint64, author common.Address, blockHash common.Hash) { res, ok := r[step] if !ok { res = map[common.Address]common.Hash{} r[step] = res } res[author] = blockHash } //nolint func (r ReceivedStepHashes) dropAncient(step uint64) { for i := range r { if i < step { delete(r, i) } } } //nolint type EpochManager struct { epochTransitionHash common.Hash // H256, epochTransitionNumber uint64 // BlockNumber finalityChecker *RollingFinality force bool } func NewEpochManager() *EpochManager { return &EpochManager{ finalityChecker: NewRollingFinality([]common.Address{}), force: true, } } func (e *EpochManager) noteNewEpoch() { e.force = true } // zoomValidators - Zooms to the epoch after the header with the given hash. Returns true if succeeded, false otherwise. // It's analog of zoom_to_after function in OE, but doesn't require external locking //nolint func (e *EpochManager) zoomToAfter(chain consensus.ChainHeaderReader, er consensus.EpochReader, validators ValidatorSet, hash common.Hash, call consensus.SystemCall) (*RollingFinality, uint64, bool) { var lastWasParent bool if e.finalityChecker.lastPushed != nil { lastWasParent = *e.finalityChecker.lastPushed == hash } // early exit for current target == chain head, but only if the epochs are // the same. if lastWasParent && !e.force { return e.finalityChecker, e.epochTransitionNumber, true } e.force = false // epoch_transition_for can be an expensive call, but in the absence of // forks it will only need to be called for the block directly after // epoch transition, in which case it will be O(1) and require a single // DB lookup. lastTransition, ok := epochTransitionFor2(chain, er, hash) if !ok { if lastTransition.BlockNumber > DEBUG_LOG_FROM { fmt.Printf("zoom1: %d\n", lastTransition.BlockNumber) } return e.finalityChecker, e.epochTransitionNumber, false } // extract other epoch set if it's not the same as the last. if lastTransition.BlockHash != e.epochTransitionHash { proof := &EpochTransitionProof{} if err := rlp.DecodeBytes(lastTransition.ProofRlp, proof); err != nil { panic(err) } first := proof.SignalNumber == 0 if lastTransition.BlockNumber > DEBUG_LOG_FROM { fmt.Printf("zoom2: %d,%d\n", lastTransition.BlockNumber, len(proof.SetProof)) } // use signal number so multi-set first calculation is correct. 
list, _, err := validators.epochSet(first, proof.SignalNumber, proof.SetProof, call) if err != nil { panic(fmt.Errorf("proof produced by this engine is invalid: %w", err)) } epochSet := list.validators log.Trace("[aura] Updating finality checker with new validator set extracted from epoch", "num", lastTransition.BlockNumber) e.finalityChecker = NewRollingFinality(epochSet) if proof.SignalNumber >= DEBUG_LOG_FROM { fmt.Printf("new rolling finality: %d\n", proof.SignalNumber) for i := 0; i < len(epochSet); i++ { fmt.Printf("\t%x\n", epochSet[i]) } } } e.epochTransitionHash = lastTransition.BlockHash e.epochTransitionNumber = lastTransition.BlockNumber return e.finalityChecker, e.epochTransitionNumber, true } /// Get the transition to the epoch the given parent hash is part of /// or transitions to. /// This will give the epoch that any children of this parent belong to. /// /// The block corresponding the the parent hash must be stored already. //nolint func epochTransitionFor2(chain consensus.ChainHeaderReader, e consensus.EpochReader, parentHash common.Hash) (transition EpochTransition, ok bool) { //TODO: probably this version of func doesn't support non-canonical epoch transitions h := chain.GetHeaderByHash(parentHash) if h == nil { return transition, false } num, hash, transitionProof, err := e.FindBeforeOrEqualNumber(h.Number.Uint64()) if err != nil { panic(err) } if transitionProof == nil { panic("genesis epoch transition must already be set") } return EpochTransition{BlockNumber: num, BlockHash: hash, ProofRlp: transitionProof}, true } //nolint func epochTransitionFor(chain consensus.ChainHeaderReader, e consensus.EpochReader, parentHash common.Hash) (transition EpochTransition, ok bool) { // slow path: loop back block by block for { h := chain.GetHeaderByHash(parentHash) if h == nil { return transition, false } // look for transition in database. transitionProof, err := e.GetEpoch(h.Hash(), h.Number.Uint64()) if err != nil { panic(err) } if transitionProof != nil { return EpochTransition{ BlockNumber: h.Number.Uint64(), BlockHash: h.Hash(), ProofRlp: transitionProof, }, true } // canonical hash -> fast breakout: // get the last epoch transition up to this block. // // if `block_hash` is canonical it will only return transitions up to // the parent. canonical := chain.GetHeaderByNumber(h.Number.Uint64()) if canonical == nil { return transition, false } //nolint if canonical.Hash() == parentHash { return EpochTransition{ BlockNumber: 0, BlockHash: common.HexToHash("0x5b28c1bfd3a15230c9a46b399cd0f9a6920d432e85381cc6a140b06e8410112f"), ProofRlp: params.SokolGenesisEpochProof, }, true /* TODO: return self .epoch_transitions() .map(|(_, t)| t) .take_while(|t| t.block_number <= details.number) .last(); */ } parentHash = h.Hash() } } // AuRa //nolint type AuRa struct { db kv.RwDB // Database to store and retrieve snapshot checkpoints exitCh chan struct{} lock sync.RWMutex // Protects the signer fields step PermissionedStep // History of step hashes recently received from peers. 
receivedStepHashes ReceivedStepHashes OurSigningAddress common.Address // Same as Etherbase in Mining cfg AuthorityRoundParams EmptyStepsSet *EmptyStepSet EpochManager *EpochManager // Mutex<EpochManager>, //Validators ValidatorSet //ValidateScoreTransition uint64 //ValidateStepTransition uint64 //immediateTransitions bool //blockReward map[uint64]*uint256.Int //blockRewardContractTransitions BlockRewardContractList //maximumUncleCountTransition uint64 //maximumUncleCount uint //maximumEmptySteps uint ////machine: EthereumMachine, //// If set, enables random number contract integration. It maps the transition block to the contract address. //randomnessContractAddress map[uint64]common.Address //// The addresses of contracts that determine the block gas limit. //blockGasLimitContractTransitions map[uint64]common.Address //// Memoized gas limit overrides, by block hash. //gasLimitOverrideCache *GasLimitOverride //Mutex<LruCache<H256, Option<U256>>>, //// The block number at which the consensus engine switches from AuRa to AuRa with POSDAO //// modifications. For details about POSDAO, see the whitepaper: //// https://www.xdaichain.com/for-validators/posdao-whitepaper //posdaoTransition *uint64 // Option<BlockNumber>, } type GasLimitOverride struct { cache *lru.Cache } func NewGasLimitOverride() *GasLimitOverride { // The number of recent block hashes for which the gas limit override is memoized. const GasLimitOverrideCacheCapacity = 10 cache, err := lru.New(GasLimitOverrideCacheCapacity) if err != nil { panic("error creating prefetching cache for blocks") } return &GasLimitOverride{cache: cache} } func (pb *GasLimitOverride) Pop(hash common.Hash) *uint256.Int { if val, ok := pb.cache.Get(hash); ok && val != nil { pb.cache.Remove(hash) if v, ok := val.(*uint256.Int); ok { return v } } return nil } func (pb *GasLimitOverride) Add(hash common.Hash, b *uint256.Int) { if b == nil { return } pb.cache.ContainsOrAdd(hash, b) } func NewAuRa(config *params.AuRaConfig, db kv.RwDB, ourSigningAddress common.Address, engineParamsJson []byte) (*AuRa, error) { spec := JsonSpec{} err := json.Unmarshal(engineParamsJson, &spec) if err != nil { return nil, err } auraParams, err := FromJson(spec) if err != nil { return nil, err } if _, ok := auraParams.StepDurations[0]; !ok { return nil, fmt.Errorf("authority Round step 0 duration is undefined") } for _, v := range auraParams.StepDurations { if v == 0 { return nil, fmt.Errorf("authority Round step 0 duration is undefined") } } if _, ok := auraParams.StepDurations[0]; !ok { return nil, fmt.Errorf("authority Round step duration cannot be 0") } //shouldTimeout := auraParams.StartStep == nil initialStep := uint64(0) if auraParams.StartStep != nil { initialStep = *auraParams.StartStep } var durations []StepDurationInfo durInfo := StepDurationInfo{ TransitionStep: 0, TransitionTimestamp: 0, StepDuration: auraParams.StepDurations[0], } durations = append(durations, durInfo) var i = 0 for time, dur := range auraParams.StepDurations { if i == 0 { // skip first i++ continue } step, t, ok := nextStepTimeDuration(durInfo, time) if !ok { return nil, fmt.Errorf("timestamp overflow") } durInfo.TransitionStep = step durInfo.TransitionTimestamp = t durInfo.StepDuration = dur durations = append(durations, durInfo) } step := &Step{ inner: atomic.NewUint64(initialStep), calibrate: auraParams.StartStep == nil, durations: durations, } step.doCalibrate() /* let engine = Arc::new(AuthorityRound { epoch_manager: Mutex::new(EpochManager::blank()), received_step_hashes: 
RwLock::new(Default::default()), gas_limit_override_cache: Mutex::new(LruCache::new(GAS_LIMIT_OVERRIDE_CACHE_CAPACITY)), }) // Do not initialize timeouts for tests. if should_timeout { let handler = TransitionHandler { step: engine.step.clone(), client: engine.client.clone(), }; engine .transition_service .register_handler(Arc::new(handler))?; } */ exitCh := make(chan struct{}) c := &AuRa{ db: db, exitCh: exitCh, step: PermissionedStep{inner: step, canPropose: atomic.NewBool(true)}, OurSigningAddress: ourSigningAddress, cfg: auraParams, receivedStepHashes: ReceivedStepHashes{}, EpochManager: NewEpochManager(), } _ = config return c, nil } // A helper accumulator function mapping a step duration and a step duration transition timestamp // to the corresponding step number and the correct starting second of the step. func nextStepTimeDuration(info StepDurationInfo, time uint64) (uint64, uint64, bool) { stepDiff := time + info.StepDuration if stepDiff < 1 { return 0, 0, false } stepDiff -= 1 if stepDiff < info.TransitionTimestamp { return 0, 0, false } stepDiff -= info.TransitionTimestamp if info.StepDuration == 0 { return 0, 0, false } stepDiff /= info.StepDuration timeDiff := stepDiff * info.StepDuration return info.TransitionStep + stepDiff, info.TransitionTimestamp + timeDiff, true } // Author implements consensus.Engine, returning the Ethereum address recovered // from the signature in the header's extra-data section. func (c *AuRa) Author(header *types.Header) (common.Address, error) { /* let message = keccak(empty_step_rlp(self.step, &self.parent_hash)); let public = publickey::recover(&self.signature.into(), &message)?; Ok(publickey::public_to_address(&public)) */ return header.Coinbase, nil } // VerifyHeader checks whether a header conforms to the consensus rules. func (c *AuRa) VerifyHeader(chain consensus.ChainHeaderReader, header *types.Header, _ bool) error { return nil } //nolint func (c *AuRa) hasReceivedStepHashes(step uint64, author common.Address, newHash common.Hash) bool { /* self .received_step_hashes .read() .get(&received_step_key) .map_or(false, |h| *h != new_hash) */ return false } //nolint func (c *AuRa) insertReceivedStepHashes(step uint64, author common.Address, newHash common.Hash) { /* self.received_step_hashes .write() .insert(received_step_key, new_hash); */ } //nolint func (c *AuRa) verifyFamily(chain consensus.ChainHeaderReader, e consensus.EpochReader, header *types.Header, call consensus.Call, syscall consensus.SystemCall) error { // TODO: I call it from Initialize - because looks like no much reason to have separated "verifyFamily" call //nolint step, err := headerStep(header) if err != nil { return err } parent := chain.GetHeader(header.ParentHash, header.Number.Uint64()-1) //nolint parentStep, err := headerStep(parent) if err != nil { return err } //nolint validators, setNumber, err := c.epochSet(chain, e, header, syscall) if err != nil { return err } return nil // Ensure header is from the step after parent. //nolint if step == parentStep || (header.Number.Uint64() >= c.cfg.ValidateStepTransition && step <= parentStep) { log.Trace("[aura] Multiple blocks proposed for step", "num", parentStep) _ = setNumber /* self.validators.report_malicious( header.author(), set_number, header.number(), Default::default(), ); Err(EngineError::DoubleVote(*header.author()))?; */ return fmt.Errorf("double vote: %x", header.Coinbase) } // Report malice if the validator produced other sibling blocks in the same step. 
if !c.hasReceivedStepHashes(step, header.Coinbase, header.Hash()) { /* trace!(target: "engine", "Validator {} produced sibling blocks in the same step", header.author()); self.validators.report_malicious( header.author(), set_number, header.number(), Default::default(), ); */ } else { c.insertReceivedStepHashes(step, header.Coinbase, header.Hash()) } // Remove hash records older than two full rounds of steps (picked as a reasonable trade-off between // memory consumption and fault-tolerance). cnt, err := count(validators, parent.Hash(), call) if err != nil { return err } siblingMaliceDetectionPeriod := 2 * cnt oldestStep := uint64(0) // let oldest_step = parent_step.saturating_sub(sibling_malice_detection_period); if parentStep > siblingMaliceDetectionPeriod { oldestStep = parentStep - siblingMaliceDetectionPeriod } //nolint if oldestStep > 0 { /* let mut rsh = self.received_step_hashes.write(); let new_rsh = rsh.split_off(&(oldest_step, Address::zero())); *rsh = new_rsh; */ } emptyStepLen := uint64(0) //self.report_skipped(header, step, parent_step, &*validators, set_number); /* // If empty step messages are enabled we will validate the messages in the seal, missing messages are not // reported as there's no way to tell whether the empty step message was never sent or simply not included. let empty_steps_len = if header.number() >= self.empty_steps_transition { let validate_empty_steps = || -> Result<usize, Error> { let strict_empty_steps = header.number() >= self.strict_empty_steps_transition; let empty_steps = header_empty_steps(header)?; let empty_steps_len = empty_steps.len(); let mut prev_empty_step = 0; for empty_step in empty_steps { if empty_step.step <= parent_step || empty_step.step >= step { Err(EngineError::InsufficientProof(format!( "empty step proof for invalid step: {:?}", empty_step.step )))?; } if empty_step.parent_hash != *header.parent_hash() { Err(EngineError::InsufficientProof(format!( "empty step proof for invalid parent hash: {:?}", empty_step.parent_hash )))?; } if !empty_step.verify(&*validators).unwrap_or(false) { Err(EngineError::InsufficientProof(format!( "invalid empty step proof: {:?}", empty_step )))?; } if strict_empty_steps { if empty_step.step <= prev_empty_step { Err(EngineError::InsufficientProof(format!( "{} empty step: {:?}", if empty_step.step == prev_empty_step { "duplicate" } else { "unordered" }, empty_step )))?; } prev_empty_step = empty_step.step; } } Ok(empty_steps_len) }; match validate_empty_steps() { Ok(len) => len, Err(err) => { trace!( target: "engine", "Reporting benign misbehaviour (cause: invalid empty steps) \ at block #{}, epoch set number {}. Own address: {}", header.number(), set_number, self.address().unwrap_or_default() ); self.validators .report_benign(header.author(), set_number, header.number()); return Err(err); } } } else { self.report_skipped(header, step, parent_step, &*validators, set_number); 0 }; */ if header.Number.Uint64() >= c.cfg.ValidateScoreTransition { expectedDifficulty := calculateScore(parentStep, step, emptyStepLen) if header.Difficulty.Cmp(expectedDifficulty.ToBig()) != 0 { return fmt.Errorf("invlid difficulty: expect=%s, found=%s\n", expectedDifficulty, header.Difficulty) } } return nil } // VerifyHeaders is similar to VerifyHeader, but verifies a batch of headers. The // method returns a quit channel to abort the operations and a results channel to // retrieve the async verifications (the order is that of the input slice). 
func (c *AuRa) VerifyHeaders(chain consensus.ChainHeaderReader, headers []*types.Header, _ []bool) error { return nil } // VerifyUncles implements consensus.Engine, always returning an error for any // uncles as this consensus mechanism doesn't permit uncles. func (c *AuRa) VerifyUncles(chain consensus.ChainReader, header *types.Header, uncles []*types.Header) error { return nil //if len(uncles) > 0 { // return errors.New("uncles not allowed") //} //return nil } // VerifySeal implements consensus.Engine, checking whether the signature contained // in the header satisfies the consensus protocol requirements. func (c *AuRa) VerifySeal(chain consensus.ChainHeaderReader, header *types.Header) error { return nil //snap, err := c.Snapshot(chain, header.Number.Uint64(), header.Hash(), nil) //if err != nil { // return err //} //return c.verifySeal(chain, header, snap) } // Prepare implements consensus.Engine, preparing all the consensus fields of the // header for running the transactions on top. func (c *AuRa) Prepare(chain consensus.ChainHeaderReader, header *types.Header) error { return nil /// If the block isn't a checkpoint, cast a random vote (good enough for now) //header.Coinbase = common.Address{} //header.Nonce = types.BlockNonce{} // //number := header.Number.Uint64() /// Assemble the voting snapshot to check which votes make sense //snap, err := c.Snapshot(chain, number-1, header.ParentHash, nil) //if err != nil { // return err //} //if number%c.config.Epoch != 0 { // c.lock.RLock() // // // Gather all the proposals that make sense voting on // addresses := make([]common.Address, 0, len(c.proposals)) // for address, authorize := range c.proposals { // if snap.validVote(address, authorize) { // addresses = append(addresses, address) // } // } // // If there's pending proposals, cast a vote on them // if len(addresses) > 0 { // header.Coinbase = addresses[rand.Intn(len(addresses))] // if c.proposals[header.Coinbase] { // copy(header.Nonce[:], NonceAuthVote) // } else { // copy(header.Nonce[:], nonceDropVote) // } // } // c.lock.RUnlock() //} /// Set the correct difficulty //header.Difficulty = calcDifficulty(snap, c.signer) // /// Ensure the extra data has all its components //if len(header.Extra) < ExtraVanity { // header.Extra = append(header.Extra, bytes.Repeat([]byte{0x00}, ExtraVanity-len(header.Extra))...) //} //header.Extra = header.Extra[:ExtraVanity] // //if number%c.config.Epoch == 0 { // for _, signer := range snap.GetSigners() { // header.Extra = append(header.Extra, signer[:]...) // } //} //header.Extra = append(header.Extra, make([]byte, ExtraSeal)...) // /// Mix digest is reserved for now, set to empty //header.MixDigest = common.Hash{} // /// Ensure the timestamp has the correct delay //parent := chain.GetHeader(header.ParentHash, number-1) //if parent == nil { // return consensus.ErrUnknownAncestor //} //header.Time = parent.Time + c.config.Period // //now := uint64(time.Now().Unix()) //if header.Time < now { // header.Time = now //} // //return nil } func (c *AuRa) Initialize(config *params.ChainConfig, chain consensus.ChainHeaderReader, e consensus.EpochReader, header *types.Header, txs []types.Transaction, uncles []*types.Header, syscall consensus.SystemCall) { //TODO: hardcoded boolean!!! 
// let is_epoch_begin = chain.epoch_transition(parent.number(), *header.parent_hash()).is_some(); if header.Number.Uint64() == 1 { proof, err := c.GenesisEpochData(header, syscall) if err != nil { panic(err) } err = e.PutEpoch(header.ParentHash, 0, proof) //TODO: block 0 hardcoded - need fix it inside validators if err != nil { panic(err) } } //if err := c.verifyFamily(chain, e, header, call, syscall); err != nil { //TODO: OE has it as a separate engine call? why? // panic(err) //} // check_and_lock_block -> check_epoch_end_signal epoch, err := e.GetEpoch(header.ParentHash, header.Number.Uint64()-1) if err != nil { log.Warn("[aura] initialize block: on epoch begin", "err", err) return } isEpochBegin := epoch != nil if !isEpochBegin { return } err = c.cfg.Validators.onEpochBegin(isEpochBegin, header, syscall) if err != nil { log.Warn("[aura] initialize block: on epoch begin", "err", err) return } // check_and_lock_block -> check_epoch_end_signal END (before enact) } //word `signal epoch` == word `pending epoch` func (c *AuRa) Finalize(config *params.ChainConfig, header *types.Header, state *state.IntraBlockState, txs []types.Transaction, uncles []*types.Header, r types.Receipts, e consensus.EpochReader, chain consensus.ChainHeaderReader, syscall consensus.SystemCall) error { // accumulateRewards retrieves rewards for a block and applies them to the coinbase accounts for miner and uncle miners beneficiaries, _, rewards, err := AccumulateRewards(config, c, header, uncles, syscall) if err != nil { return fmt.Errorf("buildAncestrySubChain: %w", err) } for i := range beneficiaries { //fmt.Printf("beneficiary: n=%d, %x,%d\n", header.Number.Uint64(), beneficiaries[i], rewards[i]) state.AddBalance(beneficiaries[i], rewards[i]) } // check_and_lock_block -> check_epoch_end_signal (after enact) if header.Number.Uint64() >= DEBUG_LOG_FROM { fmt.Printf("finalize1: %d,%d\n", header.Number.Uint64(), len(r)) } pendingTransitionProof, err := c.cfg.Validators.signalEpochEnd(header.Number.Uint64() == 0, header, r) if err != nil { return err } if pendingTransitionProof != nil { if header.Number.Uint64() >= DEBUG_LOG_FROM { fmt.Printf("insert_pending_trancition: %d,receipts=%d, lenProof=%d\n", header.Number.Uint64(), len(r), len(pendingTransitionProof)) } if err = e.PutPendingEpoch(header.Hash(), header.Number.Uint64(), pendingTransitionProof); err != nil { return err } } // check_and_lock_block -> check_epoch_end_signal END finalized := buildFinality(c.EpochManager, chain, e, c.cfg.Validators, header, syscall) c.EpochManager.finalityChecker.print(header.Number.Uint64()) epochEndProof, err := isEpochEnd(chain, e, finalized, header) if err != nil { return err } if epochEndProof != nil { c.EpochManager.noteNewEpoch() log.Info("[aura] epoch transition", "block_num", header.Number.Uint64()) if err := e.PutEpoch(header.Hash(), header.Number.Uint64(), epochEndProof); err != nil { return err } } return nil } func buildFinality(e *EpochManager, chain consensus.ChainHeaderReader, er consensus.EpochReader, validators ValidatorSet, header *types.Header, syscall consensus.SystemCall) []unAssembledHeader { // commit_block -> aura.build_finality _, _, ok := e.zoomToAfter(chain, er, validators, header.ParentHash, syscall) if !ok { return []unAssembledHeader{} } if e.finalityChecker.lastPushed == nil || *e.finalityChecker.lastPushed != header.ParentHash { if err := e.finalityChecker.buildAncestrySubChain(func(hash common.Hash) ([]common.Address, common.Hash, common.Hash, uint64, bool) { h := chain.GetHeaderByHash(hash) if h == 
nil { return nil, common.Hash{}, common.Hash{}, 0, false } return []common.Address{h.Coinbase}, h.Hash(), h.ParentHash, h.Number.Uint64(), true }, header.ParentHash, e.epochTransitionHash); err != nil { //log.Warn("[aura] buildAncestrySubChain", "err", err) return []unAssembledHeader{} } } res, err := e.finalityChecker.push(header.Hash(), header.Number.Uint64(), []common.Address{header.Coinbase}) if err != nil { //log.Warn("[aura] finalityChecker.push", "err", err) return []unAssembledHeader{} } return res } func isEpochEnd(chain consensus.ChainHeaderReader, e consensus.EpochReader, finalized []unAssembledHeader, header *types.Header) ([]byte, error) { // commit_block -> aura.is_epoch_end for i := range finalized { pendingTransitionProof, err := e.GetPendingEpoch(finalized[i].hash, finalized[i].number) if err != nil { return nil, err } if pendingTransitionProof == nil { continue } if header.Number.Uint64() >= DEBUG_LOG_FROM { fmt.Printf("pending transition: %d,%x,len=%d\n", finalized[i].number, finalized[i].hash, len(pendingTransitionProof)) } finalityProof := allHeadersUntil(chain, header, finalized[i].hash) var finalizedHeader *types.Header if finalized[i].hash == header.Hash() { finalizedHeader = header } else { finalizedHeader = chain.GetHeader(finalized[i].hash, finalized[i].number) } signalNumber := finalizedHeader.Number finalityProof = append(finalityProof, finalizedHeader) for i, j := 0, len(finalityProof)-1; i < j; i, j = i+1, j-1 { // reverse finalityProof[i], finalityProof[j] = finalityProof[j], finalityProof[i] } finalityProofRLP, err := rlp.EncodeToBytes(finalityProof) if err != nil { return nil, err } /* // We turn off can_propose here because upon validator set change there can // be two valid proposers for a single step: one from the old set and // one from the new. // // This way, upon encountering an epoch change, the proposer from the // new set will be forced to wait until the next step to avoid sealing a // block that breaks the invariant that the parent's step < the block's step. self.step.can_propose.store(false, AtomicOrdering::SeqCst); */ return rlp.EncodeToBytes(EpochTransitionProof{SignalNumber: signalNumber.Uint64(), SetProof: pendingTransitionProof, FinalityProof: finalityProofRLP}) } return nil, nil } // allHeadersUntil walk the chain backwards from current head until finalized_hash // to construct transition proof. author == ec_recover(sig) known // since the blocks are in the DB. 
func allHeadersUntil(chain consensus.ChainHeaderReader, from *types.Header, to common.Hash) (out []*types.Header) { var header = from for { header = chain.GetHeader(header.ParentHash, header.Number.Uint64()-1) if header == nil { panic("not found header") } if header.Number.Uint64() == 0 { break } if to == header.Hash() { break } out = append(out, header) } return out } //func (c *AuRa) check_epoch_end(cc *params.ChainConfig, header *types.Header, state *state.IntraBlockState, txs []types.Transaction, uncles []*types.Header, syscall consensus.SystemCall) { //} // FinalizeAndAssemble implements consensus.Engine func (c *AuRa) FinalizeAndAssemble(chainConfig *params.ChainConfig, header *types.Header, state *state.IntraBlockState, txs []types.Transaction, uncles []*types.Header, r types.Receipts, e consensus.EpochReader, chain consensus.ChainHeaderReader, syscall consensus.SystemCall, call consensus.Call) (*types.Block, error) { c.Finalize(chainConfig, header, state, txs, uncles, r, e, chain, syscall) // Assemble and return the final block for sealing return types.NewBlock(header, txs, uncles, r), nil } // Authorize injects a private key into the consensus engine to mint new blocks // with. func (c *AuRa) Authorize(signer common.Address, signFn clique.SignerFn) { c.lock.Lock() defer c.lock.Unlock() //c.signer = signer //c.signFn = signFn } func (c *AuRa) GenesisEpochData(header *types.Header, caller consensus.SystemCall) ([]byte, error) { setProof, err := c.cfg.Validators.genesisEpochData(header, caller) if err != nil { return nil, err } res, err := rlp.EncodeToBytes(EpochTransitionProof{SignalNumber: 0, SetProof: setProof, FinalityProof: []byte{}}) if err != nil { panic(err) } //fmt.Printf("reere: %x\n", res) //f91a84f9020da00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0fad4af258fd11939fae0c6c6eec9d340b1caac0b0196fd9a1bc3f489c5bf00b3a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008083663be080808080b8410000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f91871b914c26060604052600436106100fc576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806303aca79214610101578063108552691461016457806340a141ff1461019d57806340c9cdeb146101d65780634110a489146101ff57806345199e0a1461025757806349285b58146102c15780634d238c8e14610316578063752862111461034f578063900eb5a8146103645780639a573786146103c7578063a26a47d21461041c578063ae4b1b5b14610449578063b3f05b971461049e578063b7ab4db5146104cb578063d3e848f114610535578063fa81b2001461058a578063facd743b146105df575b600080fd5b341561010c57600080fd5b6101226004808035906020019091905050610630565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561016f57600080fd5b61019b6
00480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190505061066f565b005b34156101a857600080fd5b6101d4600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610807565b005b34156101e157600080fd5b6101e9610bb7565b6040518082815260200191505060405180910390f35b341561020a57600080fd5b610236600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610bbd565b60405180831515151581526020018281526020019250505060405180910390f35b341561026257600080fd5b61026a610bee565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b838110156102ad578082015181840152602081019050610292565b505050509050019250505060405180910390f35b34156102cc57600080fd5b6102d4610c82565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561032157600080fd5b61034d600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610d32565b005b341561035a57600080fd5b610362610fcc565b005b341561036f57600080fd5b61038560048080359060200190919050506110fc565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156103d257600080fd5b6103da61113b565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561042757600080fd5b61042f6111eb565b604051808215151515815260200191505060405180910390f35b341561045457600080fd5b61045c6111fe565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156104a957600080fd5b6104b1611224565b604051808215151515815260200191505060405180910390f35b34156104d657600080fd5b6104de611237565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b83811015610521578082015181840152602081019050610506565b505050509050019250505060405180910390f35b341561054057600080fd5b6105486112cb565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561059557600080fd5b61059d6112f1565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156105ea57600080fd5b610616600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050611317565b604051808215151515815260200191505060405180910390f35b60078181548110151561063f57fe5b90600052602060002090016000915054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600460029054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff161415156106cb57600080fd5b600460019054906101000a900460ff161515156106e757600080fd5b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff161415151561072357600080fd5b80600a60006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506001600460016101000a81548160ff0219169083151502179055507f600bcf04a13e752d1e3670a5a9f1c21177ca2a93c6f5391d4f1298d098097c22600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a150565b600080600061081461113b565b73ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614151561084d57600080fd5b83600960008273ffffffffffffffffffffffffffffffff
ffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160009054906101000a900460ff1615156108a957600080fd5b600960008673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600101549350600160078054905003925060078381548110151561090857fe5b906000526020600020900160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1691508160078581548110151561094657fe5b906000526020600020900160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555083600960008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600101819055506007838154811015156109e557fe5b906000526020600020900160006101000a81549073ffffffffffffffffffffffffffffffffffffffff02191690556000600780549050111515610a2757600080fd5b6007805480919060019003610a3c9190611370565b506000600960008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600101819055506000600960008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160006101000a81548160ff0219169083151502179055506000600460006101000a81548160ff0219169083151502179055506001430340600019167f55252fa6eee4741b4e24a74a70e9c11fd2c2281df8d6ea13126ff845f7825c89600760405180806020018281038252838181548152602001915080548015610ba257602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610b58575b50509250505060405180910390a25050505050565b60085481565b60096020528060005260406000206000915090508060000160009054906101000a900460ff16908060010154905082565b610bf661139c565b6007805480602002602001604051908101604052809291908181526020018280548015610c7857602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610c2e575b5050505050905090565b6000600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff166349285b586000604051602001526040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b1515610d1257600080fd5b6102c65a03f11515610d2357600080fd5b50505060405180519050905090565b610d3a61113b565b73ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141515610d7357600080fd5b80600960008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160009054906101000a900460ff16151515610dd057600080fd5b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614151515610e0c57600080fd5b6040805190810160405280600115158152602001600780549050815250600960008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008201518160000160006101000a81548160ff0219169083151502179055506020820151816001015590505060078054806001018281610ea991906113b0565b9160005260206000209001600084909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000600460006101000a81548160ff0219169083151502179055506001430340600019167f55252fa6eee4741b4e24a74a70e9c11fd2c2281df8d6ea13126ff845f7825c896007604051808060200182810382528381815481526020019
15080548015610fba57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610f70575b50509250505060405180910390a25050565b600560009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff161480156110365750600460009054906101000a900460ff16155b151561104157600080fd5b6001600460006101000a81548160ff0219169083151502179055506007600690805461106e9291906113dc565b506006805490506008819055507f8564cd629b15f47dc310d45bcbfc9bcf5420b0d51bf0659a16c67f91d27632536110a4611237565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b838110156110e75780820151818401526020810190506110cc565b505050509050019250505060405180910390a1565b60068181548110151561110b57fe5b90600052602060002090016000915054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16639a5737866000604051602001526040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15156111cb57600080fd5b6102c65a03f115156111dc57600080fd5b50505060405180519050905090565b600460019054906101000a900460ff1681565b600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600460009054906101000a900460ff1681565b61123f61139c565b60068054806020026020016040519081016040528092919081815260200182805480156112c157602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311611277575b5050505050905090565b600560009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600460029054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000600960008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160009054906101000a900460ff169050919050565b81548183558181151161139757818360005260206000209182019101611396919061142e565b5b505050565b602060405190810160405280600081525090565b8154818355818115116113d7578183600052602060002091820191016113d6919061142e565b5b505050565b82805482825590600052602060002090810192821561141d5760005260206000209182015b8281111561141c578254825591600101919060010190611401565b5b50905061142a9190611453565b5090565b61145091905b8082111561144c576000816000905550600101611434565b5090565b90565b61149391905b8082111561148f57600081816101000a81549073ffffffffffffffffffffffffffffffffffffffff021916905550600101611459565b5090565b905600a165627a7a7230582036ea35935c8246b68074adece2eab70c40e69a0193c08a6277ce06e5b25188510029b8f3f8f1a08023c0d95fc2364e0bf7593f5ff32e1db8ef9f4b41c0bd474eae62d1af896e99808080a0b47b4f0b3e73b5edc8f9a9da1cbcfed562eb06bf54619b6aefeadebf5b3604c280a0da6ec08940a924cb08c947dd56cdb40076b29a6f0ea4dba4e2d02d9a9a72431b80a030cc4138c9e74b6cf79d624b4b5612c0fd888e91f55316cfee7d1694e1a90c0b80a0c5d54b915b56a888eee4e6eeb3141e778f9b674d1d322962eed900f02c29990aa017256b36ef47f907c6b1378a2636942ce894c17075e56fc054d4283f6846659e808080a03340bbaeafcda3a8672eb83099231dbbfab8dae02a1e8ec2f7180538fac207e080b86bf869a033aa5d69545785694b808840be50c182dad2ec3636dfccbe6572fb69828742c0b846f8440101a0663ce0d171e545a26aa67e4ca66f72ba96bb48287dbcc03beea282867f80d44ba01f0e7726926cb43c03a0abf48197dba78522ec8ba1b158e2aa30da7d2a2c6f9eb838f7a03868bdfa8727775661e4ccf117824a175a33f8703d728c04488fbfffcafda9f9
9594e8ddc5c7a2d2f0d7a9798459c0104fdf5e987acaa3e2a02052222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f01b853f851808080a07bb75cabebdcbd1dbb4331054636d0c6d7a2b08483b9e04df057395a7434c9e080808080808080a0e61e567237b49c44d8f906ceea49027260b4010c10a547b38d8b131b9d3b6f848080808080b8d3f8d1a0dc277c93a9f9dcee99aac9b8ba3cfa4c51821998522469c37715644e8fbac0bfa0ab8cdb808c8303bb61fb48e276217be9770fa83ecf3f90f2234d558885f5abf1808080a0fe137c3a474fbde41d89a59dd76da4c55bf696b86d3af64a55632f76cf30786780808080a06301b39b2ea8a44df8b0356120db64b788e71f52e1d7a6309d0d2e5b86fee7cb80a0da5d8b08dea0c5a4799c0f44d8a24d7cdf209f9b7a5588c1ecafb5361f6b9f07a01b7779e149cadf24d4ffb77ca7e11314b8db7097e4d70b2a173493153ca2e5a0808080b853f851808080a0a87d9bb950836582673aa0eecc0ff64aac607870637a2dd2012b8b1b31981f698080a08da6d5c36a404670c553a2c9052df7cd604f04e3863c4c7b9e0027bfd54206d680808080808080808080b86bf869a02080c7b7ae81a58eb98d9c78de4a1fd7fd9535fc953ed2be602daaa41767312ab846f8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 //f91a8c80b91a87f91a84f9020da00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0fad4af258fd11939fae0c6c6eec9d340b1caac0b0196fd9a1bc3f489c5bf00b3a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008083663be080808080b8410000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f91871b914c26060604052600436106100fc576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806303aca79214610101578063108552691461016457806340a141ff1461019d57806340c9cdeb146101d65780634110a489146101ff57806345199e0a1461025757806349285b58146102c15780634d238c8e14610316578063752862111461034f578063900eb5a8146103645780639a573786146103c7578063a26a47d21461041c578063ae4b1b5b14610449578063b3f05b971461049e578063b7ab4db5146104cb578063d3e848f114610535578063fa81b2001461058a578063facd743b146105df575b600080fd5b341561010c57600080fd5b6101226004808035906020019091905050610630565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561016f57600080fd5b61019b600480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190505061066f565b005b34156101a857600080fd5b6101d4600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610807565b005b34156101e157600080fd5b6101e9610bb7565b6040518082815260200191505060405180910390f35b341561020a57600080fd5b610236600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610bbd565b60405180831515151581526020018281526020019250505060405180910390f35b341561026257600080fd5b61026a610bee565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b838110156102ad57808201518184015260208101905061
0292565b505050509050019250505060405180910390f35b34156102cc57600080fd5b6102d4610c82565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561032157600080fd5b61034d600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610d32565b005b341561035a57600080fd5b610362610fcc565b005b341561036f57600080fd5b61038560048080359060200190919050506110fc565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156103d257600080fd5b6103da61113b565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561042757600080fd5b61042f6111eb565b604051808215151515815260200191505060405180910390f35b341561045457600080fd5b61045c6111fe565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156104a957600080fd5b6104b1611224565b604051808215151515815260200191505060405180910390f35b34156104d657600080fd5b6104de611237565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b83811015610521578082015181840152602081019050610506565b505050509050019250505060405180910390f35b341561054057600080fd5b6105486112cb565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b341561059557600080fd5b61059d6112f1565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34156105ea57600080fd5b610616600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050611317565b604051808215151515815260200191505060405180910390f35b60078181548110151561063f57fe5b90600052602060002090016000915054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600460029054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff161415156106cb57600080fd5b600460019054906101000a900460ff161515156106e757600080fd5b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff161415151561072357600080fd5b80600a60006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506001600460016101000a81548160ff0219169083151502179055507f600bcf04a13e752d1e3670a5a9f1c21177ca2a93c6f5391d4f1298d098097c22600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a150565b600080600061081461113b565b73ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614151561084d57600080fd5b83600960008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160009054906101000a900460ff1615156108a957600080fd5b600960008673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600101549350600160078054905003925060078381548110151561090857fe5b906000526020600020900160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1691508160078581548110151561094657fe5b906000526020600020900160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555083600960008473fffffffff
fffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600101819055506007838154811015156109e557fe5b906000526020600020900160006101000a81549073ffffffffffffffffffffffffffffffffffffffff02191690556000600780549050111515610a2757600080fd5b6007805480919060019003610a3c9190611370565b506000600960008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600101819055506000600960008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160006101000a81548160ff0219169083151502179055506000600460006101000a81548160ff0219169083151502179055506001430340600019167f55252fa6eee4741b4e24a74a70e9c11fd2c2281df8d6ea13126ff845f7825c89600760405180806020018281038252838181548152602001915080548015610ba257602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610b58575b50509250505060405180910390a25050505050565b60085481565b60096020528060005260406000206000915090508060000160009054906101000a900460ff16908060010154905082565b610bf661139c565b6007805480602002602001604051908101604052809291908181526020018280548015610c7857602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610c2e575b5050505050905090565b6000600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff166349285b586000604051602001526040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b1515610d1257600080fd5b6102c65a03f11515610d2357600080fd5b50505060405180519050905090565b610d3a61113b565b73ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141515610d7357600080fd5b80600960008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160009054906101000a900460ff16151515610dd057600080fd5b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614151515610e0c57600080fd5b6040805190810160405280600115158152602001600780549050815250600960008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008201518160000160006101000a81548160ff0219169083151502179055506020820151816001015590505060078054806001018281610ea991906113b0565b9160005260206000209001600084909190916101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000600460006101000a81548160ff0219169083151502179055506001430340600019167f55252fa6eee4741b4e24a74a70e9c11fd2c2281df8d6ea13126ff845f7825c89600760405180806020018281038252838181548152602001915080548015610fba57602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311610f70575b50509250505060405180910390a25050565b600560009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff161480156110365750600460009054906101000a900460ff16155b151561104157600080fd5b6001600460006101000a81548160ff0219169083151502179055506007600690805461106e9291906113dc565b506006805490506008819055507f8564cd629b15f47dc310d45bcbfc9bcf
5420b0d51bf0659a16c67f91d27632536110a4611237565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b838110156110e75780820151818401526020810190506110cc565b505050509050019250505060405180910390a1565b60068181548110151561110b57fe5b90600052602060002090016000915054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16639a5737866000604051602001526040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15156111cb57600080fd5b6102c65a03f115156111dc57600080fd5b50505060405180519050905090565b600460019054906101000a900460ff1681565b600a60009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600460009054906101000a900460ff1681565b61123f61139c565b60068054806020026020016040519081016040528092919081815260200182805480156112c157602002820191906000526020600020905b8160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019060010190808311611277575b5050505050905090565b600560009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b600460029054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000600960008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060000160009054906101000a900460ff169050919050565b81548183558181151161139757818360005260206000209182019101611396919061142e565b5b505050565b602060405190810160405280600081525090565b8154818355818115116113d7578183600052602060002091820191016113d6919061142e565b5b505050565b82805482825590600052602060002090810192821561141d5760005260206000209182015b8281111561141c578254825591600101919060010190611401565b5b50905061142a9190611453565b5090565b61145091905b8082111561144c576000816000905550600101611434565b5090565b90565b61149391905b8082111561148f57600081816101000a81549073ffffffffffffffffffffffffffffffffffffffff021916905550600101611459565b5090565b905600a165627a7a7230582036ea35935c8246b68074adece2eab70c40e69a0193c08a6277ce06e5b25188510029b8f3f8f1a08023c0d95fc2364e0bf7593f5ff32e1db8ef9f4b41c0bd474eae62d1af896e99808080a0b47b4f0b3e73b5edc8f9a9da1cbcfed562eb06bf54619b6aefeadebf5b3604c280a0da6ec08940a924cb08c947dd56cdb40076b29a6f0ea4dba4e2d02d9a9a72431b80a030cc4138c9e74b6cf79d624b4b5612c0fd888e91f55316cfee7d1694e1a90c0b80a0c5d54b915b56a888eee4e6eeb3141e778f9b674d1d322962eed900f02c29990aa017256b36ef47f907c6b1378a2636942ce894c17075e56fc054d4283f6846659e808080a03340bbaeafcda3a8672eb83099231dbbfab8dae02a1e8ec2f7180538fac207e080b86bf869a033aa5d69545785694b808840be50c182dad2ec3636dfccbe6572fb69828742c0b846f8440101a0663ce0d171e545a26aa67e4ca66f72ba96bb48287dbcc03beea282867f80d44ba01f0e7726926cb43c03a0abf48197dba78522ec8ba1b158e2aa30da7d2a2c6f9eb838f7a03868bdfa8727775661e4ccf117824a175a33f8703d728c04488fbfffcafda9f99594e8ddc5c7a2d2f0d7a9798459c0104fdf5e987acaa3e2a02052222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f01b853f851808080a07bb75cabebdcbd1dbb4331054636d0c6d7a2b08483b9e04df057395a7434c9e080808080808080a0e61e567237b49c44d8f906ceea49027260b4010c10a547b38d8b131b9d3b6f848080808080b8d3f8d1a0dc277c93a9f9dcee99aac9b8ba3cfa4c51821998522469c37715644e8fbac0bfa0ab8cdb808c8303bb61fb48e276217be9770fa83ecf3f90f2234d558885f5abf1808080a0fe137c3a474fbde41d89a59dd76da4c55bf696b86d3af64a55632f76cf30786780808080a06301b39b2ea8a44df8b0356120db64b788e71f52e1d7a6309d0d2e5b86fee7cb80a0da5d8b08dea0c5a4799c0f44d8a24d7cdf209f9b7a558
8c1ecafb5361f6b9f07a01b7779e149cadf24d4ffb77ca7e11314b8db7097e4d70b2a173493153ca2e5a0808080b853f851808080a0a87d9bb950836582673aa0eecc0ff64aac607870637a2dd2012b8b1b31981f698080a08da6d5c36a404670c553a2c9052df7cd604f04e3863c4c7b9e0027bfd54206d680808080808080808080b86bf869a02080c7b7ae81a58eb98d9c78de4a1fd7fd9535fc953ed2be602daaa41767312ab846f8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a47080 return res, nil } func (c *AuRa) Seal(chain consensus.ChainHeaderReader, block *types.Block, results chan<- *types.Block, stop <-chan struct{}) error { return nil //header := block.Header() // /// Sealing the genesis block is not supported //number := header.Number.Uint64() //if number == 0 { // return errUnknownBlock //} /// For 0-period chains, refuse to seal empty blocks (no reward but would spin sealing) //if c.config.Period == 0 && len(block.Transactions()) == 0 { // log.Info("Sealing paused, waiting for transactions") // return nil //} /// Don't hold the signer fields for the entire sealing procedure //c.lock.RLock() //signer, signFn := c.signer, c.signFn //c.lock.RUnlock() // /// Bail out if we're unauthorized to sign a block //snap, err := c.Snapshot(chain, number-1, header.ParentHash, nil) //if err != nil { // return err //} //if _, authorized := snap.Signers[signer]; !authorized { // return ErrUnauthorizedSigner //} /// If we're amongst the recent signers, wait for the next block //for seen, recent := range snap.Recents { // if recent == signer { // // Signer is among RecentsRLP, only wait if the current block doesn't shift it out // if limit := uint64(len(snap.Signers)/2 + 1); number < limit || seen > number-limit { // log.Info("Signed recently, must wait for others") // return nil // } // } //} /// Sweet, the protocol permits us to sign the block, wait for our time //delay := time.Unix(int64(header.Time), 0).Sub(time.Now()) // nolint: gosimple //if header.Difficulty.Cmp(diffNoTurn) == 0 { // // It's not our turn explicitly to sign, delay it a bit // wiggle := time.Duration(len(snap.Signers)/2+1) * wiggleTime // delay += time.Duration(rand.Int63n(int64(wiggle))) // // log.Trace("Out-of-turn signing requested", "wiggle", common.PrettyDuration(wiggle)) //} /// Sign all the things! //sighash, err := signFn(signer, accounts.MimetypeClique, CliqueRLP(header)) //if err != nil { // return err //} //copy(header.Extra[len(header.Extra)-ExtraSeal:], sighash) /// Wait until sealing is terminated or delay timeout. //log.Trace("Waiting for slot to sign and propagate", "delay", common.PrettyDuration(delay)) //go func() { // select { // case <-stop: // return // case <-time.After(delay): // } // // select { // case results <- block.WithSeal(header): // default: // log.Warn("Sealing result is not read by miner", "sealhash", SealHash(header)) // } //}() // //return nil } func stepProposer(validators ValidatorSet, blockHash common.Hash, step uint64, call consensus.Call) (common.Address, error) { //c, err := validators.defaultCaller(blockHash) //if err != nil { // return common.Address{}, err //} return validators.getWithCaller(blockHash, uint(step), call) } // GenerateSeal - Attempt to seal the block internally. // // This operation is synchronous and may (quite reasonably) not be available, in which case // `Seal::None` will be returned. 
func (c *AuRa) GenerateSeal(chain consensus.ChainHeaderReader, current, parent *types.Header, call consensus.Call) []rlp.RawValue { // first check to avoid generating signature most of the time // (but there's still a race to the `compare_exchange`) if !c.step.canPropose.Load() { log.Trace("[aura] Aborting seal generation. Can't propose.") return nil } parentStep, err := headerStep(parent) if err != nil { panic(err) } step := c.step.inner.inner.Load() // filter messages from old and future steps and different parents expectedDiff := calculateScore(parentStep, step, 0) if current.Difficulty.Cmp(expectedDiff.ToBig()) != 0 { log.Trace(fmt.Sprintf("[aura] Aborting seal generation. The step or empty_steps have changed in the meantime. %d != %d", current.Difficulty, expectedDiff)) return nil } if parentStep > step { log.Warn(fmt.Sprintf("[aura] Aborting seal generation for invalid step: %d > %d", parentStep, step)) return nil } validators, setNumber, err := c.epochSet(chain, nil, current, nil) if err != nil { log.Warn("[aura] Unable to generate seal", "err", err) return nil } stepProposerAddr, err := stepProposer(validators, current.ParentHash, step, call) if err != nil { log.Warn("[aura] Unable to get stepProposer", "err", err) return nil } if stepProposerAddr != current.Coinbase { return nil } // this is guarded against by `can_propose` unless the block was signed // on the same step (implies same key) and on a different node. if parentStep == step { log.Warn("Attempted to seal block on the same step as parent. Is this authority sealing with more than one node?") return nil } _ = setNumber /* signature, err := c.sign(current.bareHash()) if err != nil { log.Warn("[aura] generate_seal: FAIL: Accounts secret key unavailable.", "err", err) return nil } */ /* // only issue the seal if we were the first to reach the compare_exchange. if self .step .can_propose .compare_exchange(true, false, AtomicOrdering::SeqCst, AtomicOrdering::SeqCst) .is_ok() { // we can drop all accumulated empty step messages that are // older than the parent step since we're including them in // the seal self.clear_empty_steps(parent_step); // report any skipped primaries between the parent block and // the block we're sealing, unless we have empty steps enabled if header.number() < self.empty_steps_transition { self.report_skipped(header, step, parent_step, &*validators, set_number); } let mut fields = vec![encode(&step), encode(&(H520::from(signature).as_bytes()))]; if let Some(empty_steps_rlp) = empty_steps_rlp { fields.push(empty_steps_rlp); } return Seal::Regular(fields); } */ return nil } // epochSet fetch correct validator set for epoch at header, taking into account // finality of previous transitions. 
func (c *AuRa) epochSet(chain consensus.ChainHeaderReader, e consensus.EpochReader, h *types.Header, call consensus.SystemCall) (ValidatorSet, uint64, error) { if c.cfg.ImmediateTransitions { return c.cfg.Validators, h.Number.Uint64(), nil } finalityChecker, epochTransitionNumber, ok := c.EpochManager.zoomToAfter(chain, e, c.cfg.Validators, h.ParentHash, call) if !ok { return nil, 0, fmt.Errorf("unable to zoomToAfter to epoch") } return finalityChecker.signers, epochTransitionNumber, nil } //nolint func headerStep(current *types.Header) (val uint64, err error) { if len(current.Seal) < 1 { panic("was either checked with verify_block_basic or is genesis; has 2 fields; qed (Make sure the spec file has a correct genesis seal)") } err = rlp.Decode(bytes.NewReader(current.Seal[0]), &val) if err != nil { return val, err } return val, err } func (c *AuRa) CalcDifficulty(chain consensus.ChainHeaderReader, time, parentTime uint64, parentDifficulty *big.Int, parentNumber uint64, parentHash, parentUncleHash common.Hash, parentSeal []rlp.RawValue) *big.Int { var parentStep uint64 err := rlp.Decode(bytes.NewReader(parentSeal[0]), &parentStep) if err != nil { panic(err) } currentStep := c.step.inner.inner.Load() currentEmptyStepsLen := 0 return calculateScore(parentStep, currentStep, uint64(currentEmptyStepsLen)).ToBig() /* TODO: do I need gasLimit override logic here ? if let Some(gas_limit) = self.gas_limit_override(header) { trace!(target: "engine", "Setting gas limit to {} for block {}.", gas_limit, header.number()); let parent_gas_limit = *parent.gas_limit(); header.set_gas_limit(gas_limit); if parent_gas_limit != gas_limit { info!(target: "engine", "Block gas limit was changed from {} to {}.", parent_gas_limit, gas_limit); } } */ } // calculateScore - analog of PoW difficulty: // sqrt(U256::max_value()) + parent_step - current_step + current_empty_steps func calculateScore(parentStep, currentStep, currentEmptySteps uint64) *uint256.Int { maxU128 := uint256.NewInt(0).SetAllOne() maxU128 = maxU128.Rsh(maxU128, 128) res := maxU128.Add(maxU128, uint256.NewInt(parentStep)) res = res.Sub(res, uint256.NewInt(currentStep)) res = res.Add(res, uint256.NewInt(currentEmptySteps)) return res } func (c *AuRa) SealHash(header *types.Header) common.Hash { return clique.SealHash(header) } // Close implements consensus.Engine. It's a noop for clique as there are no background threads. func (c *AuRa) Close() error { libcommon.SafeClose(c.exitCh) return nil } // APIs implements consensus.Engine, returning the user facing RPC API to allow // controlling the signer voting. func (c *AuRa) APIs(chain consensus.ChainHeaderReader) []rpc.API { return []rpc.API{ //{ //Namespace: "clique", //Version: "1.0", //Service: &API{chain: chain, clique: c}, //Public: false, //} } } //nolint func (c *AuRa) emptySteps(fromStep, toStep uint64, parentHash common.Hash) []EmptyStep { from := EmptyStep{step: fromStep + 1, parentHash: parentHash} to := EmptyStep{step: toStep} res := []EmptyStep{} if to.LessOrEqual(&from) { return res } c.EmptyStepsSet.Sort() c.EmptyStepsSet.ForEach(func(i int, step *EmptyStep) { if step.Less(&from) || (&to).Less(step) { return } if step.parentHash != parentHash { return } res = append(res, *step) }) return res } // AccumulateRewards returns rewards for a given block. The mining reward consists // of the static blockReward plus a reward for each included uncle (if any). Individual // uncle rewards are also returned in an array. 
func AccumulateRewards(_ *params.ChainConfig, aura *AuRa, header *types.Header, _ []*types.Header, syscall consensus.SystemCall) (beneficiaries []common.Address, rewardKind []aurainterfaces.RewardKind, rewards []*uint256.Int, err error) { beneficiaries = append(beneficiaries, header.Coinbase) rewardKind = append(rewardKind, aurainterfaces.RewardAuthor) var rewardContractAddress BlockRewardContract var foundContract bool for _, c := range aura.cfg.BlockRewardContractTransitions { if c.blockNum > header.Number.Uint64() { break } foundContract = true rewardContractAddress = c } if foundContract { beneficiaries, rewards = callBlockRewardAbi(rewardContractAddress.address, syscall, beneficiaries, rewardKind) rewardKind = rewardKind[:len(beneficiaries)] for i := 0; i < len(rewardKind); i++ { rewardKind[i] = aurainterfaces.RewardExternal } } else { // block_reward.iter.rev().find(|&(block, _)| *block <= number) var reward BlockReward var found bool for i := range aura.cfg.BlockReward { if aura.cfg.BlockReward[i].blockNum > header.Number.Uint64() { break } found = true reward = aura.cfg.BlockReward[i] } if !found { panic("Current block's reward is not found; this indicates a chain config error") } for range beneficiaries { rewards = append(rewards, reward.amount) } } //err = aura.cfg.Validators.onCloseBlock(header, aura.OurSigningAddress) //if err != nil { // return //} return } func callBlockRewardAbi(contractAddr common.Address, syscall consensus.SystemCall, beneficiaries []common.Address, rewardKind []aurainterfaces.RewardKind) ([]common.Address, []*uint256.Int) { castedKind := make([]uint16, len(rewardKind)) for i := range rewardKind { castedKind[i] = uint16(rewardKind[i]) } packed, err := blockRewardAbi().Pack("reward", beneficiaries, castedKind) if err != nil { panic(err) } out, err := syscall(contractAddr, packed) if err != nil { panic(err) } if len(out) == 0 { return nil, nil } res, err := blockRewardAbi().Unpack("reward", out) if err != nil { panic(err) } _ = res[0] _ = res[1] return nil, nil } func blockRewardAbi() abi.ABI { a, err := abi.JSON(bytes.NewReader(contracts.BlockReward)) if err != nil { panic(err) } return a } // An empty step message that is included in a seal, the only difference is that it doesn't include // the `parent_hash` in order to save space. The included signature is of the original empty step // message, which can be reconstructed by using the parent hash of the block in which this sealed // empty message is inc luded. //nolint type SealedEmptyStep struct { signature []byte // H520 step uint64 } /* // extracts the empty steps from the header seal. should only be called when there are 3 fields in the seal // (i.e. header.number() >= self.empty_steps_transition). func headerEmptySteps(header *types.Header) ([]EmptyStep, error) { s := headerEmptyStepsRaw(header) sealedSteps := []SealedEmptyStep{} err := rlp.DecodeBytes(s, &sealedSteps) if err != nil { return nil, err } steps := make([]EmptyStep, len(sealedSteps)) for i := range sealedSteps { steps[i] = newEmptyStepFromSealed(sealedSteps[i], header.ParentHash) } return steps, nil } func newEmptyStepFromSealed(step SealedEmptyStep, parentHash common.Hash) EmptyStep { return EmptyStep{ signature: step.signature, step: step.step, parentHash: parentHash, } } // extracts the raw empty steps vec from the header seal. should only be called when there are 3 fields in the seal // (i.e. 
header.number() >= self.empty_steps_transition) func headerEmptyStepsRaw(header *types.Header) []byte { if len(header.Seal) < 3 { panic("was checked with verify_block_basic; has 3 fields; qed") } return header.Seal[2] } */ // A message broadcast by authorities when it's their turn to seal a block but there are no // transactions. Other authorities accumulate these messages and later include them in the seal as // proof. // // An empty step message is created _instead of_ a block if there are no pending transactions. // It cannot itself be a parent, and `parent_hash` always points to the most recent block. E.g.: // * Validator A creates block `bA`. // * Validator B has no pending transactions, so it signs an empty step message `mB` // instead whose hash points to block `bA`. // * Validator C also has no pending transactions, so it also signs an empty step message `mC` // instead whose hash points to block `bA`. // * Validator D creates block `bD`. The parent is block `bA`, and the header includes `mB` and `mC`. type EmptyStep struct { // The signature of the other two fields, by the message's author. signature []byte // H520 // This message's step number. step uint64 // The hash of the most recent block. parentHash common.Hash // H256 } func (s *EmptyStep) Less(other *EmptyStep) bool { if s.step < other.step { return true } if bytes.Compare(s.parentHash[:], other.parentHash[:]) < 0 { return true } if bytes.Compare(s.signature, other.signature) < 0 { return true } return false } func (s *EmptyStep) LessOrEqual(other *EmptyStep) bool { if s.step <= other.step { return true } if bytes.Compare(s.parentHash[:], other.parentHash[:]) <= 0 { return true } if bytes.Compare(s.signature, other.signature) <= 0 { return true } return false } // Returns `true` if the message has a valid signature by the expected proposer in the message's step. 
func (s *EmptyStep) verify(validators ValidatorSet) (bool, error) { //nolint //sRlp, err := EmptyStepRlp(s.step, s.parentHash) //if err != nil { // return false, err //} //message := crypto.Keccak256(sRlp) /* let correct_proposer = step_proposer(validators, &self.parent_hash, self.step); publickey::verify_address(&correct_proposer, &self.signature.into(), &message) .map_err(|e| e.into()) */ return true, nil } //nolint func (s *EmptyStep) author() (common.Address, error) { sRlp, err := EmptyStepRlp(s.step, s.parentHash) if err != nil { return common.Address{}, err } message := crypto.Keccak256(sRlp) public, err := secp256k1.RecoverPubkey(message, s.signature) if err != nil { return common.Address{}, err } ecdsa, err := crypto.UnmarshalPubkey(public) if err != nil { return common.Address{}, err } return crypto.PubkeyToAddress(*ecdsa), nil } type EmptyStepSet struct { lock sync.Mutex list []*EmptyStep } func (s *EmptyStepSet) Less(i, j int) bool { return s.list[i].Less(s.list[j]) } func (s *EmptyStepSet) Swap(i, j int) { s.list[i], s.list[j] = s.list[j], s.list[i] } func (s *EmptyStepSet) Len() int { return len(s.list) } func (s *EmptyStepSet) Sort() { s.lock.Lock() defer s.lock.Unlock() sort.Stable(s) } func (s *EmptyStepSet) ForEach(f func(int, *EmptyStep)) { s.lock.Lock() defer s.lock.Unlock() for i, el := range s.list { f(i, el) } } func EmptyStepFullRlp(signature []byte, emptyStepRlp []byte) ([]byte, error) { type A struct { s []byte r []byte } return rlp.EncodeToBytes(A{s: signature, r: emptyStepRlp}) } func EmptyStepRlp(step uint64, parentHash common.Hash) ([]byte, error) { type A struct { s uint64 h common.Hash } return rlp.EncodeToBytes(A{s: step, h: parentHash}) } //nolint type unAssembledHeader struct { hash common.Hash number uint64 signers []common.Address } type unAssembledHeaders struct { l *list.List } func (u unAssembledHeaders) PushBack(header *unAssembledHeader) { u.l.PushBack(header) } func (u unAssembledHeaders) PushFront(header *unAssembledHeader) { u.l.PushFront(header) } func (u unAssembledHeaders) Pop() *unAssembledHeader { e := u.l.Front() if e == nil { return nil } u.l.Remove(e) return e.Value.(*unAssembledHeader) } func (u unAssembledHeaders) Front() *unAssembledHeader { e := u.l.Front() if e == nil { return nil } return e.Value.(*unAssembledHeader) } // RollingFinality checker for authority round consensus. // Stores a chain of unfinalized hashes that can be pushed onto. //nolint type RollingFinality struct { headers unAssembledHeaders //nolint signers *SimpleList signCount map[common.Address]uint lastPushed *common.Hash // Option<H256>, } // NewRollingFinality creates a blank finality checker under the given validator set. func NewRollingFinality(signers []common.Address) *RollingFinality { return &RollingFinality{ signers: NewSimpleList(signers), headers: unAssembledHeaders{l: list.New()}, signCount: map[common.Address]uint{}, } } // Clears the finality status, but keeps the validator set. 
func (f *RollingFinality) print(num uint64) { if num > DEBUG_LOG_FROM { h := f.headers fmt.Printf("finality_heads: %d\n", num) i := 0 for e := h.l.Front(); e != nil; e = e.Next() { i++ a := e.Value.(*unAssembledHeader) fmt.Printf("\t%d,%x\n", a.number, a.signers[0]) } if i == 0 { fmt.Printf("\tempty\n") } } } func (f *RollingFinality) clear() { f.headers = unAssembledHeaders{l: list.New()} f.signCount = map[common.Address]uint{} f.lastPushed = nil } // Push a hash onto the rolling finality checker (implying `subchain_head` == head.parent) // // Fails if `signer` isn't a member of the active validator set. // Returns a list of all newly finalized headers. func (f *RollingFinality) push(head common.Hash, num uint64, signers []common.Address) (newlyFinalized []unAssembledHeader, err error) { for i := range signers { if !f.hasSigner(signers[i]) { return nil, fmt.Errorf("unknown validator") } } f.addSigners(signers) f.headers.PushBack(&unAssembledHeader{hash: head, number: num, signers: signers}) for f.isFinalized() { e := f.headers.Pop() if e == nil { panic("headers length always greater than sign count length") } f.removeSigners(e.signers) newlyFinalized = append(newlyFinalized, *e) } f.lastPushed = &head return newlyFinalized, nil } // isFinalized returns whether the first entry in `self.headers` is finalized. func (f *RollingFinality) isFinalized() bool { e := f.headers.Front() if e == nil { return false } return len(f.signCount)*2 > len(f.signers.validators) } func (f *RollingFinality) hasSigner(signer common.Address) bool { for j := range f.signers.validators { if f.signers.validators[j] == signer { return true } } return false } func (f *RollingFinality) addSigners(signers []common.Address) bool { for i := range signers { count, ok := f.signCount[signers[i]] if ok { f.signCount[signers[i]] = count + 1 } else { f.signCount[signers[i]] = 1 } } return false } func (f *RollingFinality) removeSigners(signers []common.Address) { for i := range signers { count, ok := f.signCount[signers[i]] if !ok { panic("all hashes in `header` should have entries in `sign_count` for their signers") //continue } if count <= 1 { delete(f.signCount, signers[i]) } else { f.signCount[signers[i]] = count - 1 } } } func (f *RollingFinality) buildAncestrySubChain(get func(hash common.Hash) ([]common.Address, common.Hash, common.Hash, uint64, bool), parentHash, epochTransitionHash common.Hash) error { // starts from chainHeadParentHash f.clear() for { signers, blockHash, newParentHash, blockNum, ok := get(parentHash) if !ok { return nil } if blockHash == epochTransitionHash { return nil } for i := range signers { if !f.hasSigner(signers[i]) { return fmt.Errorf("unknown validator: blockNum=%d", blockNum) } } if f.lastPushed == nil { copyHash := parentHash f.lastPushed = &copyHash } f.addSigners(signers) f.headers.PushFront(&unAssembledHeader{hash: blockHash, number: blockNum, signers: signers}) // break when we've got our first finalized block. if f.isFinalized() { e := f.headers.Pop() if e == nil { panic("we just pushed a block") } f.removeSigners(e.signers) //log.Info("[aura] finality encountered already finalized block", "hash", e.hash.String(), "number", e.number) break } parentHash = newParentHash } return nil }
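The calculateScore comment in the file above defines AuRa difficulty as sqrt(U256::max_value()) + parent_step - current_step + current_empty_steps, i.e. a fixed base of 2^128 - 1 that shrinks the further the sealer is past its parent's step. Below is a minimal, self-contained sketch of the same arithmetic, assuming made-up step numbers and using math/big in place of the uint256 package used in the file:

package main

import (
    "fmt"
    "math/big"
)

// score mirrors calculateScore above: (2^128 - 1) + parentStep - currentStep + emptySteps.
func score(parentStep, currentStep, emptySteps uint64) *big.Int {
    base := new(big.Int).Lsh(big.NewInt(1), 128)
    base.Sub(base, big.NewInt(1)) // sqrt(U256::max_value()) == 2^128 - 1
    base.Add(base, new(big.Int).SetUint64(parentStep))
    base.Sub(base, new(big.Int).SetUint64(currentStep))
    base.Add(base, new(big.Int).SetUint64(emptySteps))
    return base
}

func main() {
    onTurn := score(100, 101, 0) // sealed on the very next step after the parent
    late := score(100, 103, 0)   // sealed two steps later
    fmt.Println(onTurn.Cmp(late) > 0) // true: the on-turn header carries higher difficulty
}

The subtraction only matters for ordering: a header sealed closer to its parent's step gets a higher difficulty, which is the value GenerateSeal compares against expectedDiff before attempting to seal.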
1
22,942
why did you remove `verifyHeaders`?
ledgerwatch-erigon
go
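RollingFinality in the file above keeps a queue of unfinalized headers and finalizes from the front once more than half of the validator set has signed somewhere in that queue (isFinalized tests len(signCount)*2 > len(validators)). A rough stand-alone sketch of just that counting rule, with hypothetical validator names in place of addresses and without the per-header bookkeeping the real checker does:

package main

import "fmt"

// majoritySigned reports whether the distinct signers seen across the pending
// headers form a strict majority of the validator set, mirroring isFinalized.
func majoritySigned(validators []string, pendingSigners [][]string) bool {
    seen := map[string]bool{}
    for _, signers := range pendingSigners {
        for _, s := range signers {
            seen[s] = true
        }
    }
    return len(seen)*2 > len(validators)
}

func main() {
    validators := []string{"A", "B", "C", "D"} // hypothetical 4-validator set
    fmt.Println(majoritySigned(validators, [][]string{{"A"}, {"B"}}))        // false: 2 of 4 is not a strict majority
    fmt.Println(majoritySigned(validators, [][]string{{"A"}, {"B"}, {"C"}})) // true: 3 of 4, the oldest header can finalize
}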
@@ -66,14 +66,14 @@ class RegistrationsController < Devise::RegistrationsController if other_org.nil? redirect_to(after_sign_up_error_path_for(resource), alert: _('You cannot be assigned to other organisation since that option does not exist in the system. Please contact your system administrators.')) and return end - params[:user][:org_id] = other_org.id + params[:user][:org_id] = other_org.id end build_resource(sign_up_params) if resource.save if resource.active_for_authentication? set_flash_message :notice, :signed_up if is_navigational_format? sign_up(resource_name, resource) - UserMailer.welcome_notification(current_user).deliver + UserMailer.welcome_notification(current_user).deliver_now unless oauth.nil? # The OAuth provider could not be determined or there was no unique UID! unless oauth['provider'].nil? || oauth['uid'].nil?
1
# app/controllers/registrations_controller.rb class RegistrationsController < Devise::RegistrationsController def edit @user = current_user @prefs = @user.get_preferences(:email) @languages = Language.sorted_by_abbreviation @orgs = Org.where(parent_id: nil).order("name") @other_organisations = Org.where(parent_id: nil, is_other: true).pluck(:id) @identifier_schemes = IdentifierScheme.where(active: true).order(:name) @default_org = current_user.org if !@prefs flash[:alert] = 'No default preferences found (should be in branding.yml).' end end # GET /resource def new oauth = {provider: nil, uid: nil} IdentifierScheme.all.each do |scheme| oauth = session["devise.#{scheme.name.downcase}_data"] unless session["devise.#{scheme.name.downcase}_data"].nil? end @user = User.new unless oauth.nil? # The OAuth provider could not be determined or there was no unique UID! if oauth['provider'].nil? || oauth['uid'].nil? # flash[:alert] = _('We were unable to verify your account. Please use the following form to create a new account. You will be able to link your new account afterward.') else # Connect the new user with the identifier sent back by the OAuth provider flash[:notice] = _('Please make a choice below. After linking your details to a %{application_name} account, you will be able to sign in directly with your institutional credentials.') % {application_name: Rails.configuration.branding[:application][:name]} UserIdentifier.create(identifier_scheme: IdentifierScheme.find_by(name: oauth['provider'].downcase), identifier: oauth['uid'], user: @user) end end end # POST /resource def create oauth = {provider: nil, uid: nil} IdentifierScheme.all.each do |scheme| oauth = session["devise.#{scheme.name.downcase}_data"] unless session["devise.#{scheme.name.downcase}_data"].nil? end if !sign_up_params[:accept_terms] redirect_to after_sign_up_error_path_for(resource), alert: _('You must accept the terms and conditions to register.') elsif params[:user][:org_id].blank? && params[:user][:other_organisation].blank? redirect_to after_sign_up_error_path_for(resource), alert: _('Please select an organisation from the list, or enter your organisation\'s name.') else existing_user = User.where_case_insensitive('email', sign_up_params[:email]).first if existing_user.present? if existing_user.invitation_token.present? && !existing_user.accept_terms? existing_user.destroy # Destroys the existing user since the accept terms are nil/false. and they have an invitation # Note any existing role for that user will be deleted too. Added to accommodate issue at: # https://github.com/DMPRoadmap/roadmap/issues/322 when invited user creates an account outside the invite workflow else redirect_to after_sign_up_error_path_for(resource), alert: _('That email address is already registered.') return end end if params[:user][:org_id].blank? other_org = Org.find_by(is_other: true) if other_org.nil? redirect_to(after_sign_up_error_path_for(resource), alert: _('You cannot be assigned to other organisation since that option does not exist in the system. Please contact your system administrators.')) and return end params[:user][:org_id] = other_org.id end build_resource(sign_up_params) if resource.save if resource.active_for_authentication? set_flash_message :notice, :signed_up if is_navigational_format? sign_up(resource_name, resource) UserMailer.welcome_notification(current_user).deliver unless oauth.nil? # The OAuth provider could not be determined or there was no unique UID! unless oauth['provider'].nil? || oauth['uid'].nil? 
prov = IdentifierScheme.find_by(name: oauth['provider'].downcase) # Until we enable ORCID signups if prov.name == 'shibboleth' UserIdentifier.create(identifier_scheme: prov, identifier: oauth['uid'], user: @user) flash[:notice] = _('Welcome! You have signed up successfully with your institutional credentials. You will now be able to access your account with them.') end end end respond_with resource, location: after_sign_up_path_for(resource) else set_flash_message :notice, :"signed_up_but_#{resource.inactive_message}" if is_navigational_format? respond_with resource, location: after_inactive_sign_up_path_for(resource) end else clean_up_passwords resource redirect_to after_sign_up_error_path_for(resource), alert: _('Error processing registration. Please check that you have entered a valid email address and that your chosen password is at least 8 characters long.') end end end def update if user_signed_in? then @prefs = @user.get_preferences(:email) @orgs = Org.where(parent_id: nil).order("name") @default_org = current_user.org @other_organisations = Org.where(parent_id: nil, is_other: true).pluck(:id) @identifier_schemes = IdentifierScheme.where(active: true).order(:name) @languages = Language.sorted_by_abbreviation if params[:skip_personal_details] == "true" do_update_password(current_user, params) else do_update(require_password=needs_password?(current_user, params)) end else render(:file => File.join(Rails.root, 'public/403.html'), :status => 403, :layout => false) end end private # check if we need password to update user data # ie if password or email was changed # extend this as needed def needs_password?(user, params) user.email != params[:user][:email] || params[:user][:password].present? end def do_update(require_password = true, confirm = false) mandatory_params = true message = _('Save Unsuccessful.') + ' ' # added to by below, overwritten otherwise # ensure that the required fields are present if params[:user][:email].blank? message +=_('Please enter an email address.') + ' ' mandatory_params &&= false end if params[:user][:firstname].blank? message +=_('Please enter a First name.') + ' ' mandatory_params &&= false end if params[:user][:surname].blank? message +=_('Please enter a Last name.') + ' ' mandatory_params &&= false end if params[:user][:org_id].blank? && params[:user][:other_organisation].blank? message += _('Please select an organisation from the list, or enter your organisation\'s name.') mandatory_params &&= false end if mandatory_params # has the user entered all the details if require_password # user is changing email or password if current_user.email != params[:user][:email] # if user is changing email if params[:user][:password].blank? # password needs to be present message = _('Please enter your password to change email address.') successfully_updated = false else successfully_updated = current_user.update_with_password(password_update) end else # This case is never reached since this method when called with require_password = true is because the email changed. 
The case for password changed goes to do_update_password instead successfully_updated = current_user.update_without_password(update_params) end else # password not required successfully_updated = current_user.update_without_password(update_params) end else successfully_updated = false end #unlink shibboleth from user's details if params[:unlink_flag] == 'true' then current_user.update_attributes(shibboleth_id: "") end #render the correct page if successfully_updated if confirm current_user.skip_confirmation! # will error out if confirmable is turned off in user model current_user.save! end session[:locale] = current_user.get_locale unless current_user.get_locale.nil? set_gettext_locale #Method defined at controllers/application_controller.rb set_flash_message :notice, success_message(_('profile'), _('saved')) sign_in current_user, bypass: true # Sign in the user bypassing validation in case his password changed redirect_to "#{edit_user_registration_path}\#personal-details", notice: success_message(_('profile'), _('saved')) else flash[:alert] = message.blank? ? failed_update_error(current_user, _('profile')) : message render "edit" end end def do_update_password(current_user, params) if params[:user][:current_password].blank? message = _('Please enter your current password') elsif params[:user][:password_confirmation].blank? message = _('Please enter a password confirmation') elsif params[:user][:password] != params[:user][:password_confirmation] message = _('Password and comfirmation must match') else successfully_updated = current_user.update_with_password(password_update) end #render the correct page if successfully_updated session[:locale] = current_user.get_locale unless current_user.get_locale.nil? set_gettext_locale #Method defined at controllers/application_controller.rb set_flash_message :notice, success_message(_('password'), _('saved')) sign_in current_user, bypass: true # TODO this method is deprecated redirect_to "#{edit_user_registration_path}\#password-details", notice: success_message(_('password'), _('saved')) else flash[:alert] = message.blank? ? failed_update_error(current_user, _('profile')) : message redirect_to "#{edit_user_registration_path}\#password-details" end end def sign_up_params params.require(:user).permit(:email, :password, :password_confirmation, :firstname, :surname, :recovery_email, :accept_terms, :org_id, :other_organisation) end def update_params params.require(:user).permit(:firstname, :org_id, :other_organisation, :language_id, :surname) end def password_update params.require(:user).permit(:email, :firstname, :current_password, :org_id, :language_id, :password, :password_confirmation, :surname, :other_organisation) end end
1
17,954
Thanks for cleaning up these deprecated calls
DMPRoadmap-roadmap
rb
@@ -110,13 +110,13 @@ def _create_user(username, password='', email=None, is_admin=False, requires_activation=True, requires_reset=False): def check_conflicts(username, email): if not VALID_USERNAME_RE.match(username): - raise ValidationException("Unacceptable username.") + raise ValidationException("Invalid username.") if blacklisted_name(username): - raise ValidationException("Unacceptable username.") + raise ValidationException("Invalid username.") if email is None: raise ValidationException("Must provide email.") if not VALID_EMAIL_RE.match(email): - raise ValidationException("Unacceptable email.") + raise ValidationException("Invalid email.") if User.query.filter_by(name=username).one_or_none(): raise ConflictException("Username already taken.") if User.query.filter_by(email=email).one_or_none():
1
import base64 from datetime import datetime, timedelta import json import uuid from flask import redirect, request import itsdangerous import jwt from passlib.context import CryptContext from sqlalchemy import func from . import app, db from .const import (VALID_EMAIL_RE, VALID_USERNAME_RE, blacklisted_name, ACTIVATE_SALT, PASSWORD_RESET_SALT, MAX_LINK_AGE, CODE_EXP_MINUTES) from .mail import (send_activation_email, send_reset_email, send_new_user_email, send_welcome_email) from .models import ActivationToken, Code, PasswordResetToken, Token, User CATALOG_URL = app.config['CATALOG_URL'] pwd_context = CryptContext( schemes=['pbkdf2_sha512', 'django_pbkdf2_sha256'], pbkdf2_sha512__default_rounds=500000 ) # Each round should take about half a second, # 500000 rounds experimentally determined class AuthException(Exception): """ Base class for Auth exceptions. """ def __init__(self, msg): super().__init__() self.message = msg class ValidationException(AuthException): """ Represents a failure to deserialize a signed link, a password that is too short, etc. """ pass class ConflictException(AuthException): """ Represents an exception involving an attempt to register a username that already exists, etc. """ pass class NotFoundException(AuthException): """ Represents an exception involving an attempted operation on an entity that could not be located. """ pass class CredentialException(AuthException): """ Represents an exception involving things like an incorrect token, an incorrect password, etc. """ pass def generate_uuid(): return str(uuid.uuid4()) def hash_password(password): return pwd_context.hash(password) def get_admins(): return [user.email for user in User.query.filter_by(is_admin=True).all()] def activate_response(link): payload = verify_activation_link(link) if payload: _activate_user(User.query.filter_by(id=payload['id']).with_for_update().one_or_none()) db.session.commit() return redirect("{CATALOG_URL}/signin".format(CATALOG_URL=CATALOG_URL), code=302) return redirect("{CATALOG_URL}/activation_error".format(CATALOG_URL=CATALOG_URL), code=302) def validate_password(password): if len(password) < 8: raise ValidationException("Password must be at least 8 characters long.") def reset_password_from_email(email): user = User.query.filter_by(email=email).with_for_update().one_or_none() if user: reset_password(user) def change_password(raw_password, link): validate_password(raw_password) payload = verify_reset_link(link) if not payload: raise CredentialException("Reset token invalid") user_id = payload['id'] user = User.query.filter_by(id=user_id).with_for_update().one_or_none() if not user: raise NotFoundException("User not found") user.password = hash_password(raw_password) db.session.add(user) def _create_user(username, password='', email=None, is_admin=False, requires_activation=True, requires_reset=False): def check_conflicts(username, email): if not VALID_USERNAME_RE.match(username): raise ValidationException("Unacceptable username.") if blacklisted_name(username): raise ValidationException("Unacceptable username.") if email is None: raise ValidationException("Must provide email.") if not VALID_EMAIL_RE.match(email): raise ValidationException("Unacceptable email.") if User.query.filter_by(name=username).one_or_none(): raise ConflictException("Username already taken.") if User.query.filter_by(email=email).one_or_none(): raise ConflictException("Email already taken.") check_conflicts(username, email) validate_password(password) new_password = "" if requires_reset else 
hash_password(password) if requires_activation: is_active = False else: is_active = True user = User( id=generate_uuid(), name=username, password=new_password, email=email, is_active=is_active, is_admin=is_admin ) db.session.add(user) if requires_activation: db.session.flush() # necessary due to link token foreign key relationship with User send_activation_email(user, generate_activation_link(user.id)) if requires_reset: db.session.flush() # necessary due to link token foreign key relationship with User send_welcome_email(user, user.email, generate_reset_link(user.id)) def _update_user(username, password=None, email=None, is_admin=None, is_active=None): existing_user = User.query.filter_by(name=username).with_for_update().one_or_none() if not existing_user: raise NotFoundException("User to update not found") if password is not None: new_password = hash_password(password) existing_user.password = new_password if email is not None: existing_user.email = email if is_admin is not None: existing_user.is_admin = is_admin if is_active is not None: existing_user.is_active = is_active db.session.add(existing_user) def _activate_user(user): if user is None: raise NotFoundException("User not found") user.is_active = True db.session.add(user) admins = get_admins() if admins: send_new_user_email(user.name, user.email, admins) def update_last_login(user): user.last_login = func.now() db.session.add(user) def _delete_user(user): if user: revoke_user_code_tokens(user) db.session.delete(user) else: raise NotFoundException("User to delete not found") return user def _enable_user(user): if user: user.is_active = True db.session.add(user) else: raise NotFoundException("User to enable not found") def _disable_user(user): if user: revoke_user_code_tokens(user) user.is_active = False db.session.add(user) else: raise NotFoundException("User to disable not found") def issue_code(user): user_id = user.id expires = datetime.utcnow() + timedelta(minutes=CODE_EXP_MINUTES) code = Code(user_id=user_id, code=generate_uuid(), expires=expires) db.session.add(code) return encode_code({'id': user_id, 'code': code.code}) def encode_code(code_dict): return base64.b64encode(bytes(json.dumps(code_dict), 'utf-8')).decode('utf8') def decode_code(code_str): try: return json.loads(base64.b64decode(code_str).decode('utf8')) except Exception: raise ValidationException("Decoding code failed") def decode_token(token_str): try: return jwt.decode(token_str, app.secret_key, algorithm='HS256') except jwt.exceptions.InvalidTokenError: raise ValidationException("Token could not be deserialized") def check_token(user_id, token): return Token.query.filter_by(user_id=user_id, token=token).one_or_none() is not None def _verify(payload): user_id = payload['id'] uuid = payload['uuid'] user = User.query.filter_by(id=user_id).one_or_none() if user is None: raise CredentialException('User ID invalid') if not check_token(user_id, uuid): raise CredentialException('Token invalid') return user def verify_token_string(token_string): token = decode_token(token_string) user = _verify(token) return user def exp_from_token(token): token = decode_token(token) return token['exp'] def revoke_token_string(token_str): token = decode_token(token_str) user_id = token['id'] uuid = token['uuid'] return revoke_token(user_id, uuid) def revoke_token(user_id, token): found = Token.query.filter_by(user_id=user_id, token=token).with_for_update().one_or_none() if found is None: return False db.session.delete(found) return True def revoke_tokens(user): tokens = 
Token.query.filter_by(user_id=user.id).with_for_update().all() for token in tokens: db.session.delete(token) def revoke_user_code_tokens(user): codes = Code.query.filter_by(user_id=user.id).with_for_update().all() for code in codes: db.session.delete(code) revoke_tokens(user) def get_exp(mins=30): return datetime.utcnow() + timedelta(minutes=mins) def issue_token(user, exp=None): uuid = generate_uuid() token = Token(user_id=user.id, token=uuid) db.session.add(token) exp = exp or get_exp() payload = {'id': user.id, 'uuid': uuid, 'exp': exp} token = jwt.encode(payload, app.secret_key, algorithm='HS256') return token.decode('utf-8') def consume_code_string(code_str): code = decode_code(code_str) return consume_code(code['id'], code['code']) def consume_code(user_id, code): found = Code.query.filter_by(user_id=user_id, code=code).with_for_update().one_or_none() if found is None: raise ValidationException("Code not found") if found.expires.timetuple() < datetime.utcnow().timetuple(): db.session.delete(found) raise CredentialException("Code expired") db.session.delete(found) return User.query.filter_by(id=user_id).one_or_none() def verify_hash(password, pw_hash): try: if not pwd_context.verify(password, pw_hash): raise CredentialException('Password verification failed') except ValueError: raise CredentialException('Password verification failed') def try_login(user, password): if not user.is_active: return False try: verify_hash(password, user.password) except CredentialException: return False update_last_login(user) return True linkgenerator = itsdangerous.URLSafeTimedSerializer( app.secret_key, salt='quilt' ) def dump_link(payload, salt=None): link = linkgenerator.dumps(payload, salt=salt) return link.replace('.', '~') def load_link(link, max_age, salt=None): payload = link.replace('~', '.') return linkgenerator.loads(payload, max_age=max_age, salt=salt) def generate_activation_token(user_id): new_token = ActivationToken(user_id=user_id, token=generate_uuid()) db.session.add(new_token) return new_token.token def consume_activation_token(user_id, token): found = ( ActivationToken.query .filter_by(user_id=user_id, token=token) .with_for_update() .one_or_none() ) if not found: return False db.session.delete(found) return True def generate_reset_token(user_id): reset_token = generate_uuid() PasswordResetToken.upsert(user_id, reset_token) return reset_token def consume_reset_token(user_id, token): found = ( PasswordResetToken .query .filter_by(user_id=user_id, token=token) .with_for_update() .one_or_none() ) if not found: return False db.session.delete(found) return True def generate_activation_link(user_id): token = generate_activation_token(user_id) payload = {'id': user_id, 'token': token} return dump_link(payload, ACTIVATE_SALT) def generate_reset_link(user_id): token = generate_reset_token(user_id) payload = {'id': user_id, 'token': token} return dump_link(payload, PASSWORD_RESET_SALT) def verify_activation_link(link, max_age=None): max_age = max_age if max_age is not None else MAX_LINK_AGE try: payload = load_link(link, max_age=max_age, salt=ACTIVATE_SALT) if not consume_activation_token(payload['id'], payload['token']): return None return payload except (TypeError, KeyError, ValueError, itsdangerous.BadData): return None def verify_reset_link(link, max_age=None): max_age = max_age if max_age is not None else MAX_LINK_AGE try: payload = load_link(link, max_age=max_age, salt=PASSWORD_RESET_SALT) if not consume_reset_token(payload['id'], payload['token']): return None return payload except 
(TypeError, KeyError, ValueError, itsdangerous.BadData): return None def reset_password(user, set_unusable=False): if set_unusable: user.password = '' db.session.add(user) link = generate_reset_link(user.id) send_reset_email(user, link)
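The activation and reset links in the file above are salted, time-limited signed payloads (itsdangerous with ACTIVATE_SALT / PASSWORD_RESET_SALT and MAX_LINK_AGE, with '.' swapped for '~'), backed by one-time token rows that are consumed on use. The sketch below shows only the signing half of that pattern, in Go, as an HMAC over the payload plus issue time; the secret and salt strings are placeholders and the one-time-token bookkeeping is deliberately left out, so this is an illustration of the idea rather than the project's implementation.

package main

import (
    "crypto/hmac"
    "crypto/sha256"
    "encoding/base64"
    "errors"
    "fmt"
    "strconv"
    "strings"
    "time"
)

var secret = []byte("placeholder-secret") // stand-in for the application secret key

// sign authenticates payload|issuedAt with an HMAC keyed by salt+secret.
func sign(payload, salt string, now time.Time) string {
    body := payload + "|" + strconv.FormatInt(now.Unix(), 10)
    mac := hmac.New(sha256.New, append([]byte(salt), secret...))
    mac.Write([]byte(body))
    return base64.RawURLEncoding.EncodeToString([]byte(body)) + "~" +
        base64.RawURLEncoding.EncodeToString(mac.Sum(nil))
}

// verify rejects links with a bad MAC or older than maxAge, then returns the payload.
func verify(link, salt string, maxAge time.Duration, now time.Time) (string, error) {
    parts := strings.SplitN(link, "~", 2)
    if len(parts) != 2 {
        return "", errors.New("malformed link")
    }
    body, err := base64.RawURLEncoding.DecodeString(parts[0])
    if err != nil {
        return "", err
    }
    gotMAC, err := base64.RawURLEncoding.DecodeString(parts[1])
    if err != nil {
        return "", err
    }
    mac := hmac.New(sha256.New, append([]byte(salt), secret...))
    mac.Write(body)
    if !hmac.Equal(gotMAC, mac.Sum(nil)) {
        return "", errors.New("bad signature")
    }
    cut := strings.LastIndex(string(body), "|")
    if cut < 0 {
        return "", errors.New("malformed body")
    }
    issued, err := strconv.ParseInt(string(body)[cut+1:], 10, 64)
    if err != nil {
        return "", err
    }
    if now.Sub(time.Unix(issued, 0)) > maxAge {
        return "", errors.New("link expired")
    }
    return string(body)[:cut], nil
}

func main() {
    link := sign(`{"id":"user-1"}`, "activate", time.Now())
    payload, err := verify(link, "activate", time.Hour, time.Now())
    fmt.Println(payload, err) // {"id":"user-1"} <nil>
}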
1
16,867
@akarve this breaks error catching logic on the front end, I'll fix this. That's why I prefer to use some machine-friendly error codes that won't change frequently.
quiltdata-quilt
py
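The review comment on this record argues for stable, machine-readable error codes rather than front-end logic that matches on message strings (which the "Unacceptable" to "Invalid" rewording broke). A hypothetical sketch of that idea, not any project's actual API: keep a short code constant next to the human-readable message so clients branch on the code and the wording can change freely.

package main

import "fmt"

// AuthError pairs a stable code (the contract with the frontend) with a
// human-readable message that can be reworded without breaking callers.
type AuthError struct {
    Code    string // e.g. "invalid_username"
    Message string
}

func (e *AuthError) Error() string { return e.Code + ": " + e.Message }

func validateUsername(name string) error {
    if name == "" {
        return &AuthError{Code: "invalid_username", Message: "Invalid username."}
    }
    return nil
}

func main() {
    err := validateUsername("")
    if ae, ok := err.(*AuthError); ok && ae.Code == "invalid_username" {
        fmt.Println("client branches on the code, not the text:", ae.Message)
    }
}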
@@ -115,7 +115,7 @@ func parseResource(resource string) (string, string, string) { // is resource type require remote query func requireRemoteQuery(resType string) bool { - return resType == model.ResourceTypeConfigmap || resType == model.ResourceTypeSecret || resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints + return resType == model.ResourceTypeConfigmap || resType == model.ResourceTypeSecret || resType == constants.ResourceTypeEndpoints } func isConnected() bool {
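The hunk above stops treating services as remote-query resources, so only configmaps, secrets and endpoints fall back to a cloud query when the local metastore misses. A stand-alone sketch of the resulting check together with the <namespace>/<restype>[/resid] parsing it relies on; the literal resource-type strings stand in for the project's model/constants values and are assumptions of this sketch.

package main

import (
    "fmt"
    "strings"
)

// parseResource follows the <namespace>/<restype>[/resid] convention:
// it returns the full key, the resource type, and the optional id.
func parseResource(resource string) (string, string, string) {
    tokens := strings.Split(resource, "/")
    resType, resID := "", ""
    switch len(tokens) {
    case 2:
        resType = tokens[1]
    case 3:
        resType = tokens[1]
        resID = tokens[2]
    }
    return resource, resType, resID
}

// requireRemoteQuery reflects the patched behaviour: services are answered
// from the local store only, while these three types may still go to the cloud.
func requireRemoteQuery(resType string) bool {
    return resType == "configmap" || resType == "secret" || resType == "endpoints"
}

func main() {
    _, resType, resID := parseResource("default/endpoints/kubernetes")
    fmt.Println(resType, resID, requireRemoteQuery(resType)) // endpoints kubernetes true

    _, resType, _ = parseResource("default/service/kubernetes")
    fmt.Println(resType, requireRemoteQuery(resType)) // service false
}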
1
package metamanager import ( "encoding/json" "fmt" "strings" "time" "github.com/kubeedge/beehive/pkg/common/config" "github.com/kubeedge/beehive/pkg/common/log" "github.com/kubeedge/beehive/pkg/common/util" "github.com/kubeedge/beehive/pkg/core/context" "github.com/kubeedge/beehive/pkg/core/model" "github.com/kubeedge/kubeedge/common/constants" connect "github.com/kubeedge/kubeedge/edge/pkg/common/cloudconnection" messagepkg "github.com/kubeedge/kubeedge/edge/pkg/common/message" "github.com/kubeedge/kubeedge/edge/pkg/common/modules" "github.com/kubeedge/kubeedge/edge/pkg/metamanager/dao" "k8s.io/api/core/v1" ) //Constants to check metamanager processes const ( OK = "OK" DefaultSyncInterval = 60 GroupResource = "resource" OperationMetaSync = "meta-internal-sync" OperationFunctionAction = "action" OperationFunctionActionResult = "action_result" EdgeFunctionModel = "edgefunction" CloudFunctionModel = "funcmgr" CloudControlerModel = "controller" ) var connected = false // sendModuleGroupName is the name of the group to which we send the message var sendModuleGroupName = modules.HubGroup // sendModuleName is the name of send module for remote query var sendModuleName = "websocket" func init() { var err error groupName, err := config.CONFIG.GetValue("metamanager.context-send-group").ToString() if err == nil && groupName != "" { sendModuleGroupName = groupName } edgeSite, err := config.CONFIG.GetValue("metamanager.edgesite").ToBool() if err == nil && edgeSite == true { connected = true } moduleName, err := config.CONFIG.GetValue("metamanager.context-send-module").ToString() if err == nil && moduleName != "" { sendModuleName = moduleName } } func feedbackError(err error, info string, request model.Message, c *context.Context) { errInfo := "Something wrong" if err != nil { errInfo = fmt.Sprintf(info+": %v", err) } errResponse := model.NewErrorMessage(&request, errInfo).SetRoute(MetaManagerModuleName, request.GetGroup()) if request.GetSource() == modules.EdgedModuleName { send2Edged(errResponse, request.IsSync(), c) } else { send2Cloud(errResponse, c) } } func send2Edged(message *model.Message, sync bool, c *context.Context) { if sync { c.SendResp(*message) } else { c.Send(modules.EdgedModuleName, *message) } } func send2EdgeMesh(message *model.Message, sync bool, c *context.Context) { if sync { c.SendResp(*message) } else { c.Send(modules.EdgeMeshModuleName, *message) } } func send2Cloud(message *model.Message, c *context.Context) { c.Send2Group(sendModuleGroupName, *message) } // Resource format: <namespace>/<restype>[/resid] // return <reskey, restype, resid> func parseResource(resource string) (string, string, string) { tokens := strings.Split(resource, constants.ResourceSep) resType := "" resID := "" switch len(tokens) { case 2: resType = tokens[len(tokens)-1] case 3: resType = tokens[len(tokens)-2] resID = tokens[len(tokens)-1] default: } return resource, resType, resID } // is resource type require remote query func requireRemoteQuery(resType string) bool { return resType == model.ResourceTypeConfigmap || resType == model.ResourceTypeSecret || resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints } func isConnected() bool { return connected } func msgDebugInfo(message *model.Message) string { return fmt.Sprintf("msgID[%s] resource[%s]", message.GetID(), message.GetResource()) } func resourceUnchanged(resType string, resKey string, content []byte) bool { if resType == model.ResourceTypePodStatus { dbRecord, err := dao.QueryMeta("key", resKey) if err == nil && 
len(*dbRecord) > 0 && string(content) == (*dbRecord)[0] { return true } } return false } func (m *metaManager) processInsert(message model.Message) { var err error var content []byte switch message.GetContent().(type) { case []uint8: content = message.GetContent().([]byte) default: content, err = json.Marshal(message.GetContent()) if err != nil { log.LOGGER.Errorf("marshal update message content failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to marshal message content", message, m.context) return } } resKey, resType, _ := parseResource(message.GetResource()) meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} err = dao.SaveMeta(meta) if err != nil { log.LOGGER.Errorf("save meta failed, %s: %v", msgDebugInfo(&message), err) feedbackError(err, "Error to save meta to DB", message, m.context) return } if resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints { // Notify edgemesh send2EdgeMesh(&message, false, m.context) } else { // Notify edged send2Edged(&message, false, m.context) } resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) } func (m *metaManager) processUpdate(message model.Message) { var err error var content []byte switch message.GetContent().(type) { case []uint8: content = message.GetContent().([]byte) default: content, err = json.Marshal(message.GetContent()) if err != nil { log.LOGGER.Errorf("marshal update message content failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to marshal message content", message, m.context) return } } resKey, resType, _ := parseResource(message.GetResource()) if resType == constants.ResourceTypeServiceList || resType == constants.ResourceTypeEndpointsList || resType == model.ResourceTypePodlist { switch resType { case constants.ResourceTypeEndpointsList: var epsList []v1.Endpoints err = json.Unmarshal(content, &epsList) if err != nil { log.LOGGER.Errorf("Unmarshal update message content failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to unmarshal", message, m.context) return } for _, eps := range epsList { data, err := json.Marshal(eps) if err != nil { log.LOGGER.Errorf("Marshal endpoints content failed, %v", eps) continue } meta := &dao.Meta{ Key: fmt.Sprintf("%s/%s/%s", eps.Namespace, constants.ResourceTypeEndpoints, eps.Name), Type: constants.ResourceTypeEndpoints, Value: string(data)} err = dao.InsertOrUpdate(meta) if err != nil { log.LOGGER.Errorf("Update meta failed, %v", eps) continue } } send2EdgeMesh(&message, false, m.context) resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) return case constants.ResourceTypeServiceList: var svcList []v1.Service err = json.Unmarshal(content, &svcList) if err != nil { log.LOGGER.Errorf("Unmarshal update message content failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to unmarshal", message, m.context) return } for _, svc := range svcList { data, err := json.Marshal(svc) if err != nil { log.LOGGER.Errorf("Marshal service content failed, %v", svc) continue } meta := &dao.Meta{ Key: fmt.Sprintf("%s/%s/%s", svc.Namespace, constants.ResourceTypeService, svc.Name), Type: constants.ResourceTypeService, Value: string(data)} err = dao.InsertOrUpdate(meta) if err != nil { log.LOGGER.Errorf("Update meta failed, %v", svc) continue } } send2EdgeMesh(&message, false, m.context) resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) return case model.ResourceTypePodlist: meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} 
err = dao.InsertOrUpdate(meta) if err != nil { log.LOGGER.Errorf("Update meta failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to update meta to DB", message, m.context) return } send2EdgeMesh(&message, false, m.context) resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) return default: log.LOGGER.Warnf("Resource type %s unknown", resType) return } } if resourceUnchanged(resType, resKey, content) { resp := message.NewRespByMessage(&message, OK) send2Edged(resp, message.IsSync(), m.context) log.LOGGER.Infof("resouce[%s] unchanged, no notice", resKey) return } meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} err = dao.InsertOrUpdate(meta) if err != nil { log.LOGGER.Errorf("update meta failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to update meta to DB", message, m.context) return } switch message.GetSource() { //case core.EdgedModuleName: case modules.EdgedModuleName: send2Cloud(&message, m.context) resp := message.NewRespByMessage(&message, OK) send2Edged(resp, message.IsSync(), m.context) case CloudControlerModel: if resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints { send2EdgeMesh(&message, message.IsSync(), m.context) } else { send2Edged(&message, message.IsSync(), m.context) } resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) case CloudFunctionModel: m.context.Send(EdgeFunctionModel, message) case EdgeFunctionModel: send2Cloud(&message, m.context) } } func (m *metaManager) processResponse(message model.Message) { var err error var content []byte switch message.GetContent().(type) { case []uint8: content = message.GetContent().([]byte) default: content, err = json.Marshal(message.GetContent()) if err != nil { log.LOGGER.Errorf("marshal response message content failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to marshal message content", message, m.context) return } } resKey, resType, _ := parseResource(message.GetResource()) meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} err = dao.InsertOrUpdate(meta) if err != nil { log.LOGGER.Errorf("update meta failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to update meta to DB", message, m.context) return } // Notify edged or edgemesh if the data if coming from cloud if message.GetSource() == CloudControlerModel { if resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints { send2EdgeMesh(&message, message.IsSync(), m.context) } else { send2Edged(&message, message.IsSync(), m.context) } } else { // Send to cloud if the update request is coming from edged send2Cloud(&message, m.context) } } func (m *metaManager) processDelete(message model.Message) { err := dao.DeleteMetaByKey(message.GetResource()) if err != nil { log.LOGGER.Errorf("delete meta failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to delete meta to DB", message, m.context) return } _, resType, _ := parseResource(message.GetResource()) if resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints { // Notify edgemesh send2EdgeMesh(&message, false, m.context) resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) return } // Notify edged send2Edged(&message, false, m.context) resp := message.NewRespByMessage(&message, OK) send2Cloud(resp, m.context) } func (m *metaManager) processQuery(message model.Message) { resKey, resType, resID := parseResource(message.GetResource()) var metas *[]string var err error if 
requireRemoteQuery(resType) && isConnected() { metas, err = dao.QueryMeta("key", resKey) if err != nil || len(*metas) == 0 { m.processRemoteQuery(message) } else { resp := message.NewRespByMessage(&message, *metas) resp.SetRoute(MetaManagerModuleName, resp.GetGroup()) send2Edged(resp, message.IsSync(), m.context) } return } if resID == "" { // Get specific type resources metas, err = dao.QueryMeta("type", resType) } else { metas, err = dao.QueryMeta("key", resKey) } if err != nil { log.LOGGER.Errorf("query meta failed, %s", msgDebugInfo(&message)) feedbackError(err, "Error to query meta in DB", message, m.context) } else { resp := message.NewRespByMessage(&message, *metas) resp.SetRoute(MetaManagerModuleName, resp.GetGroup()) if resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints || resType == model.ResourceTypePodlist { send2EdgeMesh(resp, message.IsSync(), m.context) } else { send2Edged(resp, message.IsSync(), m.context) } } } func (m *metaManager) processRemoteQuery(message model.Message) { go func() { // TODO: retry originalID := message.GetID() message.UpdateID() resp, err := m.context.SendSync(sendModuleName, message, 60*time.Second) // TODO: configurable log.LOGGER.Infof("########## process get: req[%+v], resp[%+v], err[%+v]", message, resp, err) if err != nil { log.LOGGER.Errorf("remote query failed: %v", err) feedbackError(err, "Error to query meta in DB", message, m.context) return } var content []byte switch resp.GetContent().(type) { case []uint8: content = resp.GetContent().([]byte) default: content, err = json.Marshal(resp.GetContent()) if err != nil { log.LOGGER.Errorf("marshal remote query response content failed, %s", msgDebugInfo(&resp)) feedbackError(err, "Error to marshal message content", message, m.context) return } } resKey, resType, _ := parseResource(message.GetResource()) meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} err = dao.InsertOrUpdate(meta) if err != nil { log.LOGGER.Errorf("update meta failed, %s", msgDebugInfo(&resp)) } resp.BuildHeader(resp.GetID(), originalID, resp.GetTimestamp()) if resType == constants.ResourceTypeService || resType == constants.ResourceTypeEndpoints { send2EdgeMesh(&resp, message.IsSync(), m.context) } else { send2Edged(&resp, message.IsSync(), m.context) } }() } func (m *metaManager) processNodeConnection(message model.Message) { content, _ := message.GetContent().(string) log.LOGGER.Infof("node connection event occur: %s", content) if content == connect.CloudConnected { connected = true } else if content == connect.CloudDisconnected { connected = false } } func (m *metaManager) processSync(message model.Message) { m.syncPodStatus() } func (m *metaManager) syncPodStatus() { log.LOGGER.Infof("start to sync pod status") podStatusRecords, err := dao.QueryAllMeta("type", model.ResourceTypePodStatus) if err != nil { log.LOGGER.Errorf("list pod status failed: %v", err) return } if len(*podStatusRecords) <= 0 { log.LOGGER.Infof("list pod status, no record, skip sync") return } var namespace string content := make([]interface{}, 0, len(*podStatusRecords)) for _, v := range *podStatusRecords { if namespace == "" { namespace, _, _, _ = util.ParseResourceEdge(v.Key, model.QueryOperation) } podKey := strings.Replace(v.Key, constants.ResourceSep+model.ResourceTypePodStatus+constants.ResourceSep, constants.ResourceSep+model.ResourceTypePod+constants.ResourceSep, 1) podRecord, err := dao.QueryMeta("key", podKey) if err != nil { log.LOGGER.Errorf("query pod[%s] failed: %v", podKey, err) 
return } if len(*podRecord) <= 0 { // pod already deleted, clear the corresponding podstatus record err = dao.DeleteMetaByKey(v.Key) log.LOGGER.Infof("pod[%s] already deleted, clear podstatus record, result:%v", podKey, err) continue } var podStatus interface{} err = json.Unmarshal([]byte(v.Value), &podStatus) if err != nil { log.LOGGER.Errorf("unmarshal podstatus[%s] failed, content[%s]: %v", v.Key, v.Value, err) continue } content = append(content, podStatus) } msg := model.NewMessage("").BuildRouter(MetaManagerModuleName, GroupResource, namespace+constants.ResourceSep+model.ResourceTypePodStatus, model.UpdateOperation).FillBody(content) send2Cloud(msg, m.context) log.LOGGER.Infof("sync pod status successful, %s", msgDebugInfo(msg)) } func (m *metaManager) processFunctionAction(message model.Message) { var err error var content []byte switch message.GetContent().(type) { case []uint8: content = message.GetContent().([]byte) default: content, err = json.Marshal(message.GetContent()) if err != nil { log.LOGGER.Errorf("marshal save message content failed, %s: %v", msgDebugInfo(&message), err) feedbackError(err, "Error to marshal message content", message, m.context) return } } resKey, resType, _ := parseResource(message.GetResource()) meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} err = dao.SaveMeta(meta) if err != nil { log.LOGGER.Errorf("save meta failed, %s: %v", msgDebugInfo(&message), err) feedbackError(err, "Error to save meta to DB", message, m.context) return } m.context.Send(EdgeFunctionModel, message) } func (m *metaManager) processFunctionActionResult(message model.Message) { var err error var content []byte switch message.GetContent().(type) { case []uint8: content = message.GetContent().([]byte) default: content, err = json.Marshal(message.GetContent()) if err != nil { log.LOGGER.Errorf("marshal save message content failed, %s: %v", msgDebugInfo(&message), err) feedbackError(err, "Error to marshal message content", message, m.context) return } } resKey, resType, _ := parseResource(message.GetResource()) meta := &dao.Meta{ Key: resKey, Type: resType, Value: string(content)} err = dao.SaveMeta(meta) if err != nil { log.LOGGER.Errorf("save meta failed, %s: %v", msgDebugInfo(&message), err) feedbackError(err, "Error to save meta to DB", message, m.context) return } send2Cloud(&message, m.context) } func (m *metaManager) process(message model.Message) { resource := message.GetOperation() switch resource { case model.InsertOperation: m.processInsert(message) case model.UpdateOperation: m.processUpdate(message) case model.DeleteOperation: m.processDelete(message) case model.QueryOperation: m.processQuery(message) case model.ResponseOperation: m.processResponse(message) case messagepkg.OperationNodeConnection: m.processNodeConnection(message) case OperationMetaSync: m.processSync(message) case OperationFunctionAction: m.processFunctionAction(message) case OperationFunctionActionResult: m.processFunctionActionResult(message) } } func (m *metaManager) mainLoop() { go func() { for { if msg, err := m.context.Receive(m.Name()); err == nil { log.LOGGER.Infof("get a message %+v", msg) m.process(msg) } else { log.LOGGER.Errorf("get a message %+v: %v", msg, err) } } }() }
1
12,548
We support processing remoteQuery for Service. Why are we removing it?
kubeedge-kubeedge
go
@@ -6,10 +6,17 @@ require 'bolt/task' require 'json' class TransportAPI < Sinatra::Base + # This disables Sinatra's error page generation + set :show_exceptions, false + get '/' do 200 end + get '/500_error' do + raise 'Unexpected error' + end + post '/ssh/run_task' do content_type :json
1
# frozen_string_literal: true require 'sinatra' require 'bolt' require 'bolt/task' require 'json' class TransportAPI < Sinatra::Base get '/' do 200 end post '/ssh/run_task' do content_type :json body = JSON.parse(request.body.read) keys = %w[user password port ssh-key-content connect-timeout run-as-command run-as tmpdir host-key-check known-hosts-content private-key-content sudo-password] opts = body['target'].select { |k, _| keys.include? k } if opts['private-key-content'] && opts['password'] return [400, "Only include one of 'password' and 'private-key-content'"] end if opts['private-key-content'] opts['private-key'] = { 'key-data' => opts['private-key-content'] } opts.delete('private-key-content') end target = [Bolt::Target.new(body['target']['hostname'], opts)] task = Bolt::Task.new(body['task']) parameters = body['parameters'] || {} executor = Bolt::Executor.new(load_config: false) # Since this will only be on one node we can just return the first result results = executor.run_task(target, task, parameters) [200, results.first.to_json] end post '/winrm/run_task' do content_type :json body = JSON.parse(request.body.read) keys = %w[user password port connect-timeout ssl ssl-verify tmpdir cacert extensions] opts = body['target'].select { |k, _| keys.include? k } opts['protocol'] = 'winrm' target = [Bolt::Target.new(body['target']['hostname'], opts)] task = Bolt::Task.new(body['task']) parameters = body['parameters'] || {} executor = Bolt::Executor.new(load_config: false) # Since this will only be on one node we can just return the first result results = executor.run_task(target, task, parameters) [200, results.first.to_json] end end
1
9,405
This is specifically for testing?
puppetlabs-bolt
rb
@@ -71,7 +71,6 @@ class TagsController extends AbstractAdmin { $view = $this->createViewModel(); $view->setTemplate('admin/tags/home'); - $view->statistics = $this->getTable('resourcetags')->getStatistics(true); return $view; }
1
<?php /** * Admin Tag Controller * * PHP version 5 * * Copyright (C) Villanova University 2010. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * @category VuFind * @package Controller * @author Demian Katz <[email protected]> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org Main Site */ namespace VuFindAdmin\Controller; /** * Class controls distribution of tags and resource tags. * * @category VuFind * @package Controller * @author Demian Katz <[email protected]> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org Main Site */ class TagsController extends AbstractAdmin { /** * Params * * @var array */ protected $params; /** * Get the url parameters * * @param string $param A key to check the url params for * * @return string */ protected function getParam($param) { return (isset($this->params[$param])) ? $this->params[$param] : $this->params()->fromPost( $param, $this->params()->fromQuery($param, null) ); } /** * Tag Details * * @return \Zend\View\Model\ViewModel */ public function homeAction() { $view = $this->createViewModel(); $view->setTemplate('admin/tags/home'); $view->statistics = $this->getTable('resourcetags')->getStatistics(true); return $view; } /** * Manage Tags * * @return \Zend\View\Model\ViewModel */ public function manageAction() { $this->params = $this->params()->fromQuery(); $view = $this->createViewModel(); $view->setTemplate('admin/tags/manage'); $view->type = !is_null($this->params()->fromPost('type', null)) ? $this->params()->fromPost('type') : $this->params()->fromQuery('type', null); $view->uniqueTags = $this->getUniqueTags()->toArray(); $view->uniqueUsers = $this->getUniqueUsers()->toArray(); $view->uniqueResources = $this->getUniqueResources()->toArray(); $view->params = $this->params; return $view; } /** * List Tags * * @return \Zend\View\Model\ViewModel */ public function listAction() { $this->params = $this->params()->fromQuery(); $view = $this->createViewModel(); $view->setTemplate('admin/tags/list'); $view->uniqueTags = $this->getUniqueTags()->toArray(); $view->uniqueUsers = $this->getUniqueUsers()->toArray(); $view->uniqueResources = $this->getUniqueResources()->toArray(); $view->results = $this->getResourceTags(); $view->params = $this->params; return $view; } /** * Delete Tags * * @return \Zend\View\Model\ViewModel */ public function deleteAction() { $this->params = $this->params()->fromPost(); $tags = $this->getTable('ResourceTags'); $origin = $this->params() ->fromPost('origin', $this->params()->fromQuery('origin')); $action = ("list" == $origin) ? 'List' : 'Manage'; $originUrl = $this->url() ->fromRoute('admin/tags', ['action' => $action]); if ($action == 'List') { $originUrl .= '?' . 
http_build_query( [ 'user_id' => $this->getParam('user_id'), 'resource_id' => $this->getParam('resource_id'), 'tag_id' => $this->getParam('tag_id'), ] ); } $newUrl = $this->url()->fromRoute('admin/tags', ['action' => 'Delete']); $confirm = $this->params()->fromPost('confirm', false); // Delete All if ("manage" == $origin || !is_null($this->getRequest()->getPost('deleteFilter')) || !is_null($this->getRequest()->getQuery('deleteFilter')) ) { if (false === $confirm) { return $this->confirmTagsDeleteByFilter($tags, $originUrl, $newUrl); } $delete = $this->deleteResourceTagsByFilter(); } else { // Delete by ID // Fail if we have nothing to delete: $ids = is_null($this->getRequest()->getPost('deletePage')) ? $this->params()->fromPost('ids') : $this->params()->fromPost('idsAll'); if (!is_array($ids) || empty($ids)) { $this->flashMessenger()->addMessage('bulk_noitems_advice', 'error'); return $this->redirect()->toUrl($originUrl); } if (false === $confirm) { return $this->confirmTagsDelete($ids, $originUrl, $newUrl); } $delete = $tags->deleteByIdArray($ids); } if (0 == $delete) { $this->flashMessenger()->addMessage('tags_delete_fail', 'error'); return $this->redirect()->toUrl($originUrl); } $this->flashMessenger()->addMessage( [ 'msg' => 'tags_deleted', 'tokens' => ['%count%' => $delete] ], 'success' ); return $this->redirect()->toUrl($originUrl); } /** * Get confirmation messages. * * @param int $count Count of tags that are about to be deleted * * @return array */ protected function getConfirmDeleteMessages($count) { $messages = []; $user = $this->getTable('user') ->select(['id' => $this->getParam('user_id')]) ->current(); $userMsg = (false !== $user) ? $user->username . " (" . $user->id . ")" : "All"; $tag = $this->getTable('tags') ->select(['id' => $this->getParam('tag_id')]) ->current(); $tagMsg = (false !== $tag) ? $tag->tag . " (" . $tag->id . ")" : " All"; $resource = $this->getTable('resource') ->select(['id' => $this->getParam('resource_id')]) ->current(); $resourceMsg = (false !== $resource) ? $resource->title . " (" . $resource->id . 
")" : " All"; $messages[] = [ 'msg' => 'tag_delete_warning', 'tokens' => ['%count%' => $count] ]; if (false !== $user || false !== $tag || false !== $resource) { $messages[] = [ 'msg' => 'tag_delete_filter', 'tokens' => [ '%username%' => $userMsg, '%tag%' => $tagMsg, '%resource%' => $resourceMsg ] ]; } $messages[] = ['msg' => 'confirm_delete']; return $messages; } /** * Confirm Delete by Id * * @param array $ids A list of resource tag Ids * @param string $originUrl An origin url * @param string $newUrl The url of the desired action * * @return mixed */ protected function confirmTagsDelete($ids, $originUrl, $newUrl) { $count = count($ids); $data = [ 'data' => [ 'confirm' => $newUrl, 'cancel' => $originUrl, 'title' => "confirm_delete_tags_brief", 'messages' => $this->getConfirmDeleteMessages($count), 'ids' => $ids, 'extras' => [ 'origin' => 'list', 'user_id' => $this->getParam('user_id'), 'tag_id' => $this->getParam('tag_id'), 'resource_id' => $this->getParam('resource_id'), 'ids' => $ids ] ] ]; return $this->forwardTo('Confirm', 'Confirm', $data); } /** * Confirm Tag Delete by Filter * * @param object $tagModel A Tag object * @param string $originUrl An origin url * @param string $newUrl The url of the desired action * * @return mixed */ protected function confirmTagsDeleteByFilter($tagModel, $originUrl, $newUrl) { $count = $tagModel->getResourceTags( $this->convertFilter($this->getParam('user_id')), $this->convertFilter($this->getParam('resource_id')), $this->convertFilter($this->getParam('tag_id')) )->getTotalItemCount(); $data = [ 'data' => [ 'confirm' => $newUrl, 'cancel' => $originUrl, 'title' => "confirm_delete_tags_brief", 'messages' => $this->getConfirmDeleteMessages($count), 'extras' => [ 'origin' => 'manage', 'type' => $this->getParam('type'), 'user_id' => $this->getParam('user_id'), 'tag_id' => $this->getParam('tag_id'), 'resource_id' => $this->getParam('resource_id'), 'deleteFilter' => $this->getParam('deleteFilter') ] ] ]; return $this->forwardTo('Confirm', 'Confirm', $data); } /** * Gets a list of unique resources based on the url params * * @return \Zend\Db\ResultSet */ protected function getUniqueResources() { return $this->getTable('ResourceTags')->getUniqueResources( $this->convertFilter($this->getParam('user_id')), $this->convertFilter($this->getParam('resource_id')), $this->convertFilter($this->getParam('tag_id')) ); } /** * Gets a list of unique tags based on the url params * * @return \Zend\Db\ResultSet */ protected function getUniqueTags() { return $this->getTable('ResourceTags')->getUniqueTags( $this->convertFilter($this->getParam('user_id')), $this->convertFilter($this->getParam('resource_id')), $this->convertFilter($this->getParam('tag_id')) ); } /** * Gets a list of unique users based on the url params * * @return \Zend\Db\ResultSet */ protected function getUniqueUsers() { return $this->getTable('ResourceTags')->getUniqueUsers( $this->convertFilter($this->getParam('user_id')), $this->convertFilter($this->getParam('resource_id')), $this->convertFilter($this->getParam('tag_id')) ); } /** * Converts empty params and "ALL" to null * * @param string $value A parameter to check * * @return string|null A modified parameter */ protected function convertFilter($value) { return ("ALL" !== $value && "" !== $value && null !== $value) ? $value : null; } /** * Get and set a list of resource tags * * @return \Zend\Paginator\Paginator */ protected function getResourceTags() { $currentPage = isset($this->params['page']) ? 
$this->params['page'] : "1"; $resourceTags = $this->getTable('ResourceTags'); $tags = $resourceTags->getResourceTags( $this->convertFilter($this->getParam('user_id')), $this->convertFilter($this->getParam('resource_id')), $this->convertFilter($this->getParam('tag_id')), $this->getParam('order'), $currentPage ); return $tags; } /** * Delete tags based on filter settings. * * @return int Number of IDs deleted */ protected function deleteResourceTagsByFilter() { $tags = $this->getResourceTags(); $ids = []; foreach ($tags as $tag) { $ids[] = $tag->id; } return $this->getTable('ResourceTags')->deleteByIdArray($ids); } }
1
24,899
I think this is related to the social stats and probably does not need to be removed.
vufind-org-vufind
php
@@ -13,6 +13,7 @@ import { PhysicalIndexToValueMap as IndexToValueMap } from './../../translations const privatePool = new WeakMap(); const COLUMN_SIZE_MAP_NAME = 'autoColumnSize'; +/* eslint-disable jsdoc/require-description-complete-sentence */ /** * @plugin AutoColumnSize *
1
import BasePlugin from './../_base'; import { arrayEach, arrayFilter, arrayReduce, arrayMap } from './../../helpers/array'; import { cancelAnimationFrame, requestAnimationFrame } from './../../helpers/feature'; import GhostTable from './../../utils/ghostTable'; import { isObject, hasOwnProperty } from './../../helpers/object'; import { valueAccordingPercent, rangeEach } from './../../helpers/number'; import { registerPlugin } from './../../plugins'; import SamplesGenerator from './../../utils/samplesGenerator'; import { isPercentValue } from './../../helpers/string'; import { ViewportColumnsCalculator } from './../../3rdparty/walkontable/src'; import { PhysicalIndexToValueMap as IndexToValueMap } from './../../translations'; const privatePool = new WeakMap(); const COLUMN_SIZE_MAP_NAME = 'autoColumnSize'; /** * @plugin AutoColumnSize * * @description * This plugin allows to set column widths based on their widest cells. * * By default, the plugin is declared as `undefined`, which makes it enabled (same as if it was declared as `true`). * Enabling this plugin may decrease the overall table performance, as it needs to calculate the widths of all cells to * resize the columns accordingly. * If you experience problems with the performance, try turning this feature off and declaring the column widths manually. * * Column width calculations are divided into sync and async part. Each of this parts has their own advantages and * disadvantages. Synchronous calculations are faster but they block the browser UI, while the slower asynchronous * operations don't block the browser UI. * * To configure the sync/async distribution, you can pass an absolute value (number of columns) or a percentage value to a config object: * ```js * // as a number (300 columns in sync, rest async) * autoColumnSize: {syncLimit: 300},. * * // as a string (percent) * autoColumnSize: {syncLimit: '40%'}, * ```. * * To configure this plugin see {@link Options#autoColumnSize}. * * @example * ```js * const hot = new Handsontable(document.getElementById('example'), { * data: getData(), * autoColumnSize: true * }); * // Access to plugin instance: * const plugin = hot.getPlugin('autoColumnSize'); * * plugin.getColumnWidth(4); * * if (plugin.isEnabled()) { * // code... * } * ``` */ class AutoColumnSize extends BasePlugin { static get CALCULATION_STEP() { return 50; } static get SYNC_CALCULATION_LIMIT() { return 50; } constructor(hotInstance) { super(hotInstance); privatePool.set(this, { /** * Cached column header names. It is used to diff current column headers with previous state and detect which * columns width should be updated. * * @private * @type {Array} */ cachedColumnHeaders: [], }); /** * Instance of {@link GhostTable} for rows and columns size calculations. * * @private * @type {GhostTable} */ this.ghostTable = new GhostTable(this.hot); /** * Instance of {@link SamplesGenerator} for generating samples necessary for columns width calculations. * * @private * @type {SamplesGenerator} */ this.samplesGenerator = new SamplesGenerator((row, column) => { const cellMeta = this.hot.getCellMeta(row, column); let cellValue = ''; if (!cellMeta.spanned) { cellValue = this.hot.getDataAtCell(row, column); } let bundleCountSeed = 0; if (cellMeta.label) { const { value: labelValue, property: labelProperty } = cellMeta.label; let labelText = ''; if (labelValue) { labelText = typeof labelValue === 'function' ? 
labelValue(row, column, this.hot.colToProp(column), cellValue) : labelValue; } else if (labelProperty) { const labelData = this.hot.getDataAtRowProp(row, labelProperty); labelText = labelData !== null ? labelData : ''; } bundleCountSeed = labelText.length; } return { value: cellValue, bundleCountSeed }; }); /** * `true` only if the first calculation was performed. * * @private * @type {boolean} */ this.firstCalculation = true; /** * `true` if the size calculation is in progress. * * @type {boolean} */ this.inProgress = false; /** * Number of already measured columns (we already know their sizes). * * @type {number} */ this.measuredColumns = 0; /** * PhysicalIndexToValueMap to keep and track widths for physical column indexes. * * @private * @type {PhysicalIndexToValueMap} */ this.columnWidthsMap = new IndexToValueMap(); this.hot.columnIndexMapper.registerMap(COLUMN_SIZE_MAP_NAME, this.columnWidthsMap); // Leave the listener active to allow auto-sizing the columns when the plugin is disabled. // This is necesseary for width recalculation for resize handler doubleclick (ManualColumnResize). this.addHook('beforeColumnResize', (size, column, isDblClick) => this.onBeforeColumnResize(size, column, isDblClick)); } /** * Checks if the plugin is enabled in the handsontable settings. This method is executed in {@link Hooks#beforeInit} * hook and if it returns `true` than the {@link AutoColumnSize#enablePlugin} method is called. * * @returns {boolean} */ isEnabled() { return this.hot.getSettings().autoColumnSize !== false && !this.hot.getSettings().colWidths; } /** * Enables the plugin functionality for this Handsontable instance. */ enablePlugin() { if (this.enabled) { return; } const setting = this.hot.getSettings().autoColumnSize; if (setting && setting.useHeaders !== null && setting.useHeaders !== void 0) { this.ghostTable.setSetting('useHeaders', setting.useHeaders); } this.setSamplingOptions(); this.addHook('afterLoadData', () => this.onAfterLoadData()); this.addHook('beforeChange', changes => this.onBeforeChange(changes)); this.addHook('beforeRender', force => this.onBeforeRender(force)); this.addHook('modifyColWidth', (width, col) => this.getColumnWidth(col, width)); this.addHook('afterInit', () => this.onAfterInit()); super.enablePlugin(); } /** * Updates the plugin state. This method is executed when {@link Core#updateSettings} is invoked. */ updatePlugin() { const changedColumns = this.findColumnsWhereHeaderWasChanged(); if (changedColumns.length) { this.clearCache(changedColumns); this.calculateVisibleColumnsWidth(); } super.updatePlugin(); } /** * Disables the plugin functionality for this Handsontable instance. */ disablePlugin() { super.disablePlugin(); // Leave the listener active to allow auto-sizing the columns when the plugin is disabled. // This is necesseary for width recalculation for resize handler doubleclick (ManualColumnResize). this.addHook('beforeColumnResize', (size, column, isDblClick) => this.onBeforeColumnResize(size, column, isDblClick)); } /** * Calculates visible columns width. 
*/ calculateVisibleColumnsWidth() { const rowsCount = this.hot.countRows(); // Keep last column widths unchanged for situation when all rows was deleted or trimmed (pro #6) if (!rowsCount) { return; } const force = this.hot.renderCall; const firstVisibleColumn = this.getFirstVisibleColumn(); const lastVisibleColumn = this.getLastVisibleColumn(); if (firstVisibleColumn === -1 || lastVisibleColumn === -1) { return; } this.calculateColumnsWidth({ from: firstVisibleColumn, to: lastVisibleColumn }, void 0, force); } /** * Calculates a columns width. * * @param {number|object} colRange Visual column index or an object with `from` and `to` visual indexes as a range. * @param {number|object} rowRange Visual row index or an object with `from` and `to` visual indexes as a range. * @param {boolean} [force=false] If `true` the calculation will be processed regardless of whether the width exists in the cache. */ calculateColumnsWidth(colRange = { from: 0, to: this.hot.countCols() - 1 }, rowRange = { from: 0, to: this.hot.countRows() - 1 }, force = false) { // eslint-disable-line max-len const columnsRange = typeof colRange === 'number' ? { from: colRange, to: colRange } : colRange; const rowsRange = typeof rowRange === 'number' ? { from: rowRange, to: rowRange } : rowRange; rangeEach(columnsRange.from, columnsRange.to, (visualColumn) => { let physicalColumn = this.hot.toPhysicalColumn(visualColumn); if (physicalColumn === null) { physicalColumn = visualColumn; } if (force || (this.columnWidthsMap.getValueAtIndex(physicalColumn) === null && !this.hot._getColWidthFromSettings(physicalColumn))) { const samples = this.samplesGenerator.generateColumnSamples(visualColumn, rowsRange); arrayEach(samples, ([column, sample]) => this.ghostTable.addColumn(column, sample)); } }); if (this.ghostTable.columns.length) { this.hot.batch(() => { this.ghostTable.getWidths((visualColumn, width) => { const physicalColumn = this.hot.toPhysicalColumn(visualColumn); this.columnWidthsMap.setValueAtIndex(physicalColumn, width); }); }); this.measuredColumns = columnsRange.to + 1; this.ghostTable.clean(); } } /** * Calculates all columns width. The calculated column will be cached in the {@link AutoColumnSize#widths} property. * To retrieve width for specified column use {@link AutoColumnSize#getColumnWidth} method. * * @param {object|number} rowRange Row index or an object with `from` and `to` properties which define row range. 
*/ calculateAllColumnsWidth(rowRange = { from: 0, to: this.hot.countRows() - 1 }) { let current = 0; const length = this.hot.countCols() - 1; let timer = null; this.inProgress = true; const loop = () => { // When hot was destroyed after calculating finished cancel frame if (!this.hot) { cancelAnimationFrame(timer); this.inProgress = false; return; } this.calculateColumnsWidth({ from: current, to: Math.min(current + AutoColumnSize.CALCULATION_STEP, length) }, rowRange); current = current + AutoColumnSize.CALCULATION_STEP + 1; if (current < length) { timer = requestAnimationFrame(loop); } else { cancelAnimationFrame(timer); this.inProgress = false; // @TODO Should call once per render cycle, currently fired separately in different plugins this.hot.view.wt.wtOverlays.adjustElementsSize(); } }; const syncLimit = this.getSyncCalculationLimit(); // sync if (this.firstCalculation && syncLimit >= 0) { this.calculateColumnsWidth({ from: 0, to: syncLimit }, rowRange); this.firstCalculation = false; current = syncLimit + 1; } // async if (current < length) { loop(); } else { this.inProgress = false; } } /** * Sets the sampling options. * * @private */ setSamplingOptions() { const setting = this.hot.getSettings().autoColumnSize; const samplingRatio = setting && hasOwnProperty(setting, 'samplingRatio') ? this.hot.getSettings().autoColumnSize.samplingRatio : void 0; const allowSampleDuplicates = setting && hasOwnProperty(setting, 'allowSampleDuplicates') ? this.hot.getSettings().autoColumnSize.allowSampleDuplicates : void 0; if (samplingRatio && !isNaN(samplingRatio)) { this.samplesGenerator.setSampleCount(parseInt(samplingRatio, 10)); } if (allowSampleDuplicates) { this.samplesGenerator.setAllowDuplicates(allowSampleDuplicates); } } /** * Recalculates all columns width (overwrite cache values). */ recalculateAllColumnsWidth() { if (this.hot.view && this.hot.view.wt.wtTable.isVisible()) { this.clearCache(); this.calculateAllColumnsWidth(); } } /** * Gets value which tells how many columns should be calculated synchronously (rest of the columns will be calculated * asynchronously). The limit is calculated based on `syncLimit` set to `autoColumnSize` option (see {@link Options#autoColumnSize}). * * @returns {number} */ getSyncCalculationLimit() { /* eslint-disable no-bitwise */ let limit = AutoColumnSize.SYNC_CALCULATION_LIMIT; const colsLimit = this.hot.countCols() - 1; if (isObject(this.hot.getSettings().autoColumnSize)) { limit = this.hot.getSettings().autoColumnSize.syncLimit; if (isPercentValue(limit)) { limit = valueAccordingPercent(colsLimit, limit); } else { // Force to Number limit >>= 0; } } return Math.min(limit, colsLimit); } /** * Gets the calculated column width. * * @param {number} column Visual column index. * @param {number} [defaultWidth] Default column width. It will be picked up if no calculated width found. * @param {boolean} [keepMinimum=true] If `true` then returned value won't be smaller then 50 (default column width). * @returns {number} */ getColumnWidth(column, defaultWidth = void 0, keepMinimum = true) { let width = defaultWidth; if (width === void 0) { width = this.columnWidthsMap.getValueAtIndex(this.hot.toPhysicalColumn(column)); if (keepMinimum && typeof width === 'number') { width = Math.max(width, ViewportColumnsCalculator.DEFAULT_WIDTH); } } return width; } /** * Gets the first visible column. * * @returns {number} Returns visual column index, -1 if table is not rendered or if there are no columns to base the the calculations on. 
*/ getFirstVisibleColumn() { const wot = this.hot.view.wt; if (wot.wtViewport.columnsVisibleCalculator) { // Fist fully visible column is stored as renderable index. const firstFullyVisibleColumn = wot.wtTable.getFirstVisibleColumn(); if (firstFullyVisibleColumn !== -1) { return this.hot.columnIndexMapper.getVisualFromRenderableIndex(firstFullyVisibleColumn); } } if (wot.wtViewport.columnsRenderCalculator) { const firstRenderedColumn = wot.wtTable.getFirstRenderedColumn(); // There are no rendered column. if (firstRenderedColumn !== -1) { return this.hot.columnIndexMapper.getVisualFromRenderableIndex(firstRenderedColumn); } } return -1; } /** * Gets the last visible column. * * @returns {number} Returns visual column index or -1 if table is not rendered. */ getLastVisibleColumn() { const wot = this.hot.view.wt; if (wot.wtViewport.columnsVisibleCalculator) { // Last fully visible column is stored as renderable index. const lastFullyVisibleColumn = wot.wtTable.getLastVisibleColumn(); if (lastFullyVisibleColumn !== -1) { return this.hot.columnIndexMapper.getVisualFromRenderableIndex(lastFullyVisibleColumn); } } if (wot.wtViewport.columnsRenderCalculator) { // Last fully visible column is stored as renderable index. const lastRenderedColumn = wot.wtTable.getLastRenderedColumn(); // There are no rendered columns. if (lastRenderedColumn !== -1) { return this.hot.columnIndexMapper.getVisualFromRenderableIndex(lastRenderedColumn); } } return -1; } /** * Collects all columns which titles has been changed in comparison to the previous state. * * @private * @returns {Array} It returns an array of physical column indexes. */ findColumnsWhereHeaderWasChanged() { const columnHeaders = this.hot.getColHeader(); const { cachedColumnHeaders } = privatePool.get(this); const changedColumns = arrayReduce(columnHeaders, (acc, columnTitle, physicalColumn) => { const cachedColumnsLength = cachedColumnHeaders.length; if (cachedColumnsLength - 1 < physicalColumn || cachedColumnHeaders[physicalColumn] !== columnTitle) { acc.push(physicalColumn); } if (cachedColumnsLength - 1 < physicalColumn) { cachedColumnHeaders.push(columnTitle); } else { cachedColumnHeaders[physicalColumn] = columnTitle; } return acc; }, []); return changedColumns; } /** * Clears cache of calculated column widths. If you want to clear only selected columns pass an array with their indexes. * Otherwise whole cache will be cleared. * * @param {number[]} [columns] List of physical column indexes to clear. */ clearCache(columns = []) { if (columns.length) { this.hot.batch(() => { arrayEach(columns, (physicalIndex) => { this.columnWidthsMap.setValueAtIndex(physicalIndex, null); }); }); } else { this.columnWidthsMap.clear(); } } /** * Checks if all widths were calculated. If not then return `true` (need recalculate). * * @returns {boolean} */ isNeedRecalculate() { return !!arrayFilter(this.columnWidthsMap.getValues() .slice(0, this.measuredColumns), item => (item === null)).length; } /** * On before render listener. * * @private */ onBeforeRender() { this.calculateVisibleColumnsWidth(); if (this.isNeedRecalculate() && !this.inProgress) { this.calculateAllColumnsWidth(); } } /** * On after load data listener. * * @private */ onAfterLoadData() { if (this.hot.view) { this.recalculateAllColumnsWidth(); } else { // first load - initialization setTimeout(() => { if (this.hot) { this.recalculateAllColumnsWidth(); } }, 0); } } /** * On before change listener. * * @private * @param {Array} changes An array of modified data. 
*/ onBeforeChange(changes) { const changedColumns = arrayMap(changes, ([, columnProperty]) => this.hot.toPhysicalColumn(this.hot.propToCol(columnProperty))); this.clearCache(Array.from(new Set(changedColumns))); } /** * On before column resize listener. * * @private * @param {number} size Calculated new column width. * @param {number} column Visual index of the resized column. * @param {boolean} isDblClick Flag that determines whether there was a double-click. * @returns {number} */ onBeforeColumnResize(size, column, isDblClick) { let newSize = size; if (isDblClick) { this.calculateColumnsWidth(column, void 0, true); newSize = this.getColumnWidth(column, void 0, false); } return newSize; } /** * On after Handsontable init fill plugin with all necessary values. * * @private */ onAfterInit() { privatePool.get(this).cachedColumnHeaders = this.hot.getColHeader(); } /** * Destroys the plugin instance. */ destroy() { this.hot.columnIndexMapper.unregisterMap(COLUMN_SIZE_MAP_NAME); this.ghostTable.clean(); super.destroy(); } } registerPlugin('autoColumnSize', AutoColumnSize); export default AutoColumnSize;
1
17,104
Yep, jsdoc again... It seems that when the plugin description is wrapped within an `eslint-disable/enable` expression, it's not generated at all. After adding the `@class AutoColumnSize` tag right after the `@plugin` tag, the plugin appears in the docs. Please review the other plugins.
handsontable-handsontable
js
@@ -58,6 +58,13 @@ func (c *azureClient) GetVirtualMachineResourceID(ctx context.Context, principal } values := result.Values() + for len(values) == 0 { + nerr := result.NextWithContext(ctx) + if nerr != nil { + return "", errs.Wrap(nerr) + } + values = result.Values() + } if len(values) == 0 { return "", errs.New("principal %q not found", principalID) }
1
package azure import ( "context" "fmt" "github.com/Azure/azure-sdk-for-go/profiles/latest/compute/mgmt/compute" "github.com/Azure/azure-sdk-for-go/profiles/latest/network/mgmt/network" "github.com/Azure/azure-sdk-for-go/profiles/latest/resources/mgmt/resources" "github.com/Azure/go-autorest/autorest" "github.com/zeebo/errs" ) // apiClient is an interface representing all of the API methods the resolver // needs to do its job. type apiClient interface { SubscriptionID() string GetVirtualMachineResourceID(ctx context.Context, principalID string) (string, error) GetVirtualMachine(ctx context.Context, resourceGroup string, name string) (*compute.VirtualMachine, error) GetNetworkInterface(ctx context.Context, resourceGroup string, name string) (*network.Interface, error) } // azureClient implements apiClient using Azure SDK client implementations type azureClient struct { subscriptionID string r resources.Client v compute.VirtualMachinesClient n network.InterfacesClient } func newAzureClient(subscriptionID string, authorizer autorest.Authorizer) apiClient { r := resources.NewClient(subscriptionID) r.Authorizer = authorizer v := compute.NewVirtualMachinesClient(subscriptionID) v.Authorizer = authorizer n := network.NewInterfacesClient(subscriptionID) n.Authorizer = authorizer return &azureClient{ subscriptionID: subscriptionID, r: r, v: v, n: n, } } func (c *azureClient) SubscriptionID() string { return c.subscriptionID } func (c *azureClient) GetVirtualMachineResourceID(ctx context.Context, principalID string) (string, error) { filter := fmt.Sprintf("resourceType eq 'Microsoft.Compute/virtualMachines' and identity/principalId eq '%s'", principalID) result, err := c.r.List(ctx, filter, "", nil) if err != nil { return "", errs.Wrap(err) } values := result.Values() if len(values) == 0 { return "", errs.New("principal %q not found", principalID) } if len(values) > 1 { return "", errs.New("expected one result for principal %q at most", principalID) } if values[0].ID == nil || *values[0].ID == "" { return "", errs.New("resource missing ID") } return *values[0].ID, nil } func (c *azureClient) GetVirtualMachine(ctx context.Context, resourceGroup string, name string) (*compute.VirtualMachine, error) { vm, err := c.v.Get(ctx, resourceGroup, name, "") if err != nil { return nil, errs.Wrap(err) } return &vm, nil } func (c *azureClient) GetNetworkInterface(ctx context.Context, resourceGroup string, name string) (*network.Interface, error) { ni, err := c.n.Get(ctx, resourceGroup, name, "") if err != nil { return nil, errs.Wrap(err) } return &ni, nil }
1
16,101
This usage of the result doesn't look quite right. I would not expect the first page of values to be empty if there were multiple pages of results. This also obscures the error case when no values are returned (handled in the next `if` block), since the loop would instead surface a more generic Azure SDK error from `result.NextWithContext()`.
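For context, a minimal sketch of the kind of alternative the reviewer seems to be pointing at, written against the same file as the diff above. It assumes the generated resources client also exposes a `ListComplete` iterator with `NotDone`/`Value`/`NextWithContext`, as autorest-generated clients normally do; the helper name and exact shape are illustrative only, not the project's actual fix.

```go
// listPrincipalResourceID is a hypothetical variant of GetVirtualMachineResourceID.
// It drains all pages via the SDK iterator, so "principal not found" stays a
// distinct error from any paging failure reported by NextWithContext.
func (c *azureClient) listPrincipalResourceID(ctx context.Context, principalID string) (string, error) {
	filter := fmt.Sprintf("resourceType eq 'Microsoft.Compute/virtualMachines' and identity/principalId eq '%s'", principalID)
	// ListComplete is assumed here; it wraps List and follows next links internally.
	it, err := c.r.ListComplete(ctx, filter, "", nil)
	if err != nil {
		return "", errs.Wrap(err)
	}
	var ids []string
	for it.NotDone() {
		if v := it.Value(); v.ID != nil && *v.ID != "" {
			ids = append(ids, *v.ID)
		}
		if err := it.NextWithContext(ctx); err != nil {
			// Paging failures are reported as such, not folded into "not found".
			return "", errs.Wrap(err)
		}
	}
	switch {
	case len(ids) == 0:
		return "", errs.New("principal %q not found", principalID)
	case len(ids) > 1:
		return "", errs.New("expected one result for principal %q at most", principalID)
	}
	return ids[0], nil
}
```

The point of the sketch is only that the loop is bounded by the iterator and the specific not-found error is kept separate from SDK paging errors, which is what the comment above is asking for.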
spiffe-spire
go
@@ -1358,6 +1358,10 @@ func (exp *Service) GetBlockOrActionByHash(hashStr string) (explorer.GetBlkOrAct return explorer.GetBlkOrActResponse{Execution: &exe}, nil } + if exe, err := exp.GetAddressDetails(hashStr); err == nil { + return explorer.GetBlkOrActResponse{AddressDetails: &exe}, nil + } + return explorer.GetBlkOrActResponse{}, nil }
1
// Copyright (c) 2018 IoTeX // This is an alpha (internal) release and is not suitable for production. This source code is provided 'as is' and no // warranties are given as to title or non-infringement, merchantability or fitness for purpose and, to the extent // permitted by law, all liability for your use of the code is disclaimed. This source code is governed by Apache // License 2.0 that can be found in the LICENSE file. package explorer import ( "context" "encoding/hex" "fmt" "math/big" "github.com/golang/protobuf/jsonpb" "github.com/golang/protobuf/proto" peerstore "github.com/libp2p/go-libp2p-peerstore" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "go.uber.org/zap" "github.com/iotexproject/iotex-core/action" "github.com/iotexproject/iotex-core/action/protocol/multichain/mainchain" "github.com/iotexproject/iotex-core/actpool" "github.com/iotexproject/iotex-core/address" "github.com/iotexproject/iotex-core/blockchain" "github.com/iotexproject/iotex-core/config" "github.com/iotexproject/iotex-core/consensus" "github.com/iotexproject/iotex-core/dispatcher" "github.com/iotexproject/iotex-core/explorer/idl/explorer" "github.com/iotexproject/iotex-core/indexservice" "github.com/iotexproject/iotex-core/pkg/hash" "github.com/iotexproject/iotex-core/pkg/keypair" "github.com/iotexproject/iotex-core/pkg/log" iproto "github.com/iotexproject/iotex-core/proto" ) var ( // ErrInternalServer indicates the internal server error ErrInternalServer = errors.New("internal server error") // ErrTransfer indicates the error of transfer ErrTransfer = errors.New("invalid transfer") // ErrVote indicates the error of vote ErrVote = errors.New("invalid vote") // ErrExecution indicates the error of execution ErrExecution = errors.New("invalid execution") // ErrReceipt indicates the error of receipt ErrReceipt = errors.New("invalid receipt") // ErrAction indicates the error of action ErrAction = errors.New("invalid action") ) var ( requestMtc = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "iotex_explorer_request", Help: "IoTeX Explorer request counter.", }, []string{"method", "succeed"}, ) ) func init() { prometheus.MustRegister(requestMtc) } type ( // BroadcastOutbound sends a broadcast message to the whole network BroadcastOutbound func(ctx context.Context, chainID uint32, msg proto.Message) error // Neighbors returns the neighbors' addresses Neighbors func(context.Context) ([]peerstore.PeerInfo, error) // NetworkInfo returns the self network information NetworkInfo func() peerstore.PeerInfo ) // Service provide api for user to query blockchain data type Service struct { bc blockchain.Blockchain c consensus.Consensus dp dispatcher.Dispatcher ap actpool.ActPool gs GasStation broadcastHandler BroadcastOutbound neighborsHandler Neighbors networkInfoHandler NetworkInfo cfg config.Explorer idx *indexservice.Server // TODO: the way to make explorer to access the data model managed by main-chain protocol is hack. 
We need to // refactor the code later mainChain *mainchain.Protocol } // SetMainChainProtocol sets the main-chain side multi-chain protocol func (exp *Service) SetMainChainProtocol(mainChain *mainchain.Protocol) { exp.mainChain = mainChain } // GetBlockchainHeight returns the current blockchain tip height func (exp *Service) GetBlockchainHeight() (int64, error) { tip := exp.bc.TipHeight() return int64(tip), nil } // GetAddressBalance returns the balance of an address func (exp *Service) GetAddressBalance(address string) (string, error) { state, err := exp.bc.StateByAddr(address) if err != nil { return "", err } return state.Balance.String(), nil } // GetAddressDetails returns the properties of an address func (exp *Service) GetAddressDetails(address string) (explorer.AddressDetails, error) { state, err := exp.bc.StateByAddr(address) if err != nil { return explorer.AddressDetails{}, err } pendingNonce, err := exp.ap.GetPendingNonce(address) if err != nil { return explorer.AddressDetails{}, err } details := explorer.AddressDetails{ Address: address, TotalBalance: state.Balance.String(), Nonce: int64(state.Nonce), PendingNonce: int64(pendingNonce), IsCandidate: state.IsCandidate, } return details, nil } // GetLastTransfersByRange returns transfers in [-(offset+limit-1), -offset] from block // with height startBlockHeight func (exp *Service) GetLastTransfersByRange(startBlockHeight int64, offset int64, limit int64, showCoinBase bool) ([]explorer.Transfer, error) { var res []explorer.Transfer transferCount := int64(0) for height := startBlockHeight; height >= 0; height-- { var blkID string hash, err := exp.bc.GetHashByHeight(uint64(height)) if err != nil { return []explorer.Transfer{}, err } blkID = hex.EncodeToString(hash[:]) blk, err := exp.bc.GetBlockByHeight(uint64(height)) if err != nil { return []explorer.Transfer{}, err } selps := make([]action.SealedEnvelope, 0) for _, selp := range blk.Actions { act := selp.Action() if _, ok := act.(*action.Transfer); ok { selps = append(selps, selp) } } for i := len(selps) - 1; i >= 0; i-- { transferCount++ if transferCount <= offset { continue } if int64(len(res)) >= limit { return res, nil } explorerTransfer, err := convertTsfToExplorerTsf(selps[i], false) if err != nil { return []explorer.Transfer{}, errors.Wrapf(err, "failed to convert transfer %v to explorer's JSON transfer", selps[i]) } explorerTransfer.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerTransfer.BlockID = blkID res = append(res, explorerTransfer) } } return res, nil } // GetTransferByID returns transfer by transfer id func (exp *Service) GetTransferByID(transferID string) (explorer.Transfer, error) { bytes, err := hex.DecodeString(transferID) if err != nil { return explorer.Transfer{}, err } var transferHash hash.Hash256 copy(transferHash[:], bytes) return getTransfer(exp.bc, exp.ap, transferHash, exp.idx, exp.cfg.UseIndexer) } // GetTransfersByAddress returns all transfers associated with an address func (exp *Service) GetTransfersByAddress(address string, offset int64, limit int64) ([]explorer.Transfer, error) { var res []explorer.Transfer var transfers []hash.Hash256 if exp.cfg.UseIndexer { transferHistory, err := exp.idx.Indexer().GetIndexHistory(config.IndexTransfer, address) if err != nil { return []explorer.Transfer{}, err } transfers = append(transfers, transferHistory...) 
} else { transfersFromAddress, err := exp.bc.GetTransfersFromAddress(address) if err != nil { return []explorer.Transfer{}, err } transfersToAddress, err := exp.bc.GetTransfersToAddress(address) if err != nil { return []explorer.Transfer{}, err } transfersFromAddress = append(transfersFromAddress, transfersToAddress...) transfers = append(transfers, transfersFromAddress...) } for i, transferHash := range transfers { if int64(i) < offset { continue } if int64(len(res)) >= limit { break } explorerTransfer, err := getTransfer(exp.bc, exp.ap, transferHash, exp.idx, exp.cfg.UseIndexer) if err != nil { return []explorer.Transfer{}, err } res = append(res, explorerTransfer) } return res, nil } // GetUnconfirmedTransfersByAddress returns all unconfirmed transfers in actpool associated with an address func (exp *Service) GetUnconfirmedTransfersByAddress(address string, offset int64, limit int64) ([]explorer.Transfer, error) { res := make([]explorer.Transfer, 0) if _, err := exp.bc.StateByAddr(address); err != nil { return []explorer.Transfer{}, err } selps := exp.ap.GetUnconfirmedActs(address) tsfIndex := int64(0) for _, selp := range selps { act := selp.Action() transfer, ok := act.(*action.Transfer) if !ok { continue } if tsfIndex < offset { tsfIndex++ continue } if int64(len(res)) >= limit { break } explorerTransfer, err := convertTsfToExplorerTsf(selp, true) if err != nil { return []explorer.Transfer{}, errors.Wrapf(err, "failed to convert transfer %v to explorer's JSON transfer", transfer) } res = append(res, explorerTransfer) } return res, nil } // GetTransfersByBlockID returns transfers in a block func (exp *Service) GetTransfersByBlockID(blkID string, offset int64, limit int64) ([]explorer.Transfer, error) { var res []explorer.Transfer bytes, err := hex.DecodeString(blkID) if err != nil { return []explorer.Transfer{}, err } var hash hash.Hash256 copy(hash[:], bytes) blk, err := exp.bc.GetBlockByHash(hash) if err != nil { return []explorer.Transfer{}, err } var num int for _, selp := range blk.Actions { if _, ok := selp.Action().(*action.Transfer); !ok { continue } if int64(num) < offset { continue } if int64(len(res)) >= limit { break } explorerTransfer, err := convertTsfToExplorerTsf(selp, false) if err != nil { return []explorer.Transfer{}, errors.Wrapf(err, "failed to convert transfer %v to explorer's JSON transfer", selp) } explorerTransfer.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerTransfer.BlockID = blkID res = append(res, explorerTransfer) num++ } return res, nil } // GetLastVotesByRange returns votes in [-(offset+limit-1), -offset] from block // with height startBlockHeight func (exp *Service) GetLastVotesByRange(startBlockHeight int64, offset int64, limit int64) ([]explorer.Vote, error) { var res []explorer.Vote voteCount := uint64(0) for height := startBlockHeight; height >= 0; height-- { hash, err := exp.bc.GetHashByHeight(uint64(height)) if err != nil { return []explorer.Vote{}, err } blkID := hex.EncodeToString(hash[:]) blk, err := exp.bc.GetBlockByHeight(uint64(height)) if err != nil { return []explorer.Vote{}, err } selps := make([]action.SealedEnvelope, 0) for _, selp := range blk.Actions { act := selp.Action() if _, ok := act.(*action.Vote); ok { selps = append(selps, selp) } } for i := int64(len(selps) - 1); i >= 0; i-- { voteCount++ if voteCount <= uint64(offset) { continue } if int64(len(res)) >= limit { return res, nil } explorerVote, err := convertVoteToExplorerVote(selps[i], false) if err != nil { return []explorer.Vote{}, 
errors.Wrapf(err, "failed to convert vote %v to explorer's JSON vote", selps[i]) } explorerVote.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerVote.BlockID = blkID res = append(res, explorerVote) } } return res, nil } // GetVoteByID returns vote by vote id func (exp *Service) GetVoteByID(voteID string) (explorer.Vote, error) { bytes, err := hex.DecodeString(voteID) if err != nil { return explorer.Vote{}, err } var voteHash hash.Hash256 copy(voteHash[:], bytes) return getVote(exp.bc, exp.ap, voteHash, exp.idx, exp.cfg.UseIndexer) } // GetVotesByAddress returns all votes associated with an address func (exp *Service) GetVotesByAddress(address string, offset int64, limit int64) ([]explorer.Vote, error) { var res []explorer.Vote var votes []hash.Hash256 if exp.cfg.UseIndexer { voteHistory, err := exp.idx.Indexer().GetIndexHistory(config.IndexVote, address) if err != nil { return []explorer.Vote{}, err } votes = append(votes, voteHistory...) } else { votesFromAddress, err := exp.bc.GetVotesFromAddress(address) if err != nil { return []explorer.Vote{}, err } votesToAddress, err := exp.bc.GetVotesToAddress(address) if err != nil { return []explorer.Vote{}, err } votesFromAddress = append(votesFromAddress, votesToAddress...) votes = append(votes, votesFromAddress...) } for i, voteHash := range votes { if int64(i) < offset { continue } if int64(len(res)) >= limit { break } explorerVote, err := getVote(exp.bc, exp.ap, voteHash, exp.idx, exp.cfg.UseIndexer) if err != nil { return []explorer.Vote{}, err } res = append(res, explorerVote) } return res, nil } // GetUnconfirmedVotesByAddress returns all unconfirmed votes in actpool associated with an address func (exp *Service) GetUnconfirmedVotesByAddress(address string, offset int64, limit int64) ([]explorer.Vote, error) { res := make([]explorer.Vote, 0) if _, err := exp.bc.StateByAddr(address); err != nil { return []explorer.Vote{}, err } selps := exp.ap.GetUnconfirmedActs(address) voteIndex := int64(0) for _, selp := range selps { act := selp.Action() vote, ok := act.(*action.Vote) if !ok { continue } if voteIndex < offset { voteIndex++ continue } if int64(len(res)) >= limit { break } explorerVote, err := convertVoteToExplorerVote(selp, true) if err != nil { return []explorer.Vote{}, errors.Wrapf(err, "failed to convert vote %v to explorer's JSON vote", vote) } res = append(res, explorerVote) } return res, nil } // GetVotesByBlockID returns votes in a block func (exp *Service) GetVotesByBlockID(blkID string, offset int64, limit int64) ([]explorer.Vote, error) { var res []explorer.Vote bytes, err := hex.DecodeString(blkID) if err != nil { return []explorer.Vote{}, err } var hash hash.Hash256 copy(hash[:], bytes) blk, err := exp.bc.GetBlockByHash(hash) if err != nil { return []explorer.Vote{}, err } var num int for _, selp := range blk.Actions { if _, ok := selp.Action().(*action.Vote); !ok { continue } if int64(num) < offset { continue } if int64(len(res)) >= limit { break } explorerVote, err := convertVoteToExplorerVote(selp, false) if err != nil { return []explorer.Vote{}, errors.Wrapf(err, "failed to convert vote %v to explorer's JSON vote", selp) } explorerVote.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerVote.BlockID = blkID res = append(res, explorerVote) num++ } return res, nil } // GetLastExecutionsByRange returns executions in [-(offset+limit-1), -offset] from block // with height startBlockHeight func (exp *Service) GetLastExecutionsByRange(startBlockHeight int64, offset int64, 
limit int64) ([]explorer.Execution, error) { var res []explorer.Execution executionCount := uint64(0) for height := startBlockHeight; height >= 0; height-- { hash, err := exp.bc.GetHashByHeight(uint64(height)) if err != nil { return []explorer.Execution{}, err } blkID := hex.EncodeToString(hash[:]) blk, err := exp.bc.GetBlockByHeight(uint64(height)) if err != nil { return []explorer.Execution{}, err } selps := make([]action.SealedEnvelope, 0) for _, selp := range blk.Actions { act := selp.Action() if _, ok := act.(*action.Execution); ok { selps = append(selps, selp) } } for i := len(selps) - 1; i >= 0; i-- { executionCount++ if executionCount <= uint64(offset) { continue } if int64(len(res)) >= limit { return res, nil } explorerExecution, err := convertExecutionToExplorerExecution(selps[i], false) if err != nil { return []explorer.Execution{}, errors.Wrapf(err, "failed to convert execution %v to explorer's JSON execution", selps[i]) } explorerExecution.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerExecution.BlockID = blkID res = append(res, explorerExecution) } } return res, nil } // GetExecutionByID returns execution by execution id func (exp *Service) GetExecutionByID(executionID string) (explorer.Execution, error) { bytes, err := hex.DecodeString(executionID) if err != nil { return explorer.Execution{}, err } var executionHash hash.Hash256 copy(executionHash[:], bytes) return getExecution(exp.bc, exp.ap, executionHash, exp.idx, exp.cfg.UseIndexer) } // GetExecutionsByAddress returns all executions associated with an address func (exp *Service) GetExecutionsByAddress(address string, offset int64, limit int64) ([]explorer.Execution, error) { var res []explorer.Execution var executions []hash.Hash256 if exp.cfg.UseIndexer { executionHistory, err := exp.idx.Indexer().GetIndexHistory(config.IndexExecution, address) if err != nil { return []explorer.Execution{}, err } executions = append(executions, executionHistory...) } else { executionsFromAddress, err := exp.bc.GetExecutionsFromAddress(address) if err != nil { return []explorer.Execution{}, err } executionsToAddress, err := exp.bc.GetExecutionsToAddress(address) if err != nil { return []explorer.Execution{}, err } executionsFromAddress = append(executionsFromAddress, executionsToAddress...) executions = append(executions, executionsFromAddress...) 
} for i, executionHash := range executions { if int64(i) < offset { continue } if int64(len(res)) >= limit { break } explorerExecution, err := getExecution(exp.bc, exp.ap, executionHash, exp.idx, exp.cfg.UseIndexer) if err != nil { return []explorer.Execution{}, err } res = append(res, explorerExecution) } return res, nil } // GetUnconfirmedExecutionsByAddress returns all unconfirmed executions in actpool associated with an address func (exp *Service) GetUnconfirmedExecutionsByAddress(address string, offset int64, limit int64) ([]explorer.Execution, error) { res := make([]explorer.Execution, 0) if _, err := exp.bc.StateByAddr(address); err != nil { return []explorer.Execution{}, err } selps := exp.ap.GetUnconfirmedActs(address) executionIndex := int64(0) for _, selp := range selps { if _, ok := selp.Action().(*action.Execution); !ok { continue } if executionIndex < offset { executionIndex++ continue } if int64(len(res)) >= limit { break } explorerExecution, err := convertExecutionToExplorerExecution(selp, true) if err != nil { return []explorer.Execution{}, errors.Wrapf(err, "failed to convert execution %v to explorer's JSON execution", selp) } res = append(res, explorerExecution) } return res, nil } // GetExecutionsByBlockID returns executions in a block func (exp *Service) GetExecutionsByBlockID(blkID string, offset int64, limit int64) ([]explorer.Execution, error) { var res []explorer.Execution bytes, err := hex.DecodeString(blkID) if err != nil { return []explorer.Execution{}, err } var hash hash.Hash256 copy(hash[:], bytes) blk, err := exp.bc.GetBlockByHash(hash) if err != nil { return []explorer.Execution{}, err } var num int for _, selp := range blk.Actions { if _, ok := selp.Action().(*action.Execution); !ok { continue } if int64(num) < offset { continue } if int64(len(res)) >= limit { break } explorerExecution, err := convertExecutionToExplorerExecution(selp, false) if err != nil { return []explorer.Execution{}, errors.Wrapf(err, "failed to convert execution %v to explorer's JSON execution", selp) } explorerExecution.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerExecution.BlockID = blkID res = append(res, explorerExecution) num++ } return res, nil } // GetReceiptByExecutionID gets receipt with corresponding execution id // Deprecated func (exp *Service) GetReceiptByExecutionID(id string) (explorer.Receipt, error) { return exp.GetReceiptByActionID(id) } // GetReceiptByActionID gets receipt with corresponding action id func (exp *Service) GetReceiptByActionID(id string) (explorer.Receipt, error) { bytes, err := hex.DecodeString(id) if err != nil { return explorer.Receipt{}, err } var actionHash hash.Hash256 copy(actionHash[:], bytes) // get receipt from boltdb if !exp.cfg.UseIndexer { receipt, err := exp.bc.GetReceiptByActionHash(actionHash) if err != nil { return explorer.Receipt{}, err } return convertReceiptToExplorerReceipt(receipt) } // get receipt from indexer blkHash, err := exp.idx.Indexer().GetBlockByIndex(config.IndexReceipt, actionHash) if err != nil { return explorer.Receipt{}, err } blk, err := exp.bc.GetBlockByHash(blkHash) if err != nil { return explorer.Receipt{}, err } for _, receipt := range blk.Receipts { if receipt.Hash() == actionHash { return convertReceiptToExplorerReceipt(receipt) } } return explorer.Receipt{}, err } // GetCreateDeposit gets create deposit by ID func (exp *Service) GetCreateDeposit(createDepositID string) (explorer.CreateDeposit, error) { bytes, err := hex.DecodeString(createDepositID) if err != nil { return 
explorer.CreateDeposit{}, err } var createDepositHash hash.Hash256 copy(createDepositHash[:], bytes) return getCreateDeposit(exp.bc, exp.ap, createDepositHash) } // GetCreateDepositsByAddress gets the relevant create deposits of an address func (exp *Service) GetCreateDepositsByAddress( address string, offset int64, limit int64, ) ([]explorer.CreateDeposit, error) { res := make([]explorer.CreateDeposit, 0) depositsFromAddress, err := exp.bc.GetActionsFromAddress(address) if err != nil { return []explorer.CreateDeposit{}, err } for i, depositHash := range depositsFromAddress { if int64(i) < offset { continue } if int64(len(res)) >= limit { break } createDeposit, err := getCreateDeposit(exp.bc, exp.ap, depositHash) if err != nil { continue } res = append(res, createDeposit) } return res, nil } // GetSettleDeposit gets settle deposit by ID func (exp *Service) GetSettleDeposit(settleDepositID string) (explorer.SettleDeposit, error) { bytes, err := hex.DecodeString(settleDepositID) if err != nil { return explorer.SettleDeposit{}, err } var settleDepositHash hash.Hash256 copy(settleDepositHash[:], bytes) return getSettleDeposit(exp.bc, exp.ap, settleDepositHash) } // GetSettleDepositsByAddress gets the relevant settle deposits of an address func (exp *Service) GetSettleDepositsByAddress( address string, offset int64, limit int64, ) ([]explorer.SettleDeposit, error) { res := make([]explorer.SettleDeposit, 0) depositsToAddress, err := exp.bc.GetActionsToAddress(address) if err != nil { return []explorer.SettleDeposit{}, err } for i, depositHash := range depositsToAddress { if int64(i) < offset { continue } if int64(len(res)) >= limit { break } settleDeposit, err := getSettleDeposit(exp.bc, exp.ap, depositHash) if err != nil { continue } res = append(res, settleDeposit) } return res, nil } // GetLastBlocksByRange get block with height [offset-limit+1, offset] func (exp *Service) GetLastBlocksByRange(offset int64, limit int64) ([]explorer.Block, error) { var res []explorer.Block for height := offset; height >= 0 && int64(len(res)) < limit; height-- { blk, err := exp.bc.GetBlockByHeight(uint64(height)) if err != nil { return []explorer.Block{}, err } blockHeaderPb := blk.ConvertToBlockHeaderPb() hash, err := exp.bc.GetHashByHeight(uint64(height)) if err != nil { return []explorer.Block{}, err } transfers, votes, executions := action.ClassifyActions(blk.Actions) totalAmount := big.NewInt(0) totalSize := uint32(0) for _, transfer := range transfers { totalAmount.Add(totalAmount, transfer.Amount()) totalSize += transfer.TotalSize() } txRoot := blk.TxRoot() stateRoot := blk.StateRoot() deltaStateDigest := blk.DeltaStateDigest() explorerBlock := explorer.Block{ ID: hex.EncodeToString(hash[:]), Height: int64(blockHeaderPb.Height), Timestamp: blockHeaderPb.GetTimestamp().GetSeconds(), Transfers: int64(len(transfers)), Votes: int64(len(votes)), Executions: int64(len(executions)), Amount: totalAmount.String(), Size: int64(totalSize), GenerateBy: explorer.BlockGenerator{ Name: "", Address: keypair.EncodePublicKey(blk.PublicKey()), }, TxRoot: hex.EncodeToString(txRoot[:]), StateRoot: hex.EncodeToString(stateRoot[:]), DeltaStateDigest: hex.EncodeToString(deltaStateDigest[:]), } res = append(res, explorerBlock) } return res, nil } // GetBlockByID returns block by block id func (exp *Service) GetBlockByID(blkID string) (explorer.Block, error) { bytes, err := hex.DecodeString(blkID) if err != nil { return explorer.Block{}, err } var hash hash.Hash256 copy(hash[:], bytes) blk, err := exp.bc.GetBlockByHash(hash) if 
err != nil { return explorer.Block{}, err } blkHeaderPb := blk.ConvertToBlockHeaderPb() transfers, votes, executions := action.ClassifyActions(blk.Actions) totalAmount := big.NewInt(0) totalSize := uint32(0) for _, transfer := range transfers { totalAmount.Add(totalAmount, transfer.Amount()) totalSize += transfer.TotalSize() } txRoot := blk.TxRoot() stateRoot := blk.StateRoot() deltaStateDigest := blk.DeltaStateDigest() explorerBlock := explorer.Block{ ID: blkID, Height: int64(blkHeaderPb.Height), Timestamp: blkHeaderPb.GetTimestamp().GetSeconds(), Transfers: int64(len(transfers)), Votes: int64(len(votes)), Executions: int64(len(executions)), Amount: totalAmount.String(), Size: int64(totalSize), GenerateBy: explorer.BlockGenerator{ Name: "", Address: keypair.EncodePublicKey(blk.PublicKey()), }, TxRoot: hex.EncodeToString(txRoot[:]), StateRoot: hex.EncodeToString(stateRoot[:]), DeltaStateDigest: hex.EncodeToString(deltaStateDigest[:]), } return explorerBlock, nil } // GetCoinStatistic returns stats in blockchain func (exp *Service) GetCoinStatistic() (explorer.CoinStatistic, error) { stat := explorer.CoinStatistic{} tipHeight := exp.bc.TipHeight() totalTransfers, err := exp.bc.GetTotalTransfers() if err != nil { return stat, err } totalVotes, err := exp.bc.GetTotalVotes() if err != nil { return stat, err } totalExecutions, err := exp.bc.GetTotalExecutions() if err != nil { return stat, err } blockLimit := int64(exp.cfg.TpsWindow) if blockLimit <= 0 { return stat, errors.Wrapf(ErrInternalServer, "block limit is %d", blockLimit) } // avoid genesis block if int64(tipHeight) < blockLimit { blockLimit = int64(tipHeight) } blks, err := exp.GetLastBlocksByRange(int64(tipHeight), blockLimit) if err != nil { return stat, err } if len(blks) == 0 { return stat, errors.New("get 0 blocks! 
not able to calculate aps") } timeDuration := blks[0].Timestamp - blks[len(blks)-1].Timestamp // if time duration is less than 1 second, we set it to be 1 second if timeDuration == 0 { timeDuration = 1 } actionNumber := int64(0) for _, blk := range blks { actionNumber += blk.Transfers + blk.Votes + blk.Executions } aps := actionNumber / timeDuration explorerCoinStats := explorer.CoinStatistic{ Height: int64(tipHeight), Supply: blockchain.Gen.TotalSupply.String(), Transfers: int64(totalTransfers), Votes: int64(totalVotes), Executions: int64(totalExecutions), Aps: aps, } return explorerCoinStats, nil } // GetConsensusMetrics returns the latest consensus metrics func (exp *Service) GetConsensusMetrics() (explorer.ConsensusMetrics, error) { cm, err := exp.c.Metrics() if err != nil { return explorer.ConsensusMetrics{}, err } dStrs := make([]string, len(cm.LatestDelegates)) copy(dStrs, cm.LatestDelegates) var bpStr string if cm.LatestBlockProducer != "" { bpStr = cm.LatestBlockProducer } cStrs := make([]string, len(cm.Candidates)) copy(cStrs, cm.Candidates) return explorer.ConsensusMetrics{ LatestEpoch: int64(cm.LatestEpoch), LatestDelegates: dStrs, LatestBlockProducer: bpStr, Candidates: cStrs, }, nil } // GetCandidateMetrics returns the latest delegates metrics func (exp *Service) GetCandidateMetrics() (explorer.CandidateMetrics, error) { cm, err := exp.c.Metrics() if err != nil { return explorer.CandidateMetrics{}, errors.Wrapf( err, "Failed to get the candidate metrics") } delegateSet := make(map[string]bool, len(cm.LatestDelegates)) for _, d := range cm.LatestDelegates { delegateSet[d] = true } allCandidates, err := exp.bc.CandidatesByHeight(cm.LatestHeight) if err != nil { return explorer.CandidateMetrics{}, errors.Wrapf(err, "Failed to get the candidate metrics") } candidates := make([]explorer.Candidate, len(cm.Candidates)) for i, c := range allCandidates { candidates[i] = explorer.Candidate{ Address: c.Address, TotalVote: c.Votes.String(), CreationHeight: int64(c.CreationHeight), LastUpdateHeight: int64(c.LastUpdateHeight), IsDelegate: false, IsProducer: false, } if _, ok := delegateSet[c.Address]; ok { candidates[i].IsDelegate = true } if cm.LatestBlockProducer == c.Address { candidates[i].IsProducer = true } } return explorer.CandidateMetrics{ Candidates: candidates, LatestEpoch: int64(cm.LatestEpoch), LatestHeight: int64(cm.LatestHeight), }, nil } // GetCandidateMetricsByHeight returns the candidates metrics for given height. 
func (exp *Service) GetCandidateMetricsByHeight(h int64) (explorer.CandidateMetrics, error) { if h < 0 { return explorer.CandidateMetrics{}, errors.New("Invalid height") } allCandidates, err := exp.bc.CandidatesByHeight(uint64(h)) if err != nil { return explorer.CandidateMetrics{}, errors.Wrapf(err, "Failed to get the candidate metrics") } candidates := make([]explorer.Candidate, 0, len(allCandidates)) for _, c := range allCandidates { pubKey := keypair.EncodePublicKey(c.PublicKey) candidates = append(candidates, explorer.Candidate{ Address: c.Address, PubKey: pubKey, TotalVote: c.Votes.String(), CreationHeight: int64(c.CreationHeight), LastUpdateHeight: int64(c.LastUpdateHeight), }) } return explorer.CandidateMetrics{ Candidates: candidates, }, nil } // SendTransfer sends a transfer func (exp *Service) SendTransfer(tsfJSON explorer.SendTransferRequest) (resp explorer.SendTransferResponse, err error) { log.L().Debug("receive send transfer request") defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("SendTransfer", succeed).Inc() }() actPb, err := convertExplorerTransferToActionPb(&tsfJSON, exp.cfg.MaxTransferPayloadBytes) if err != nil { return explorer.SendTransferResponse{}, err } // broadcast to the network if err = exp.broadcastHandler(context.Background(), exp.bc.ChainID(), actPb); err != nil { return explorer.SendTransferResponse{}, err } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), actPb) tsf := &action.SealedEnvelope{} if err := tsf.LoadProto(actPb); err != nil { return explorer.SendTransferResponse{}, err } h := tsf.Hash() return explorer.SendTransferResponse{Hash: hex.EncodeToString(h[:])}, nil } // SendVote sends a vote func (exp *Service) SendVote(voteJSON explorer.SendVoteRequest) (resp explorer.SendVoteResponse, err error) { log.L().Debug("receive send vote request") defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("SendVote", succeed).Inc() }() selfPubKey, err := keypair.StringToPubKeyBytes(voteJSON.VoterPubKey) if err != nil { return explorer.SendVoteResponse{}, err } signature, err := hex.DecodeString(voteJSON.Signature) if err != nil { return explorer.SendVoteResponse{}, err } gasPrice, ok := big.NewInt(0).SetString(voteJSON.GasPrice, 10) if !ok { return explorer.SendVoteResponse{}, errors.New("failed to set vote gas price") } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_Vote{ Vote: &iproto.VotePb{ VoteeAddress: voteJSON.Votee, }, }, Version: uint32(voteJSON.Version), SenderPubKey: selfPubKey, Nonce: uint64(voteJSON.Nonce), GasLimit: uint64(voteJSON.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } // broadcast to the network if err := exp.broadcastHandler(context.Background(), exp.bc.ChainID(), actPb); err != nil { return explorer.SendVoteResponse{}, err } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), actPb) v := &action.SealedEnvelope{} if err := v.LoadProto(actPb); err != nil { return explorer.SendVoteResponse{}, err } h := v.Hash() return explorer.SendVoteResponse{Hash: hex.EncodeToString(h[:])}, nil } // PutSubChainBlock put block merkel root on root chain. 
func (exp *Service) PutSubChainBlock(putBlockJSON explorer.PutSubChainBlockRequest) (resp explorer.PutSubChainBlockResponse, err error) { log.L().Debug("receive put block request") defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("PutBlock", succeed).Inc() }() senderPubKey, err := keypair.StringToPubKeyBytes(putBlockJSON.SenderPubKey) if err != nil { return explorer.PutSubChainBlockResponse{}, err } signature, err := hex.DecodeString(putBlockJSON.Signature) if err != nil { return explorer.PutSubChainBlockResponse{}, err } gasPrice, ok := big.NewInt(0).SetString(putBlockJSON.GasPrice, 10) if !ok { return explorer.PutSubChainBlockResponse{}, errors.New("failed to set vote gas price") } roots := make([]*iproto.MerkleRoot, 0) for _, mr := range putBlockJSON.Roots { v, err := hex.DecodeString(mr.Value) if err != nil { return explorer.PutSubChainBlockResponse{}, err } roots = append(roots, &iproto.MerkleRoot{ Name: mr.Name, Value: v, }) } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_PutBlock{ PutBlock: &iproto.PutBlockPb{ SubChainAddress: putBlockJSON.SubChainAddress, Height: uint64(putBlockJSON.Height), Roots: roots, }, }, Version: uint32(putBlockJSON.Version), SenderPubKey: senderPubKey, Nonce: uint64(putBlockJSON.Nonce), GasLimit: uint64(putBlockJSON.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } // broadcast to the network if err := exp.broadcastHandler(context.Background(), exp.bc.ChainID(), actPb); err != nil { return explorer.PutSubChainBlockResponse{}, err } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), actPb) v := &action.SealedEnvelope{} if err := v.LoadProto(actPb); err != nil { return explorer.PutSubChainBlockResponse{}, err } h := v.Hash() return explorer.PutSubChainBlockResponse{Hash: hex.EncodeToString(h[:])}, nil } // SendAction is the API to send an action to blockchain. func (exp *Service) SendAction(req explorer.SendActionRequest) (resp explorer.SendActionResponse, err error) { log.L().Debug("receive send action request") defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("SendAction", succeed).Inc() }() var action iproto.ActionPb if err := jsonpb.UnmarshalString(req.Payload, &action); err != nil { return explorer.SendActionResponse{}, err } // broadcast to the network if err = exp.broadcastHandler(context.Background(), exp.bc.ChainID(), &action); err != nil { log.L().Warn("Failed to broadcast SendAction request.", zap.Error(err)) } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), &action) // TODO: include action hash return explorer.SendActionResponse{}, nil } // GetPeers return a list of node peers and itself's network addsress info. 
func (exp *Service) GetPeers() (explorer.GetPeersResponse, error) { var exppeers []explorer.Node ctx := context.Background() peers, err := exp.neighborsHandler(ctx) if err != nil { return explorer.GetPeersResponse{}, err } for _, p := range peers { exppeers = append(exppeers, explorer.Node{ Address: fmt.Sprintf("%v", p), }) } return explorer.GetPeersResponse{ Self: explorer.Node{Address: fmt.Sprintf("%v", exp.networkInfoHandler())}, Peers: exppeers, }, nil } // SendSmartContract sends a smart contract func (exp *Service) SendSmartContract(execution explorer.Execution) (resp explorer.SendSmartContractResponse, err error) { log.L().Debug("receive send smart contract request") defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("SendSmartContract", succeed).Inc() }() executorPubKey, err := keypair.StringToPubKeyBytes(execution.ExecutorPubKey) if err != nil { return explorer.SendSmartContractResponse{}, err } data, err := hex.DecodeString(execution.Data) if err != nil { return explorer.SendSmartContractResponse{}, err } signature, err := hex.DecodeString(execution.Signature) if err != nil { return explorer.SendSmartContractResponse{}, err } amount, ok := big.NewInt(0).SetString(execution.Amount, 10) if !ok { return explorer.SendSmartContractResponse{}, errors.New("failed to set execution amount") } gasPrice, ok := big.NewInt(0).SetString(execution.GasPrice, 10) if !ok { return explorer.SendSmartContractResponse{}, errors.New("failed to set execution gas price") } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_Execution{ Execution: &iproto.ExecutionPb{ Amount: amount.Bytes(), Contract: execution.Contract, Data: data, }, }, Version: uint32(execution.Version), SenderPubKey: executorPubKey, Nonce: uint64(execution.Nonce), GasLimit: uint64(execution.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } // broadcast to the network if err := exp.broadcastHandler(context.Background(), exp.bc.ChainID(), actPb); err != nil { return explorer.SendSmartContractResponse{}, err } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), actPb) sc := &action.SealedEnvelope{} if err := sc.LoadProto(actPb); err != nil { return explorer.SendSmartContractResponse{}, err } h := sc.Hash() return explorer.SendSmartContractResponse{Hash: hex.EncodeToString(h[:])}, nil } // ReadExecutionState reads the state in a contract address specified by the slot func (exp *Service) ReadExecutionState(execution explorer.Execution) (string, error) { log.L().Debug("receive read smart contract request") actPb, err := convertExplorerExecutionToActionPb(&execution) if err != nil { return "", err } selp := &action.SealedEnvelope{} if err := selp.LoadProto(actPb); err != nil { return "", err } sc, ok := selp.Action().(*action.Execution) if !ok { return "", errors.New("not execution") } callerPKHash := keypair.HashPubKey(selp.SrcPubkey()) callerAddr, err := address.FromBytes(callerPKHash[:]) if err != nil { return "", err } res, err := exp.bc.ExecuteContractRead(callerAddr, sc) if err != nil { return "", err } return hex.EncodeToString(res.ReturnValue), nil } // GetBlockOrActionByHash get block or action by a hash func (exp *Service) GetBlockOrActionByHash(hashStr string) (explorer.GetBlkOrActResponse, error) { if blk, err := exp.GetBlockByID(hashStr); err == nil { return explorer.GetBlkOrActResponse{Block: &blk}, nil } if tsf, err := exp.GetTransferByID(hashStr); err == nil { return explorer.GetBlkOrActResponse{Transfer: &tsf}, nil } if 
vote, err := exp.GetVoteByID(hashStr); err == nil { return explorer.GetBlkOrActResponse{Vote: &vote}, nil } if exe, err := exp.GetExecutionByID(hashStr); err == nil { return explorer.GetBlkOrActResponse{Execution: &exe}, nil } return explorer.GetBlkOrActResponse{}, nil } // CreateDeposit deposits balance from main-chain to sub-chain func (exp *Service) CreateDeposit(req explorer.CreateDepositRequest) (res explorer.CreateDepositResponse, err error) { defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("createDeposit", succeed).Inc() }() senderPubKey, err := keypair.StringToPubKeyBytes(req.SenderPubKey) if err != nil { return res, err } signature, err := hex.DecodeString(req.Signature) if err != nil { return res, err } amount, ok := big.NewInt(0).SetString(req.Amount, 10) if !ok { return res, errors.New("error when converting amount string into big int type") } gasPrice, ok := big.NewInt(0).SetString(req.GasPrice, 10) if !ok { return res, errors.New("error when converting gas price string into big int type") } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_CreateDeposit{ CreateDeposit: &iproto.CreateDepositPb{ ChainID: uint32(req.ChainID), Amount: amount.Bytes(), Recipient: req.Recipient, }, }, Version: uint32(req.Version), SenderPubKey: senderPubKey, Nonce: uint64(req.Nonce), GasLimit: uint64(req.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } // broadcast to the network if err := exp.broadcastHandler(context.Background(), exp.bc.ChainID(), actPb); err != nil { return res, err } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), actPb) selp := &action.SealedEnvelope{} if err := selp.LoadProto(actPb); err != nil { return res, err } h := selp.Hash() return explorer.CreateDepositResponse{Hash: hex.EncodeToString(h[:])}, nil } // GetDeposits returns the deposits of a sub-chain in the given range in descending order by the index func (exp *Service) GetDeposits(subChainID int64, offset int64, limit int64) ([]explorer.Deposit, error) { subChainsInOp, err := exp.mainChain.SubChainsInOperation() if err != nil { return nil, err } var targetSubChain mainchain.InOperation for _, subChainInOp := range subChainsInOp { if subChainInOp.ID == uint32(subChainID) { targetSubChain = subChainInOp } } if targetSubChain.ID != uint32(subChainID) { return nil, errors.Errorf("sub-chain %d is not found in operation", subChainID) } subChainAddr, err := address.FromBytes(targetSubChain.Addr) if err != nil { return nil, err } subChain, err := exp.mainChain.SubChain(subChainAddr) if err != nil { return nil, err } idx := uint64(offset) // If the last deposit index is lower than the start index, reset it if subChain.DepositCount-1 < idx { idx = subChain.DepositCount - 1 } var deposits []explorer.Deposit for count := int64(0); count < limit; count++ { deposit, err := exp.mainChain.Deposit(subChainAddr, idx) if err != nil { return nil, err } recipient, err := address.FromBytes(deposit.Addr) if err != nil { return nil, err } deposits = append(deposits, explorer.Deposit{ Amount: deposit.Amount.String(), Address: recipient.String(), Confirmed: deposit.Confirmed, }) if idx > 0 { idx-- } else { break } } return deposits, nil } // SettleDeposit settles deposit on sub-chain func (exp *Service) SettleDeposit(req explorer.SettleDepositRequest) (res explorer.SettleDepositResponse, err error) { defer func() { succeed := "true" if err != nil { succeed = "false" } requestMtc.WithLabelValues("settleDeposit", succeed).Inc() }() 
senderPubKey, err := keypair.StringToPubKeyBytes(req.SenderPubKey) if err != nil { return res, err } signature, err := hex.DecodeString(req.Signature) if err != nil { return res, err } amount, ok := big.NewInt(0).SetString(req.Amount, 10) if !ok { return res, errors.New("error when converting amount string into big int type") } gasPrice, ok := big.NewInt(0).SetString(req.GasPrice, 10) if !ok { return res, errors.New("error when converting gas price string into big int type") } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_SettleDeposit{ SettleDeposit: &iproto.SettleDepositPb{ Amount: amount.Bytes(), Index: uint64(req.Index), Recipient: req.Recipient, }, }, Version: uint32(req.Version), SenderPubKey: senderPubKey, Nonce: uint64(req.Nonce), GasLimit: uint64(req.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } // broadcast to the network if err := exp.broadcastHandler(context.Background(), exp.bc.ChainID(), actPb); err != nil { return res, err } // send to actpool via dispatcher exp.dp.HandleBroadcast(context.Background(), exp.bc.ChainID(), actPb) deposit := &action.SealedEnvelope{} if err := deposit.LoadProto(actPb); err != nil { return res, err } h := deposit.Hash() return explorer.SettleDepositResponse{Hash: hex.EncodeToString(h[:])}, nil } // SuggestGasPrice suggest gas price func (exp *Service) SuggestGasPrice() (int64, error) { return exp.gs.suggestGasPrice() } // EstimateGasForTransfer estimate gas for transfer func (exp *Service) EstimateGasForTransfer(tsfJSON explorer.SendTransferRequest) (int64, error) { return exp.gs.estimateGasForTransfer(tsfJSON) } // EstimateGasForVote suggest gas for vote func (exp *Service) EstimateGasForVote() (int64, error) { return exp.gs.estimateGasForVote() } // EstimateGasForSmartContract suggest gas for smart contract func (exp *Service) EstimateGasForSmartContract(execution explorer.Execution) (int64, error) { return exp.gs.estimateGasForSmartContract(execution) } // GetStateRootHash gets the state root hash of a given block height func (exp *Service) GetStateRootHash(blockHeight int64) (string, error) { rootHash, err := exp.bc.GetFactory().RootHashByHeight(uint64(blockHeight)) if err != nil { return "", err } return hex.EncodeToString(rootHash[:]), nil } // getTransfer takes in a blockchain and transferHash and returns an Explorer Transfer func getTransfer(bc blockchain.Blockchain, ap actpool.ActPool, transferHash hash.Hash256, idx *indexservice.Server, useIndexer bool) (explorer.Transfer, error) { explorerTransfer := explorer.Transfer{} selp, err := bc.GetActionByActionHash(transferHash) if err != nil { // Try to fetch pending transfer from actpool selp, err := ap.GetActionByHash(transferHash) if err != nil { return explorerTransfer, err } return convertTsfToExplorerTsf(selp, true) } // Fetch from block var blkHash hash.Hash256 if useIndexer { hash, err := idx.Indexer().GetBlockByIndex(config.IndexTransfer, transferHash) if err != nil { return explorerTransfer, err } blkHash = hash } else { hash, err := bc.GetBlockHashByTransferHash(transferHash) if err != nil { return explorerTransfer, err } blkHash = hash } blk, err := bc.GetBlockByHash(blkHash) if err != nil { return explorerTransfer, err } if explorerTransfer, err = convertTsfToExplorerTsf(selp, false); err != nil { return explorerTransfer, errors.Wrapf(err, "failed to convert transfer %v to explorer's JSON transfer", selp) } explorerTransfer.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerTransfer.BlockID = hex.EncodeToString(blkHash[:]) return 
explorerTransfer, nil } // getVote takes in a blockchain and voteHash and returns an Explorer Vote func getVote(bc blockchain.Blockchain, ap actpool.ActPool, voteHash hash.Hash256, idx *indexservice.Server, useIndexer bool) (explorer.Vote, error) { explorerVote := explorer.Vote{} selp, err := bc.GetActionByActionHash(voteHash) if err != nil { // Try to fetch pending vote from actpool selp, err := ap.GetActionByHash(voteHash) if err != nil { return explorerVote, err } return convertVoteToExplorerVote(selp, true) } // Fetch from block var blkHash hash.Hash256 if useIndexer { hash, err := idx.Indexer().GetBlockByIndex(config.IndexVote, voteHash) if err != nil { return explorerVote, err } blkHash = hash } else { hash, err := bc.GetBlockHashByVoteHash(voteHash) if err != nil { return explorerVote, err } blkHash = hash } blk, err := bc.GetBlockByHash(blkHash) if err != nil { return explorerVote, err } if explorerVote, err = convertVoteToExplorerVote(selp, false); err != nil { return explorerVote, errors.Wrapf(err, "failed to convert vote %v to explorer's JSON vote", selp) } explorerVote.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerVote.BlockID = hex.EncodeToString(blkHash[:]) return explorerVote, nil } // getExecution takes in a blockchain and executionHash and returns an Explorer execution func getExecution(bc blockchain.Blockchain, ap actpool.ActPool, executionHash hash.Hash256, idx *indexservice.Server, useIndexer bool) (explorer.Execution, error) { explorerExecution := explorer.Execution{} selp, err := bc.GetActionByActionHash(executionHash) if err != nil { // Try to fetch pending execution from actpool selp, err = ap.GetActionByHash(executionHash) if err != nil { return explorerExecution, err } return convertExecutionToExplorerExecution(selp, true) } // Fetch from block var blkHash hash.Hash256 if useIndexer { hash, err := idx.Indexer().GetBlockByIndex(config.IndexExecution, executionHash) if err != nil { return explorerExecution, err } blkHash = hash } else { hash, err := bc.GetBlockHashByExecutionHash(executionHash) if err != nil { return explorerExecution, err } blkHash = hash } blk, err := bc.GetBlockByHash(blkHash) if err != nil { return explorerExecution, err } if explorerExecution, err = convertExecutionToExplorerExecution(selp, false); err != nil { return explorerExecution, errors.Wrapf(err, "failed to convert execution %v to explorer's JSON execution", selp) } explorerExecution.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() explorerExecution.BlockID = hex.EncodeToString(blkHash[:]) return explorerExecution, nil } // getCreateDeposit takes in a blockchain and create deposit hash and returns an Explorer create deposit func getCreateDeposit( bc blockchain.Blockchain, ap actpool.ActPool, createDepositHash hash.Hash256, ) (explorer.CreateDeposit, error) { pending := false var selp action.SealedEnvelope var err error selp, err = bc.GetActionByActionHash(createDepositHash) if err != nil { // Try to fetch pending create deposit from actpool selp, err = ap.GetActionByHash(createDepositHash) if err != nil { return explorer.CreateDeposit{}, err } pending = true } // Fetch from block blkHash, err := bc.GetBlockHashByActionHash(createDepositHash) if err != nil { return explorer.CreateDeposit{}, err } blk, err := bc.GetBlockByHash(blkHash) if err != nil { return explorer.CreateDeposit{}, err } cd, err := castActionToCreateDeposit(selp, pending) if err != nil { return explorer.CreateDeposit{}, err } cd.Timestamp = 
blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() cd.BlockID = hex.EncodeToString(blkHash[:]) return cd, nil } func castActionToCreateDeposit(selp action.SealedEnvelope, pending bool) (explorer.CreateDeposit, error) { cd, ok := selp.Action().(*action.CreateDeposit) if !ok { return explorer.CreateDeposit{}, errors.Wrap(ErrAction, "action type is not create deposit") } hash := selp.Hash() createDeposit := explorer.CreateDeposit{ Nonce: int64(selp.Nonce()), ID: hex.EncodeToString(hash[:]), Recipient: cd.Recipient(), Fee: "", // TODO: we need to get the actual fee. GasLimit: int64(selp.GasLimit()), IsPending: pending, } if cd.Amount() != nil && len(cd.Amount().String()) > 0 { createDeposit.Amount = cd.Amount().String() } if selp.GasPrice() != nil && len(selp.GasPrice().String()) > 0 { createDeposit.GasPrice = selp.GasPrice().String() } return createDeposit, nil } // getSettleDeposit takes in a blockchain and settle deposit hash and returns an Explorer settle deposit func getSettleDeposit( bc blockchain.Blockchain, ap actpool.ActPool, settleDepositHash hash.Hash256, ) (explorer.SettleDeposit, error) { pending := false var selp action.SealedEnvelope var err error selp, err = bc.GetActionByActionHash(settleDepositHash) if err != nil { // Try to fetch pending settle deposit from actpool selp, err = ap.GetActionByHash(settleDepositHash) if err != nil { return explorer.SettleDeposit{}, err } pending = true } // Fetch from block blkHash, err := bc.GetBlockHashByActionHash(settleDepositHash) if err != nil { return explorer.SettleDeposit{}, err } blk, err := bc.GetBlockByHash(blkHash) if err != nil { return explorer.SettleDeposit{}, err } sd, err := castActionToSettleDeposit(selp, pending) if err != nil { return explorer.SettleDeposit{}, err } sd.Timestamp = blk.ConvertToBlockHeaderPb().GetTimestamp().GetSeconds() sd.BlockID = hex.EncodeToString(blkHash[:]) return sd, nil } func castActionToSettleDeposit(selp action.SealedEnvelope, pending bool) (explorer.SettleDeposit, error) { sd, ok := selp.Action().(*action.SettleDeposit) if !ok { return explorer.SettleDeposit{}, errors.Wrap(ErrAction, "action type is not settle deposit") } hash := selp.Hash() settleDeposit := explorer.SettleDeposit{ Nonce: int64(selp.Nonce()), ID: hex.EncodeToString(hash[:]), Recipient: sd.Recipient(), Index: int64(sd.Index()), Fee: "", // TODO: we need to get the actual fee. GasLimit: int64(selp.GasLimit()), IsPending: pending, } if sd.Amount() != nil && len(sd.Amount().String()) > 0 { settleDeposit.Amount = sd.Amount().String() } if selp.GasPrice() != nil && len(selp.GasPrice().String()) > 0 { settleDeposit.GasPrice = selp.GasPrice().String() } return settleDeposit, nil } func convertTsfToExplorerTsf(selp action.SealedEnvelope, isPending bool) (explorer.Transfer, error) { transfer, ok := selp.Action().(*action.Transfer) if !ok { return explorer.Transfer{}, errors.Wrap(ErrTransfer, "action is not transfer") } if transfer == nil { return explorer.Transfer{}, errors.Wrap(ErrTransfer, "transfer cannot be nil") } hash := selp.Hash() explorerTransfer := explorer.Transfer{ Nonce: int64(selp.Nonce()), ID: hex.EncodeToString(hash[:]), Recipient: transfer.Recipient(), Fee: "", // TODO: we need to get the actual fee. 
Payload: hex.EncodeToString(transfer.Payload()), GasLimit: int64(selp.GasLimit()), IsCoinbase: false, IsPending: isPending, } if transfer.Amount() != nil && len(transfer.Amount().String()) > 0 { explorerTransfer.Amount = transfer.Amount().String() } if selp.GasPrice() != nil && len(selp.GasPrice().String()) > 0 { explorerTransfer.GasPrice = selp.GasPrice().String() } return explorerTransfer, nil } func convertVoteToExplorerVote(selp action.SealedEnvelope, isPending bool) (explorer.Vote, error) { vote, ok := selp.Action().(*action.Vote) if !ok { return explorer.Vote{}, errors.Wrap(ErrTransfer, "action is not vote") } if vote == nil { return explorer.Vote{}, errors.Wrap(ErrVote, "vote cannot be nil") } hash := selp.Hash() voterPubkey := vote.VoterPublicKey() explorerVote := explorer.Vote{ ID: hex.EncodeToString(hash[:]), Nonce: int64(selp.Nonce()), VoterPubKey: keypair.EncodePublicKey(voterPubkey), Votee: vote.Votee(), GasLimit: int64(selp.GasLimit()), GasPrice: selp.GasPrice().String(), IsPending: isPending, } return explorerVote, nil } func convertExecutionToExplorerExecution(selp action.SealedEnvelope, isPending bool) (explorer.Execution, error) { execution, ok := selp.Action().(*action.Execution) if !ok { return explorer.Execution{}, errors.Wrap(ErrTransfer, "action is not execution") } if execution == nil { return explorer.Execution{}, errors.Wrap(ErrExecution, "execution cannot be nil") } hash := execution.Hash() explorerExecution := explorer.Execution{ Nonce: int64(selp.Nonce()), ID: hex.EncodeToString(hash[:]), Contract: execution.Contract(), GasLimit: int64(selp.GasLimit()), Data: hex.EncodeToString(execution.Data()), IsPending: isPending, } if execution.Amount() != nil && len(execution.Amount().String()) > 0 { explorerExecution.Amount = execution.Amount().String() } if selp.GasPrice() != nil && len(selp.GasPrice().String()) > 0 { explorerExecution.GasPrice = selp.GasPrice().String() } return explorerExecution, nil } func convertReceiptToExplorerReceipt(receipt *action.Receipt) (explorer.Receipt, error) { if receipt == nil { return explorer.Receipt{}, errors.Wrap(ErrReceipt, "receipt cannot be nil") } logs := []explorer.Log{} for _, log := range receipt.Logs { topics := []string{} for _, topic := range log.Topics { topics = append(topics, hex.EncodeToString(topic[:])) } logs = append(logs, explorer.Log{ Address: log.Address, Topics: topics, Data: hex.EncodeToString(log.Data), BlockNumber: int64(log.BlockNumber), TxnHash: hex.EncodeToString(log.TxnHash[:]), BlockHash: hex.EncodeToString(log.BlockHash[:]), Index: int64(log.Index), }) } return explorer.Receipt{ ReturnValue: hex.EncodeToString(receipt.ReturnValue), Status: int64(receipt.Status), Hash: hex.EncodeToString(receipt.ActHash[:]), GasConsumed: int64(receipt.GasConsumed), ContractAddress: receipt.ContractAddress, Logs: logs, }, nil } func convertExplorerExecutionToActionPb(execution *explorer.Execution) (*iproto.ActionPb, error) { executorPubKey, err := keypair.StringToPubKeyBytes(execution.ExecutorPubKey) if err != nil { return nil, err } data, err := hex.DecodeString(execution.Data) if err != nil { return nil, err } signature, err := hex.DecodeString(execution.Signature) if err != nil { return nil, err } amount, ok := big.NewInt(0).SetString(execution.Amount, 10) if !ok { return nil, errors.New("failed to set execution amount") } gasPrice, ok := big.NewInt(0).SetString(execution.GasPrice, 10) if !ok { return nil, errors.New("failed to set execution gas price") } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_Execution{ 
Execution: &iproto.ExecutionPb{ Amount: amount.Bytes(), Contract: execution.Contract, Data: data, }, }, Version: uint32(execution.Version), SenderPubKey: executorPubKey, Nonce: uint64(execution.Nonce), GasLimit: uint64(execution.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } return actPb, nil } func convertExplorerTransferToActionPb(tsfJSON *explorer.SendTransferRequest, maxTransferPayloadBytes uint64) (*iproto.ActionPb, error) { payload, err := hex.DecodeString(tsfJSON.Payload) if err != nil { return nil, err } if uint64(len(payload)) > maxTransferPayloadBytes { return nil, errors.Wrapf( ErrTransfer, "transfer payload contains %d bytes, and is longer than %d bytes limit", len(payload), maxTransferPayloadBytes, ) } senderPubKey, err := keypair.StringToPubKeyBytes(tsfJSON.SenderPubKey) if err != nil { return nil, err } signature, err := hex.DecodeString(tsfJSON.Signature) if err != nil { return nil, err } amount, ok := big.NewInt(0).SetString(tsfJSON.Amount, 10) if !ok { return nil, errors.New("failed to set transfer amount") } gasPrice, ok := big.NewInt(0).SetString(tsfJSON.GasPrice, 10) if !ok { return nil, errors.New("failed to set transfer gas price") } actPb := &iproto.ActionPb{ Action: &iproto.ActionPb_Transfer{ Transfer: &iproto.TransferPb{ Amount: amount.Bytes(), Recipient: tsfJSON.Recipient, Payload: payload, }, }, Version: uint32(tsfJSON.Version), SenderPubKey: senderPubKey, Nonce: uint64(tsfJSON.Nonce), GasLimit: uint64(tsfJSON.GasLimit), GasPrice: gasPrice.Bytes(), Signature: signature, } return actPb, nil }
1
15,055
unknown field AddressDetails in struct literal (from `typecheck`)
iotexproject-iotex-core
go
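The explorer service in the record above repeats one pagination idiom in every Get*ByAddress / Get*ByBlockID method: walk the candidate items in order, skip the first offset entries, and stop once limit results have been collected. A minimal, self-contained sketch of that idiom follows; the ids slice and the sample values are illustrative placeholders, not part of the service.

package main

import "fmt"

// paginate skips the first `offset` entries and then collects results until
// `limit` of them have been gathered, mirroring the loops in the service above.
func paginate(ids []string, offset, limit int64) []string {
	res := make([]string, 0)
	for i, id := range ids {
		if int64(i) < offset {
			continue // still inside the skipped prefix
		}
		if int64(len(res)) >= limit {
			break // page is full
		}
		res = append(res, id)
	}
	return res
}

func main() {
	ids := []string{"a", "b", "c", "d", "e"}
	fmt.Println(paginate(ids, 1, 2)) // prints [b c]
}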
@@ -33,7 +33,13 @@ func sessionForRegion(region string) (*session.Session, error) { return s.(*session.Session), nil } - ns, err := session.NewSession(aws.NewConfig().WithRegion(region)) + ns, err := session.NewSessionWithOptions(session.Options{ + // Provide SDK Config options, such as Region. + Config: aws.Config{ + Region: aws.String(region), + }, + }) + if err != nil { return nil, err }
1
/* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package scope import ( "sync" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/session" ) var ( sessionCache sync.Map ) func sessionForRegion(region string) (*session.Session, error) { s, ok := sessionCache.Load(region) if ok { return s.(*session.Session), nil } ns, err := session.NewSession(aws.NewConfig().WithRegion(region)) if err != nil { return nil, err } sessionCache.Store(region, ns) return ns, nil }
1
14,862
Just noticed, are we specifically missing the addition of `SharedConfigState: session.SharedConfigEnable` ?
kubernetes-sigs-cluster-api-provider-aws
go
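The review message in this record asks whether SharedConfigState: session.SharedConfigEnable was meant to be part of the patch. As a hedged sketch only (not necessarily what was merged), the cached-session helper from the file above would look like this with that option added; imports and sessionCache are as in the original file.

func sessionForRegion(region string) (*session.Session, error) {
	if s, ok := sessionCache.Load(region); ok {
		return s.(*session.Session), nil
	}
	ns, err := session.NewSessionWithOptions(session.Options{
		// Region is still taken from the caller, as in the patch.
		Config: aws.Config{
			Region: aws.String(region),
		},
		// SharedConfigEnable additionally loads the shared config files
		// (~/.aws/config profiles, region, etc.), which is what the
		// reviewer's question is about.
		SharedConfigState: session.SharedConfigEnable,
	})
	if err != nil {
		return nil, err
	}
	sessionCache.Store(region, ns)
	return ns, nil
}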
@@ -67,9 +67,9 @@ namespace pwiz.Skyline.Model.Results var newTimeIntensities = GetTimeIntensities(Source); if (newTimeIntensities != null) { - if (oldTimeIntensities != null) + if (oldTimeIntensities != null && oldTimeIntensities.NumPoints > 0) { - var oldTime = oldTimeIntensities.Times[ScanIndex]; + var oldTime = oldTimeIntensities.Times[Math.Min(ScanIndex, oldTimeIntensities.NumPoints - 1)]; ScanIndex = newTimeIntensities.IndexOfNearestTime(oldTime); } else
1
/* * Original author: Brian Pratt <bspratt .at. proteinms.net>, * MacCoss Lab, Department of Genome Sciences, UW * * Copyright 2015 University of Washington - Seattle, WA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Linq; using System.Threading; using pwiz.Common.Chemistry; using pwiz.ProteowizardWrapper; using pwiz.Skyline.Properties; using pwiz.Skyline.Util; namespace pwiz.Skyline.Model.Results { public class MsDataFileScanHelper : IDisposable { private ChromSource _chromSource; public MsDataFileScanHelper(Action<MsDataSpectrum[]> successAction, Action<Exception> failureAction) { ScanProvider = new BackgroundScanProvider(successAction, failureAction); SourceNames = new string[Helpers.CountEnumValues<ChromSource>()]; SourceNames[(int)ChromSource.ms1] = Resources.GraphFullScan_GraphFullScan_MS1; SourceNames[(int)ChromSource.fragment] = Resources.GraphFullScan_GraphFullScan_MS_MS; SourceNames[(int)ChromSource.sim] = Resources.GraphFullScan_GraphFullScan_SIM; } public BackgroundScanProvider ScanProvider { get; private set; } public MsDataSpectrum[] MsDataSpectra { get; set; } public string FileName { get; private set; } public int TransitionIndex { get; set; } public int ScanIndex { get; set; } public string[] SourceNames { get; set; } public ChromSource Source { get { return _chromSource; } set { if (Source == value) { return; } var oldTimeIntensities = GetTimeIntensities(Source); _chromSource = value; var newTimeIntensities = GetTimeIntensities(Source); if (newTimeIntensities != null) { if (oldTimeIntensities != null) { var oldTime = oldTimeIntensities.Times[ScanIndex]; ScanIndex = newTimeIntensities.IndexOfNearestTime(oldTime); } else { ScanIndex = Math.Min(ScanIndex, newTimeIntensities.NumPoints - 1); } } } } public ChromSource SourceFromName(string name) { return (ChromSource) SourceNames.IndexOf(e => e == name); } public string NameFromSource(ChromSource source) { return SourceNames[(int) source]; } public MsDataSpectrum[] GetFilteredScans() { var fullScans = MsDataSpectra; double minIonMobility, maxIonMobility; if (Settings.Default.FilterIonMobilityFullScan && GetIonMobilityRange(out minIonMobility, out maxIonMobility, Source)) fullScans = fullScans.Where(s => minIonMobility <= s.IonMobility.Mobility && s.IonMobility.Mobility <= maxIonMobility).ToArray(); return fullScans; } public bool GetIonMobilityRange(out double minIonMobility, out double maxIonMobility, ChromSource sourceType) { minIonMobility = double.MaxValue; maxIonMobility = double.MinValue; var hasIonMobilityInfo = false; int i = 0; foreach (var transition in ScanProvider.Transitions) { if (!transition._ionMobilityInfo.HasIonMobilityValue || !transition._ionMobilityInfo.IonMobilityExtractionWindowWidth.HasValue) { // Accept all values minIonMobility = double.MinValue; maxIonMobility = double.MaxValue; } else if (sourceType == ChromSource.unknown || (transition.Source == sourceType && i == TransitionIndex)) { // Products and precursors may have different expected ion 
mobility values in Waters MsE double startIM = transition._ionMobilityInfo.IonMobility.Mobility.Value - transition._ionMobilityInfo.IonMobilityExtractionWindowWidth.Value / 2; double endIM = startIM + transition._ionMobilityInfo.IonMobilityExtractionWindowWidth.Value; minIonMobility = Math.Min(minIonMobility, startIM); maxIonMobility = Math.Max(maxIonMobility, endIM); hasIonMobilityInfo = true; } i++; } return hasIonMobilityInfo; } /// <summary> /// Return a collisional cross section for this ion mobility at this mz, if reader supports this /// </summary> public double? CCSFromIonMobility(IonMobilityValue ionMobility, double mz, int charge) { if (ScanProvider == null) { return null; } return ScanProvider.CCSFromIonMobility(ionMobility, mz, charge); } public bool ProvidesCollisionalCrossSectionConverter { get { return ScanProvider != null && ScanProvider.ProvidesCollisionalCrossSectionConverter; } } public eIonMobilityUnits IonMobilityUnits { get { return ScanProvider.IonMobilityUnits; } } public TimeIntensities GetTimeIntensities(ChromSource source) { if (ScanProvider != null) { foreach (var transition in ScanProvider.Transitions) { if (transition.Source == source) return transition.TimeIntensities; } } return null; } public IList<int> GetScanIndexes(ChromSource source) { return GetTimeIntensities(source)?.ScanIds; } public int GetScanIndex() { var scanIndexes = GetScanIndexes(Source); var result = scanIndexes != null ? scanIndexes[ScanIndex] : -1; if (result < 0) MsDataSpectra = null; return result; } public static int FindScanIndex(ChromatogramInfo chromatogramInfo, double retentionTime) { if (chromatogramInfo.TimeIntensities.ScanIds == null) return -1; return FindScanIndex(chromatogramInfo.Times, retentionTime, 0, chromatogramInfo.Times.Count); } public static int FindScanIndex(IList<float> times, double retentionTime) { return FindScanIndex(times, retentionTime, 0, times.Count); } private static int FindScanIndex(IList<float> times, double retentionTime, int startIndex, int endIndex) { if (endIndex - startIndex <= 1) return startIndex; int index = (startIndex + endIndex) / 2; return (retentionTime < times[index]) ? FindScanIndex(times, retentionTime, startIndex, index) : FindScanIndex(times, retentionTime, index, endIndex); } public void UpdateScanProvider(IScanProvider scanProvider, int transitionIndex, int scanIndex) { ScanProvider.SetScanProvider(scanProvider); if (scanProvider != null) { Source = scanProvider.Transitions[transitionIndex].Source; Assume.IsTrue(Source == ScanProvider.Source); TransitionIndex = transitionIndex; ScanIndex = scanIndex; FileName = scanProvider.DataFilePath.GetFileName(); } else { MsDataSpectra = null; FileName = null; } } /// <summary> /// Provides a constant background thread with responsibility for all interactions /// with <see cref="IScanProvider"/>, necessary because <see cref="MsDataFileImpl"/> objects /// must be accessed on the same thread. 
/// </summary> public class BackgroundScanProvider : IDisposable { private const int MAX_CACHE_COUNT = 2; private bool _disposing; private int _scanIndexNext; private IScanProvider _scanProvider; private readonly List<IScanProvider> _cachedScanProviders; private readonly List<IScanProvider> _oldScanProviders; private readonly Thread _backgroundThread; private readonly Action<MsDataSpectrum[]> _successAction; private readonly Action<Exception> _failureAction; public BackgroundScanProvider(Action<MsDataSpectrum[]> successAction, Action<Exception> failureAction) { _scanIndexNext = -1; _oldScanProviders = new List<IScanProvider>(); _cachedScanProviders = new List<IScanProvider>(); _backgroundThread = new Thread(Work) { Name = GetType().Name, Priority = ThreadPriority.BelowNormal, IsBackground = true }; _backgroundThread.Start(); _successAction = successAction; _failureAction = failureAction; } public MsDataFileUri DataFilePath { get { return GetProviderProperty(p => p.DataFilePath, new MsDataFilePath(string.Empty)); } } public ChromSource Source { get { return GetProviderProperty(p => p.Source, ChromSource.unknown); } } public TransitionFullScanInfo[] Transitions { get { return GetProviderProperty(p => p.Transitions, new TransitionFullScanInfo[0]); } } public IList<float> Times { get { return GetProviderProperty(p => p.Times, new float[0]); } } private TProp GetProviderProperty<TProp>(Func<IScanProvider, TProp> getProp, TProp defaultValue) { lock (this) { return _scanProvider != null ? getProp(_scanProvider) : defaultValue; } } /// <summary> /// Return a collisional cross section for this ion mobility at this mz, if reader supports this /// </summary> public double? CCSFromIonMobility(IonMobilityValue ionMobility, double mz, int charge) { if (_scanProvider == null) { return null; } return _scanProvider.CCSFromIonMobility(ionMobility, mz, charge); } public eIonMobilityUnits IonMobilityUnits { get { return _scanProvider != null ? _scanProvider.IonMobilityUnits : eIonMobilityUnits.none; } } public bool ProvidesCollisionalCrossSectionConverter { get { return _scanProvider != null && _scanProvider.ProvidesCollisionalCrossSectionConverter; } } /// <summary> /// Always run on a specific background thread to avoid changing threads when dealing /// with a scan provider, which can mess up data readers used by ProteoWizard. 
/// </summary> private void Work() { try { while (!_disposing) { IScanProvider scanProvider; int internalScanIndex; lock (this) { while (!_disposing && (_scanProvider == null || _scanIndexNext < 0) && _oldScanProviders.Count == 0) Monitor.Wait(this); if (_disposing) break; scanProvider = _scanProvider; internalScanIndex = _scanIndexNext; _scanIndexNext = -1; } if (scanProvider != null && internalScanIndex != -1) { try { var msDataSpectra = scanProvider.GetMsDataFileSpectraWithCommonRetentionTime(internalScanIndex); // Get a collection of scans with changing ion mobility but same retention time, or single scan if no ion mobility info _successAction(msDataSpectra); } catch (Exception ex) { try { _failureAction(ex); } catch (Exception exFailure) { Program.ReportException(exFailure); } } } DisposeAllProviders(); } } finally { lock (this) { SetScanProvider(null); DisposeAllProviders(); Monitor.PulseAll(this); } } } public void SetScanProvider(IScanProvider newScanProvider) { lock (this) { if (_scanProvider != null && !ReferenceEquals(_scanProvider, newScanProvider)) { _cachedScanProviders.Insert(0, _scanProvider); if (newScanProvider != null) { AdoptCachedProvider(newScanProvider); } // Queue for disposal if (_cachedScanProviders.Count > MAX_CACHE_COUNT) { _oldScanProviders.Add(_cachedScanProviders[MAX_CACHE_COUNT]); _cachedScanProviders.RemoveAt(MAX_CACHE_COUNT); } } _scanProvider = newScanProvider; if (newScanProvider == null) // Called with null when we're disposing { _oldScanProviders.AddRange(_cachedScanProviders); _cachedScanProviders.Clear(); } Monitor.PulseAll(this); } } private void AdoptCachedProvider(IScanProvider scanProvider) { lock (this) { for (int i = 0; i < _cachedScanProviders.Count; i++) { if (scanProvider.Adopt(_cachedScanProviders[i])) { _oldScanProviders.Add(_cachedScanProviders[i]); _cachedScanProviders.RemoveAt(i); return; } } } } public void SetScanForBackgroundLoad(int scanIndex) { lock (this) { _scanIndexNext = scanIndex; if (_scanIndexNext != -1) Monitor.PulseAll(this); } } private void DisposeAllProviders() { IScanProvider[] disposeScanProviders; lock (this) { disposeScanProviders = _oldScanProviders.ToArray(); _oldScanProviders.Clear(); } foreach (var provider in disposeScanProviders) provider.Dispose(); } public void Dispose() { // Wait for dispose to happen on the background thread lock (this) { _disposing = true; SetScanProvider(null); } // Make sure the background thread goes away _backgroundThread.Join(); } } public void Dispose() { if (ScanProvider != null) ScanProvider.Dispose(); ScanProvider = null; } } }
1
12,840
Not sure what this is. Bad merge?
ProteoWizard-pwiz
.cs
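The patch in this record guards against indexing the old time series when it is empty and clamps ScanIndex so that switching to a shorter series cannot read past the end. Since the surrounding records are Go, here is the same defensive idea sketched in Go rather than C#; the names are illustrative only.

// nearestOldTime returns the time to carry over when switching sources:
// it refuses to index an empty series and clamps the index into range,
// matching the NumPoints > 0 guard and Math.Min clamp in the patch above.
func nearestOldTime(oldTimes []float64, scanIndex int) (float64, bool) {
	if len(oldTimes) == 0 {
		return 0, false // nothing to carry over; caller clamps against the new series
	}
	if scanIndex > len(oldTimes)-1 {
		scanIndex = len(oldTimes) - 1
	}
	return oldTimes[scanIndex], true
}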
@@ -1318,9 +1318,12 @@ func (s *Server) updateAccountWithClaimJWT(acc *Account, claimJWT string) error if acc == nil { return ErrMissingAccount } - if acc.claimJWT != "" && acc.claimJWT == claimJWT && !acc.incomplete { + acc.mu.Lock() + sameClaim := acc.claimJWT != "" && acc.claimJWT == claimJWT && !acc.incomplete + acc.mu.Unlock() + if sameClaim { s.Debugf("Requested account update for [%s], same claims detected", acc.Name) - return ErrAccountResolverSameClaims + return nil } accClaims, _, err := s.verifyAccountClaims(claimJWT) if err == nil && accClaims != nil {
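The patch above replaces an unsynchronized read of acc.claimJWT and acc.incomplete with a snapshot taken under the account mutex, and turns the same-claims case into a plain nil return instead of an error. A sketch of the locking shape, using only the fields that appear in the diff; it illustrates the pattern and is not part of the server's API.

// sameClaims reads the shared fields while holding the account lock and
// compares against the snapshot after releasing it, so the mutex is never
// held across the caller's logging or early return.
func sameClaims(acc *Account, claimJWT string) bool {
	acc.mu.Lock()
	same := acc.claimJWT != "" && acc.claimJWT == claimJWT && !acc.incomplete
	acc.mu.Unlock()
	return same
}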
1
// Copyright 2012-2020 The NATS Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package server import ( "bytes" "context" "crypto/tls" "encoding/json" "errors" "flag" "fmt" "io" "io/ioutil" "math/rand" "net" "net/http" // Allow dynamic profiling. _ "net/http/pprof" "os" "path" "path/filepath" "runtime" "strconv" "strings" "sync" "sync/atomic" "time" "github.com/nats-io/jwt/v2" "github.com/nats-io/nkeys" "github.com/nats-io/nuid" "github.com/nats-io/nats-server/v2/logger" ) const ( // Interval for the first PING for non client connections. firstPingInterval = time.Second // This is for the first ping for client connections. firstClientPingInterval = 2 * time.Second ) // Info is the information sent to clients, routes, gateways, and leaf nodes, // to help them understand information about this server. type Info struct { ID string `json:"server_id"` Name string `json:"server_name"` Version string `json:"version"` Proto int `json:"proto"` GitCommit string `json:"git_commit,omitempty"` GoVersion string `json:"go"` Host string `json:"host"` Port int `json:"port"` Headers bool `json:"headers"` AuthRequired bool `json:"auth_required,omitempty"` TLSRequired bool `json:"tls_required,omitempty"` TLSVerify bool `json:"tls_verify,omitempty"` TLSAvailable bool `json:"tls_available,omitempty"` MaxPayload int32 `json:"max_payload"` JetStream bool `json:"jetstream,omitempty"` IP string `json:"ip,omitempty"` CID uint64 `json:"client_id,omitempty"` ClientIP string `json:"client_ip,omitempty"` Nonce string `json:"nonce,omitempty"` Cluster string `json:"cluster,omitempty"` Dynamic bool `json:"cluster_dynamic,omitempty"` ClientConnectURLs []string `json:"connect_urls,omitempty"` // Contains URLs a client can connect to. WSConnectURLs []string `json:"ws_connect_urls,omitempty"` // Contains URLs a ws client can connect to. LameDuckMode bool `json:"ldm,omitempty"` // Route Specific Import *SubjectPermission `json:"import,omitempty"` Export *SubjectPermission `json:"export,omitempty"` LNOC bool `json:"lnoc,omitempty"` // Gateways Specific Gateway string `json:"gateway,omitempty"` // Name of the origin Gateway (sent by gateway's INFO) GatewayURLs []string `json:"gateway_urls,omitempty"` // Gateway URLs in the originating cluster (sent by gateway's INFO) GatewayURL string `json:"gateway_url,omitempty"` // Gateway URL on that server (sent by route's INFO) GatewayCmd byte `json:"gateway_cmd,omitempty"` // Command code for the receiving server to know what to do GatewayCmdPayload []byte `json:"gateway_cmd_payload,omitempty"` // Command payload when needed GatewayNRP bool `json:"gateway_nrp,omitempty"` // Uses new $GNR. prefix for mapped replies // LeafNode Specific LeafNodeURLs []string `json:"leafnode_urls,omitempty"` // LeafNode URLs that the server can reconnect to. } // Server is our main struct. 
type Server struct { gcid uint64 stats mu sync.Mutex kp nkeys.KeyPair prand *rand.Rand info Info configFile string optsMu sync.RWMutex opts *Options running bool shutdown bool reloading bool listener net.Listener gacc *Account sys *internal js *jetStream accounts sync.Map tmpAccounts sync.Map // Temporarily stores accounts that are being built activeAccounts int32 accResolver AccountResolver clients map[uint64]*client routes map[uint64]*client routesByHash sync.Map remotes map[string]*client leafs map[uint64]*client users map[string]*User nkeys map[string]*NkeyUser totalClients uint64 closed *closedRingBuffer done chan bool start time.Time http net.Listener httpHandler http.Handler httpBasePath string profiler net.Listener httpReqStats map[string]uint64 routeListener net.Listener routeInfo Info routeInfoJSON []byte routeResolver netResolver routesToSelf map[string]struct{} leafNodeListener net.Listener leafNodeInfo Info leafNodeInfoJSON []byte leafURLsMap refCountedUrlSet leafNodeOpts struct { resolver netResolver dialTimeout time.Duration } leafRemoteCfgs []*leafNodeCfg quitCh chan struct{} shutdownComplete chan struct{} // Tracking Go routines grMu sync.Mutex grTmpClients map[uint64]*client grRunning bool grWG sync.WaitGroup // to wait on various go routines cproto int64 // number of clients supporting async INFO configTime time.Time // last time config was loaded logging struct { sync.RWMutex logger Logger trace int32 debug int32 traceSysAcc int32 } clientConnectURLs []string // Used internally for quick look-ups. clientConnectURLsMap refCountedUrlSet lastCURLsUpdate int64 // For Gateways gatewayListener net.Listener // Accept listener gateway *srvGateway // Used by tests to check that http.Servers do // not set any timeout. monitoringServer *http.Server profilingServer *http.Server // LameDuck mode ldm bool ldmCh chan bool // Trusted public operator keys. trustedKeys []string // map of trusted keys to operator setting StrictSigningKeyUsage strictSigningKeyUsage map[string]struct{} // We use this to minimize mem copies for requests to monitoring // endpoint /varz (when it comes from http). varzMu sync.Mutex varz *Varz // This is set during a config reload if we detect that we have // added/removed routes. The monitoring code then check that // to know if it should update the cluster's URLs array. varzUpdateRouteURLs bool // Keeps a sublist of of subscriptions attached to leafnode connections // for the $GNR.*.*.*.> subject so that a server can send back a mapped // gateway reply. gwLeafSubs *Sublist // Used for expiration of mapped GW replies gwrm struct { w int32 ch chan time.Duration m sync.Map } // For eventIDs eventIds *nuid.NUID // Websocket structure websocket srvWebsocket // MQTT structure mqtt srvMQTT // exporting account name the importer experienced issues with incompleteAccExporterMap sync.Map // Holds cluster name under different lock for mapping cnMu sync.RWMutex cn string // For registering raft nodes with the server. rnMu sync.RWMutex raftNodes map[string]RaftNode // For mapping from a raft node name back to a server name and cluster. nodeToInfo sync.Map // For out of resources to not log errors too fast. rerrMu sync.Mutex rerrLast time.Time } type nodeInfo struct { name string cluster string id string offline bool } // Make sure all are 64bits for atomic use type stats struct { inMsgs int64 outMsgs int64 inBytes int64 outBytes int64 slowConsumers int64 } // New will setup a new server struct after parsing the options. 
// DEPRECATED: Use NewServer(opts) func New(opts *Options) *Server { s, _ := NewServer(opts) return s } // NewServer will setup a new server struct after parsing the options. // Could return an error if options can not be validated. func NewServer(opts *Options) (*Server, error) { setBaselineOptions(opts) // Process TLS options, including whether we require client certificates. tlsReq := opts.TLSConfig != nil verify := (tlsReq && opts.TLSConfig.ClientAuth == tls.RequireAndVerifyClientCert) // Created server's nkey identity. kp, _ := nkeys.CreateServer() pub, _ := kp.PublicKey() serverName := pub if opts.ServerName != _EMPTY_ { serverName = opts.ServerName } httpBasePath := normalizeBasePath(opts.HTTPBasePath) // Validate some options. This is here because we cannot assume that // server will always be started with configuration parsing (that could // report issues). Its options can be (incorrectly) set by hand when // server is embedded. If there is an error, return nil. if err := validateOptions(opts); err != nil { return nil, err } info := Info{ ID: pub, Version: VERSION, Proto: PROTO, GitCommit: gitCommit, GoVersion: runtime.Version(), Name: serverName, Host: opts.Host, Port: opts.Port, AuthRequired: false, TLSRequired: tlsReq && !opts.AllowNonTLS, TLSVerify: verify, MaxPayload: opts.MaxPayload, JetStream: opts.JetStream, Headers: !opts.NoHeaderSupport, Cluster: opts.Cluster.Name, } if tlsReq && !info.TLSRequired { info.TLSAvailable = true } now := time.Now().UTC() s := &Server{ kp: kp, configFile: opts.ConfigFile, info: info, prand: rand.New(rand.NewSource(time.Now().UnixNano())), opts: opts, done: make(chan bool, 1), start: now, configTime: now, gwLeafSubs: NewSublistWithCache(), httpBasePath: httpBasePath, eventIds: nuid.New(), routesToSelf: make(map[string]struct{}), } // Trusted root operator keys. if !s.processTrustedKeys() { return nil, fmt.Errorf("Error processing trusted operator keys") } if opts.Cluster.Name != _EMPTY_ { // Also place into mapping cn with cnMu lock. s.cnMu.Lock() s.cn = opts.Cluster.Name s.cnMu.Unlock() } s.mu.Lock() defer s.mu.Unlock() // Place ourselves in some lookup maps. ourNode := string(getHash(serverName)) s.nodeToInfo.Store(ourNode, nodeInfo{serverName, opts.Cluster.Name, info.ID, false}) s.routeResolver = opts.Cluster.resolver if s.routeResolver == nil { s.routeResolver = net.DefaultResolver } // Used internally for quick look-ups. s.clientConnectURLsMap = make(refCountedUrlSet) s.websocket.connectURLsMap = make(refCountedUrlSet) s.leafURLsMap = make(refCountedUrlSet) // Ensure that non-exported options (used in tests) are properly set. s.setLeafNodeNonExportedOptions() // Call this even if there is no gateway defined. It will // initialize the structure so we don't have to check for // it to be nil or not in various places in the code. if err := s.newGateway(opts); err != nil { return nil, err } // If we have a cluster definition but do not have a cluster name, create one. if opts.Cluster.Port != 0 && opts.Cluster.Name == "" { s.info.Cluster = nuid.Next() } // This is normally done in the AcceptLoop, once the // listener has been created (possibly with random port), // but since some tests may expect the INFO to be properly // set after New(), let's do it now. s.setInfoHostPort() // For tracking clients s.clients = make(map[uint64]*client) // For tracking closed clients. s.closed = newClosedRingBuffer(opts.MaxClosedClients) // For tracking connections that are not yet registered // in s.routes, but for which readLoop has started. 
s.grTmpClients = make(map[uint64]*client) // For tracking routes and their remote ids s.routes = make(map[uint64]*client) s.remotes = make(map[string]*client) // For tracking leaf nodes. s.leafs = make(map[uint64]*client) // Used to kick out all go routines possibly waiting on server // to shutdown. s.quitCh = make(chan struct{}) // Closed when Shutdown() is complete. Allows WaitForShutdown() to block // waiting for complete shutdown. s.shutdownComplete = make(chan struct{}) // Check for configured account resolvers. if err := s.configureResolver(); err != nil { return nil, err } // If there is an URL account resolver, do basic test to see if anyone is home. if ar := opts.AccountResolver; ar != nil { if ur, ok := ar.(*URLAccResolver); ok { if _, err := ur.Fetch(""); err != nil { return nil, err } } } // For other resolver: // In operator mode, when the account resolver depends on an external system and // the system account can't fetched, inject a temporary one. if ar := s.accResolver; len(opts.TrustedOperators) == 1 && ar != nil && opts.SystemAccount != _EMPTY_ && opts.SystemAccount != DEFAULT_SYSTEM_ACCOUNT { if _, ok := ar.(*MemAccResolver); !ok { s.mu.Unlock() var a *Account // perform direct lookup to avoid warning trace if _, err := fetchAccount(ar, s.opts.SystemAccount); err == nil { a, _ = s.fetchAccount(s.opts.SystemAccount) } s.mu.Lock() if a == nil { sac := NewAccount(s.opts.SystemAccount) sac.Issuer = opts.TrustedOperators[0].Issuer sac.signingKeys = map[string]jwt.Scope{} sac.signingKeys[s.opts.SystemAccount] = nil s.registerAccountNoLock(sac) } } } // For tracking accounts if err := s.configureAccounts(); err != nil { return nil, err } // Used to setup Authorization. s.configureAuthorization() // Start signal handler s.handleSignals() return s, nil } // clusterName returns our cluster name which could be dynamic. func (s *Server) ClusterName() string { s.mu.Lock() cn := s.info.Cluster s.mu.Unlock() return cn } // Grabs cluster name with cluster name specific lock. func (s *Server) cachedClusterName() string { s.cnMu.RLock() cn := s.cn s.cnMu.RUnlock() return cn } // setClusterName will update the cluster name for this server. func (s *Server) setClusterName(name string) { s.mu.Lock() var resetCh chan struct{} if s.sys != nil && s.info.Cluster != name { // can't hold the lock as go routine reading it may be waiting for lock as well resetCh = s.sys.resetCh } s.info.Cluster = name s.routeInfo.Cluster = name // Regenerate the info byte array s.generateRouteInfoJSON() // Need to close solicited leaf nodes. The close has to be done outside of the server lock. var leafs []*client for _, c := range s.leafs { c.mu.Lock() if c.leaf != nil && c.leaf.remote != nil { leafs = append(leafs, c) } c.mu.Unlock() } s.mu.Unlock() // Also place into mapping cn with cnMu lock. s.cnMu.Lock() s.cn = name s.cnMu.Unlock() for _, l := range leafs { l.closeConnection(ClusterNameConflict) } if resetCh != nil { resetCh <- struct{}{} } s.Noticef("Cluster name updated to %s", name) } // Return whether the cluster name is dynamic. func (s *Server) isClusterNameDynamic() bool { return s.getOpts().Cluster.Name == "" } // ClientURL returns the URL used to connect clients. Helpful in testing // when we designate a random client port (-1). func (s *Server) ClientURL() string { // FIXME(dlc) - should we add in user and pass if defined single? 
opts := s.getOpts() scheme := "nats://" if opts.TLSConfig != nil { scheme = "tls://" } return fmt.Sprintf("%s%s:%d", scheme, opts.Host, opts.Port) } func validateClusterName(o *Options) error { // Check that cluster name if defined matches any gateway name. if o.Gateway.Name != "" && o.Gateway.Name != o.Cluster.Name { if o.Cluster.Name != "" { return ErrClusterNameConfigConflict } // Set this here so we do not consider it dynamic. o.Cluster.Name = o.Gateway.Name } return nil } func validateOptions(o *Options) error { if o.LameDuckDuration > 0 && o.LameDuckGracePeriod >= o.LameDuckDuration { return fmt.Errorf("lame duck grace period (%v) should be strictly lower than lame duck duration (%v)", o.LameDuckGracePeriod, o.LameDuckDuration) } // Check that the trust configuration is correct. if err := validateTrustedOperators(o); err != nil { return err } // Check on leaf nodes which will require a system // account when gateways are also configured. if err := validateLeafNode(o); err != nil { return err } // Check that authentication is properly configured. if err := validateAuth(o); err != nil { return err } // Check that gateway is properly configured. Returns no error // if there is no gateway defined. if err := validateGatewayOptions(o); err != nil { return err } // Check that cluster name if defined matches any gateway name. if err := validateClusterName(o); err != nil { return err } if err := validateMQTTOptions(o); err != nil { return err } if err := validateJetStreamOptions(o); err != nil { return err } // Finally check websocket options. return validateWebsocketOptions(o) } func (s *Server) getOpts() *Options { s.optsMu.RLock() opts := s.opts s.optsMu.RUnlock() return opts } func (s *Server) setOpts(opts *Options) { s.optsMu.Lock() s.opts = opts s.optsMu.Unlock() } func (s *Server) globalAccount() *Account { s.mu.Lock() gacc := s.gacc s.mu.Unlock() return gacc } // Used to setup Accounts. // Lock is held upon entry. func (s *Server) configureAccounts() error { // Create the global account. if s.gacc == nil { s.gacc = NewAccount(globalAccountName) s.registerAccountNoLock(s.gacc) } opts := s.opts // Check opts and walk through them. We need to copy them here // so that we do not keep a real one sitting in the options. for _, acc := range s.opts.Accounts { var a *Account if acc.Name == globalAccountName { a = s.gacc } else { a = acc.shallowCopy() } if acc.hasMappings() { // For now just move and wipe from opts.Accounts version. a.mappings = acc.mappings acc.mappings = nil // We use this for selecting between multiple weighted destinations. a.prand = rand.New(rand.NewSource(time.Now().UnixNano())) } acc.sl = nil acc.clients = nil s.registerAccountNoLock(a) // If we see an account defined using $SYS we will make sure that is set as system account. if acc.Name == DEFAULT_SYSTEM_ACCOUNT && opts.SystemAccount == _EMPTY_ { s.opts.SystemAccount = DEFAULT_SYSTEM_ACCOUNT } } // Now that we have this we need to remap any referenced accounts in // import or export maps to the new ones. swapApproved := func(ea *exportAuth) { for sub, a := range ea.approved { var acc *Account if v, ok := s.accounts.Load(a.Name); ok { acc = v.(*Account) } ea.approved[sub] = acc } } var numAccounts int s.accounts.Range(func(k, v interface{}) bool { numAccounts++ acc := v.(*Account) // Exports for _, se := range acc.exports.streams { if se != nil { swapApproved(&se.exportAuth) } } for _, se := range acc.exports.services { if se != nil { // Swap over the bound account for service exports. 
if se.acc != nil { if v, ok := s.accounts.Load(se.acc.Name); ok { se.acc = v.(*Account) } } swapApproved(&se.exportAuth) } } // Imports for _, si := range acc.imports.streams { if v, ok := s.accounts.Load(si.acc.Name); ok { si.acc = v.(*Account) } } for _, si := range acc.imports.services { if v, ok := s.accounts.Load(si.acc.Name); ok { si.acc = v.(*Account) si.se = si.acc.getServiceExport(si.to) } } // Make sure the subs are running, but only if not reloading. if len(acc.imports.services) > 0 && acc.ic == nil && !s.reloading { acc.ic = s.createInternalAccountClient() acc.ic.acc = acc acc.addAllServiceImportSubs() } acc.updated = time.Now().UTC() return true }) // Set the system account if it was configured. // Otherwise create a default one. if opts.SystemAccount != _EMPTY_ { // Lock may be acquired in lookupAccount, so release to call lookupAccount. s.mu.Unlock() acc, err := s.lookupAccount(opts.SystemAccount) s.mu.Lock() if err == nil && s.sys != nil && acc != s.sys.account { // sys.account.clients (including internal client)/respmap/etc... are transferred separately s.sys.account = acc s.mu.Unlock() // acquires server lock separately s.addSystemAccountExports(acc) s.mu.Lock() } if err != nil { return fmt.Errorf("error resolving system account: %v", err) } // If we have defined a system account here check to see if its just us and the $G account. // We would do this to add user/pass to the system account. If this is the case add in // no-auth-user for $G. if numAccounts == 2 && s.opts.NoAuthUser == _EMPTY_ { // Create a unique name so we do not collide. var b [8]byte rn := rand.Int63() for i, l := 0, rn; i < len(b); i++ { b[i] = digits[l%base] l /= base } uname := fmt.Sprintf("nats-%s", b[:]) s.opts.Users = append(s.opts.Users, &User{Username: uname, Password: string(b[:]), Account: s.gacc}) s.opts.NoAuthUser = uname } } return nil } // Setup the account resolver. For memory resolver, make sure the JWTs are // properly formed but do not enforce expiration etc. func (s *Server) configureResolver() error { opts := s.getOpts() s.accResolver = opts.AccountResolver if opts.AccountResolver != nil { // For URL resolver, set the TLSConfig if specified. if opts.AccountResolverTLSConfig != nil { if ar, ok := opts.AccountResolver.(*URLAccResolver); ok { if t, ok := ar.c.Transport.(*http.Transport); ok { t.CloseIdleConnections() t.TLSClientConfig = opts.AccountResolverTLSConfig.Clone() } } } if len(opts.resolverPreloads) > 0 { if s.accResolver.IsReadOnly() { return fmt.Errorf("resolver preloads only available for writeable resolver types MEM/DIR/CACHE_DIR") } for k, v := range opts.resolverPreloads { _, err := jwt.DecodeAccountClaims(v) if err != nil { return fmt.Errorf("preload account error for %q: %v", k, err) } s.accResolver.Store(k, v) } } } return nil } // This will check preloads for validation issues. func (s *Server) checkResolvePreloads() { opts := s.getOpts() // We can just check the read-only opts versions here, that way we do not need // to grab server lock or access s.accResolver. for k, v := range opts.resolverPreloads { claims, err := jwt.DecodeAccountClaims(v) if err != nil { s.Errorf("Preloaded account [%s] not valid", k) continue } // Check if it is expired. 
vr := jwt.CreateValidationResults() claims.Validate(vr) if vr.IsBlocking(true) { s.Warnf("Account [%s] has validation issues:", k) for _, v := range vr.Issues { s.Warnf(" - %s", v.Description) } } } } func (s *Server) generateRouteInfoJSON() { b, _ := json.Marshal(s.routeInfo) pcs := [][]byte{[]byte("INFO"), b, []byte(CR_LF)} s.routeInfoJSON = bytes.Join(pcs, []byte(" ")) } // Determines if we are in pre NATS 2.0 setup with no accounts. func (s *Server) globalAccountOnly() bool { var hasOthers bool if s.trustedKeys != nil { return false } s.mu.Lock() s.accounts.Range(func(k, v interface{}) bool { acc := v.(*Account) // Ignore global and system if acc == s.gacc || (s.sys != nil && acc == s.sys.account) { return true } hasOthers = true return false }) s.mu.Unlock() return !hasOthers } // Determines if this server is in standalone mode, meaning no routes or gateways. func (s *Server) standAloneMode() bool { opts := s.getOpts() return opts.Cluster.Port == 0 && opts.Gateway.Port == 0 } func (s *Server) configuredRoutes() int { return len(s.getOpts().Routes) } // activePeers is used in bootstrapping raft groups like the JetStream meta controller. func (s *Server) ActivePeers() (peers []string) { s.nodeToInfo.Range(func(k, v interface{}) bool { si := v.(nodeInfo) if !si.offline { peers = append(peers, k.(string)) } return true }) return peers } // isTrustedIssuer will check that the issuer is a trusted public key. // This is used to make sure an account was signed by a trusted operator. func (s *Server) isTrustedIssuer(issuer string) bool { s.mu.Lock() defer s.mu.Unlock() // If we are not running in trusted mode and there is no issuer, that is ok. if s.trustedKeys == nil && issuer == "" { return true } for _, tk := range s.trustedKeys { if tk == issuer { return true } } return false } // processTrustedKeys will process binary stamped and // options-based trusted nkeys. Returns success. func (s *Server) processTrustedKeys() bool { s.strictSigningKeyUsage = map[string]struct{}{} if trustedKeys != "" && !s.initStampedTrustedKeys() { return false } else if s.opts.TrustedKeys != nil { for _, key := range s.opts.TrustedKeys { if !nkeys.IsValidPublicOperatorKey(key) { return false } } s.trustedKeys = append([]string(nil), s.opts.TrustedKeys...) for _, claim := range s.opts.TrustedOperators { if !claim.StrictSigningKeyUsage { continue } for _, key := range claim.SigningKeys { s.strictSigningKeyUsage[key] = struct{}{} } } } return true } // checkTrustedKeyString will check that the string is a valid array // of public operator nkeys. func checkTrustedKeyString(keys string) []string { tks := strings.Fields(keys) if len(tks) == 0 { return nil } // Walk all the keys and make sure they are valid. for _, key := range tks { if !nkeys.IsValidPublicOperatorKey(key) { return nil } } return tks } // initStampedTrustedKeys will check the stamped trusted keys // and will set the server field 'trustedKeys'. Returns whether // it succeeded or not. func (s *Server) initStampedTrustedKeys() bool { // Check to see if we have an override in options, which will cause us to fail. if len(s.opts.TrustedKeys) > 0 { return false } tks := checkTrustedKeyString(trustedKeys) if len(tks) == 0 { return false } s.trustedKeys = tks return true } // PrintAndDie is exported for access in other packages. func PrintAndDie(msg string) { fmt.Fprintln(os.Stderr, msg) os.Exit(1) } // PrintServerAndExit will print our version and exit. 
func PrintServerAndExit() { fmt.Printf("nats-server: v%s\n", VERSION) os.Exit(0) } // ProcessCommandLineArgs takes the command line arguments // validating and setting flags for handling in case any // sub command was present. func ProcessCommandLineArgs(cmd *flag.FlagSet) (showVersion bool, showHelp bool, err error) { if len(cmd.Args()) > 0 { arg := cmd.Args()[0] switch strings.ToLower(arg) { case "version": return true, false, nil case "help": return false, true, nil default: return false, false, fmt.Errorf("unrecognized command: %q", arg) } } return false, false, nil } // Public version. func (s *Server) Running() bool { return s.isRunning() } // Protected check on running state func (s *Server) isRunning() bool { s.mu.Lock() running := s.running s.mu.Unlock() return running } func (s *Server) logPid() error { pidStr := strconv.Itoa(os.Getpid()) return ioutil.WriteFile(s.getOpts().PidFile, []byte(pidStr), 0660) } // NewAccountsAllowed returns whether or not new accounts can be created on the fly. func (s *Server) NewAccountsAllowed() bool { s.mu.Lock() defer s.mu.Unlock() return s.opts.AllowNewAccounts } // numReservedAccounts will return the number of reserved accounts configured in the server. // Currently this is 1, one for the global default account. func (s *Server) numReservedAccounts() int { return 1 } // NumActiveAccounts reports number of active accounts on this server. func (s *Server) NumActiveAccounts() int32 { return atomic.LoadInt32(&s.activeAccounts) } // incActiveAccounts() just adds one under lock. func (s *Server) incActiveAccounts() { atomic.AddInt32(&s.activeAccounts, 1) } // decActiveAccounts() just subtracts one under lock. func (s *Server) decActiveAccounts() { atomic.AddInt32(&s.activeAccounts, -1) } // This should be used for testing only. Will be slow since we have to // range over all accounts in the sync.Map to count. func (s *Server) numAccounts() int { count := 0 s.mu.Lock() s.accounts.Range(func(k, v interface{}) bool { count++ return true }) s.mu.Unlock() return count } // NumLoadedAccounts returns the number of loaded accounts. func (s *Server) NumLoadedAccounts() int { return s.numAccounts() } // LookupOrRegisterAccount will return the given account if known or create a new entry. func (s *Server) LookupOrRegisterAccount(name string) (account *Account, isNew bool) { s.mu.Lock() defer s.mu.Unlock() if v, ok := s.accounts.Load(name); ok { return v.(*Account), false } acc := NewAccount(name) s.registerAccountNoLock(acc) return acc, true } // RegisterAccount will register an account. The account must be new // or this call will fail. func (s *Server) RegisterAccount(name string) (*Account, error) { s.mu.Lock() defer s.mu.Unlock() if _, ok := s.accounts.Load(name); ok { return nil, ErrAccountExists } acc := NewAccount(name) s.registerAccountNoLock(acc) return acc, nil } // SetSystemAccount will set the internal system account. // If root operators are present it will also check validity. func (s *Server) SetSystemAccount(accName string) error { // Lookup from sync.Map first. if v, ok := s.accounts.Load(accName); ok { return s.setSystemAccount(v.(*Account)) } // If we are here we do not have local knowledge of this account. // Do this one by hand to return more useful error. ac, jwt, err := s.fetchAccountClaims(accName) if err != nil { return err } acc := s.buildInternalAccount(ac) acc.claimJWT = jwt // Due to race, we need to make sure that we are not // registering twice. 
if racc := s.registerAccount(acc); racc != nil { return nil } return s.setSystemAccount(acc) } // SystemAccount returns the system account if set. func (s *Server) SystemAccount() *Account { var sacc *Account s.mu.Lock() if s.sys != nil { sacc = s.sys.account } s.mu.Unlock() return sacc } // GlobalAccount returns the global account. // Default clients will use the global account. func (s *Server) GlobalAccount() *Account { s.mu.Lock() defer s.mu.Unlock() return s.gacc } // SetDefaultSystemAccount will create a default system account if one is not present. func (s *Server) SetDefaultSystemAccount() error { if _, isNew := s.LookupOrRegisterAccount(DEFAULT_SYSTEM_ACCOUNT); !isNew { return nil } s.Debugf("Created system account: %q", DEFAULT_SYSTEM_ACCOUNT) return s.SetSystemAccount(DEFAULT_SYSTEM_ACCOUNT) } // For internal sends. const internalSendQLen = 256 * 1024 // Assign a system account. Should only be called once. // This sets up a server to send and receive messages from // inside the server itself. func (s *Server) setSystemAccount(acc *Account) error { if acc == nil { return ErrMissingAccount } // Don't try to fix this here. if acc.IsExpired() { return ErrAccountExpired } // If we are running with trusted keys for an operator // make sure we check the account is legit. if !s.isTrustedIssuer(acc.Issuer) { return ErrAccountValidation } s.mu.Lock() if s.sys != nil { s.mu.Unlock() return ErrAccountExists } // This is here in an attempt to quiet the race detector and not have to place // locks on fast path for inbound messages and checking service imports. acc.mu.Lock() if acc.imports.services == nil { acc.imports.services = make(map[string]*serviceImport) } acc.mu.Unlock() s.sys = &internal{ account: acc, client: s.createInternalSystemClient(), seq: 1, sid: 1, servers: make(map[string]*serverUpdate), replies: make(map[string]msgHandler), sendq: make(chan *pubMsg, internalSendQLen), resetCh: make(chan struct{}), sq: s.newSendQ(), statsz: eventsHBInterval, orphMax: 5 * eventsHBInterval, chkOrph: 3 * eventsHBInterval, } s.sys.wg.Add(1) s.mu.Unlock() // Register with the account. s.sys.client.registerWithAccount(acc) s.addSystemAccountExports(acc) // Start our internal loop to serialize outbound messages. // We do our own wg here since we will stop first during shutdown. go s.internalSendLoop(&s.sys.wg) // Start up our general subscriptions s.initEventTracking() // Track for dead remote servers. s.wrapChk(s.startRemoteServerSweepTimer)() // Send out statsz updates periodically. s.wrapChk(s.startStatszTimer)() // If we have existing accounts make sure we enable account tracking. s.mu.Lock() s.accounts.Range(func(k, v interface{}) bool { acc := v.(*Account) s.enableAccountTracking(acc) return true }) s.mu.Unlock() return nil } // Creates an internal system client. func (s *Server) createInternalSystemClient() *client { return s.createInternalClient(SYSTEM) } // Creates an internal jetstream client. func (s *Server) createInternalJetStreamClient() *client { return s.createInternalClient(JETSTREAM) } // Creates an internal client for Account. func (s *Server) createInternalAccountClient() *client { return s.createInternalClient(ACCOUNT) } // Internal clients. 
kind should be SYSTEM or JETSTREAM func (s *Server) createInternalClient(kind int) *client { if kind != SYSTEM && kind != JETSTREAM && kind != ACCOUNT { return nil } now := time.Now().UTC() c := &client{srv: s, kind: kind, opts: internalOpts, msubs: -1, mpay: -1, start: now, last: now} c.initClient() c.echo = false c.headers = true c.flags.set(noReconnect) return c } // Determine if accounts should track subscriptions for // efficient propagation. // Lock should be held on entry. func (s *Server) shouldTrackSubscriptions() bool { return (s.opts.Cluster.Port != 0 || s.opts.Gateway.Port != 0) } // Invokes registerAccountNoLock under the protection of the server lock. // That is, server lock is acquired/released in this function. // See registerAccountNoLock for comment on returned value. func (s *Server) registerAccount(acc *Account) *Account { s.mu.Lock() racc := s.registerAccountNoLock(acc) s.mu.Unlock() return racc } // Helper to set the sublist based on preferences. func (s *Server) setAccountSublist(acc *Account) { if acc != nil && acc.sl == nil { opts := s.getOpts() if opts != nil && opts.NoSublistCache { acc.sl = NewSublistNoCache() } else { acc.sl = NewSublistWithCache() } } } // Registers an account in the server. // Due to some locking considerations, we may end-up trying // to register the same account twice. This function will // then return the already registered account. // Lock should be held on entry. func (s *Server) registerAccountNoLock(acc *Account) *Account { // We are under the server lock. Lookup from map, if present // return existing account. if a, _ := s.accounts.Load(acc.Name); a != nil { s.tmpAccounts.Delete(acc.Name) return a.(*Account) } // Finish account setup and store. s.setAccountSublist(acc) acc.mu.Lock() if acc.clients == nil { acc.clients = make(map[*client]struct{}) } // If we are capable of routing we will track subscription // information for efficient interest propagation. // During config reload, it is possible that account was // already created (global account), so use locking and // make sure we create only if needed. // TODO(dlc)- Double check that we need this for GWs. if acc.rm == nil && s.opts != nil && s.shouldTrackSubscriptions() { acc.rm = make(map[string]int32) acc.lqws = make(map[string]int32) } acc.srv = s acc.updated = time.Now().UTC() acc.mu.Unlock() s.accounts.Store(acc.Name, acc) s.tmpAccounts.Delete(acc.Name) s.enableAccountTracking(acc) return nil } // lookupAccount is a function to return the account structure // associated with an account name. // Lock MUST NOT be held upon entry. func (s *Server) lookupAccount(name string) (*Account, error) { var acc *Account if v, ok := s.accounts.Load(name); ok { acc = v.(*Account) } if acc != nil { // If we are expired and we have a resolver, then // return the latest information from the resolver. if acc.IsExpired() { s.Debugf("Requested account [%s] has expired", name) if s.AccountResolver() != nil { if err := s.updateAccount(acc); err != nil { // This error could mask expired, so just return expired here. return nil, ErrAccountExpired } } else { return nil, ErrAccountExpired } } return acc, nil } // If we have a resolver see if it can fetch the account. if s.AccountResolver() == nil { return nil, ErrNoAccountResolver } return s.fetchAccount(name) } // LookupAccount is a public function to return the account structure // associated with name. 
func (s *Server) LookupAccount(name string) (*Account, error) { return s.lookupAccount(name) } // This will fetch new claims and if found update the account with new claims. // Lock MUST NOT be held upon entry. func (s *Server) updateAccount(acc *Account) error { // TODO(dlc) - Make configurable if !acc.incomplete && time.Since(acc.updated) < time.Second { s.Debugf("Requested account update for [%s] ignored, too soon", acc.Name) return ErrAccountResolverUpdateTooSoon } claimJWT, err := s.fetchRawAccountClaims(acc.Name) if err != nil { return err } return s.updateAccountWithClaimJWT(acc, claimJWT) } // updateAccountWithClaimJWT will check and apply the claim update. // Lock MUST NOT be held upon entry. func (s *Server) updateAccountWithClaimJWT(acc *Account, claimJWT string) error { if acc == nil { return ErrMissingAccount } if acc.claimJWT != "" && acc.claimJWT == claimJWT && !acc.incomplete { s.Debugf("Requested account update for [%s], same claims detected", acc.Name) return ErrAccountResolverSameClaims } accClaims, _, err := s.verifyAccountClaims(claimJWT) if err == nil && accClaims != nil { acc.mu.Lock() if acc.Issuer == "" { acc.Issuer = accClaims.Issuer } if acc.Name != accClaims.Subject { acc.mu.Unlock() return ErrAccountValidation } acc.claimJWT = claimJWT acc.mu.Unlock() s.UpdateAccountClaims(acc, accClaims) return nil } return err } // fetchRawAccountClaims will grab raw account claims iff we have a resolver. // Lock is NOT held upon entry. func (s *Server) fetchRawAccountClaims(name string) (string, error) { accResolver := s.AccountResolver() if accResolver == nil { return "", ErrNoAccountResolver } // Need to do actual Fetch start := time.Now() claimJWT, err := fetchAccount(accResolver, name) fetchTime := time.Since(start) if fetchTime > time.Second { s.Warnf("Account [%s] fetch took %v", name, fetchTime) } else { s.Debugf("Account [%s] fetch took %v", name, fetchTime) } if err != nil { s.Warnf("Account fetch failed: %v", err) return "", err } return claimJWT, nil } // fetchAccountClaims will attempt to fetch new claims if a resolver is present. // Lock is NOT held upon entry. func (s *Server) fetchAccountClaims(name string) (*jwt.AccountClaims, string, error) { claimJWT, err := s.fetchRawAccountClaims(name) if err != nil { return nil, _EMPTY_, err } var claim *jwt.AccountClaims claim, claimJWT, err = s.verifyAccountClaims(claimJWT) if claim != nil && claim.Subject != name { return nil, _EMPTY_, ErrAccountValidation } return claim, claimJWT, err } // verifyAccountClaims will decode and validate any account claims. func (s *Server) verifyAccountClaims(claimJWT string) (*jwt.AccountClaims, string, error) { accClaims, err := jwt.DecodeAccountClaims(claimJWT) if err != nil { return nil, _EMPTY_, err } if !s.isTrustedIssuer(accClaims.Issuer) { return nil, _EMPTY_, ErrAccountValidation } vr := jwt.CreateValidationResults() accClaims.Validate(vr) if vr.IsBlocking(true) { return nil, _EMPTY_, ErrAccountValidation } return accClaims, claimJWT, nil } // This will fetch an account from a resolver if defined. // Lock is NOT held upon entry. func (s *Server) fetchAccount(name string) (*Account, error) { accClaims, claimJWT, err := s.fetchAccountClaims(name) if accClaims == nil { return nil, err } acc := s.buildInternalAccount(accClaims) acc.claimJWT = claimJWT // Due to possible race, if registerAccount() returns a non // nil account, it means the same account was already // registered and we should use this one. 
if racc := s.registerAccount(acc); racc != nil { // Update with the new claims in case they are new. // Following call will ignore ErrAccountResolverSameClaims // if claims are the same. err = s.updateAccountWithClaimJWT(racc, claimJWT) if err != nil && err != ErrAccountResolverSameClaims { return nil, err } return racc, nil } // The sub imports may have been setup but will not have had their // subscriptions properly setup. Do that here. if len(acc.imports.services) > 0 { if acc.ic == nil { acc.ic = s.createInternalAccountClient() acc.ic.acc = acc } acc.addAllServiceImportSubs() } return acc, nil } // Start up the server, this will block. // Start via a Go routine if needed. func (s *Server) Start() { s.Noticef("Starting nats-server") gc := gitCommit if gc == "" { gc = "not set" } // Snapshot server options. opts := s.getOpts() s.Noticef(" Version: %s", VERSION) s.Noticef(" Git: [%s]", gc) s.Debugf(" Go build: %s", s.info.GoVersion) s.Noticef(" Name: %s", s.info.Name) if opts.JetStream { s.Noticef(" Node: %s", getHash(s.info.Name)) } s.Noticef(" ID: %s", s.info.ID) defer s.Noticef("Server is ready") // Check for insecure configurations. s.checkAuthforWarnings() // Avoid RACE between Start() and Shutdown() s.mu.Lock() s.running = true s.mu.Unlock() s.grMu.Lock() s.grRunning = true s.grMu.Unlock() if opts.ConfigFile != _EMPTY_ { s.Noticef("Using configuration file: %s", opts.ConfigFile) } hasOperators := len(opts.TrustedOperators) > 0 if hasOperators { s.Noticef("Trusted Operators") } for _, opc := range opts.TrustedOperators { s.Noticef(" System : %q", opc.Audience) s.Noticef(" Operator: %q", opc.Name) s.Noticef(" Issued : %v", time.Unix(opc.IssuedAt, 0)) s.Noticef(" Expires : %v", time.Unix(opc.Expires, 0)) } if hasOperators && opts.SystemAccount == _EMPTY_ { s.Warnf("Trusted Operators should utilize a System Account") } // If we have a memory resolver, check the accounts here for validation exceptions. // This allows them to be logged right away vs when they are accessed via a client. if hasOperators && len(opts.resolverPreloads) > 0 { s.checkResolvePreloads() } // Log the pid to a file if opts.PidFile != _EMPTY_ { if err := s.logPid(); err != nil { s.Fatalf("Could not write pidfile: %v", err) return } } // Setup system account which will start the eventing stack. if sa := opts.SystemAccount; sa != _EMPTY_ { if err := s.SetSystemAccount(sa); err != nil { s.Fatalf("Can't set system account: %v", err) return } } else if !opts.NoSystemAccount { // We will create a default system account here. 
s.SetDefaultSystemAccount() } // start up resolver machinery if ar := s.AccountResolver(); ar != nil { if err := ar.Start(s); err != nil { s.Fatalf("Could not start resolver: %v", err) return } // In operator mode, when the account resolver depends on an external system and // the system account is the bootstrapping account, start fetching it if len(opts.TrustedOperators) == 1 && opts.SystemAccount != _EMPTY_ && opts.SystemAccount != DEFAULT_SYSTEM_ACCOUNT { _, isMemResolver := ar.(*MemAccResolver) if v, ok := s.accounts.Load(s.opts.SystemAccount); !isMemResolver && ok && v.(*Account).claimJWT == "" { s.Noticef("Using bootstrapping system account") s.startGoRoutine(func() { defer s.grWG.Done() t := time.NewTicker(time.Second) defer t.Stop() for { select { case <-s.quitCh: return case <-t.C: if _, err := fetchAccount(ar, s.opts.SystemAccount); err != nil { continue } if _, err := s.fetchAccount(s.opts.SystemAccount); err != nil { continue } s.Noticef("System account fetched and updated") return } } }) } } } // Start expiration of mapped GW replies, regardless if // this server is configured with gateway or not. s.startGWReplyMapExpiration() // Check if JetStream has been enabled. This needs to be after // the system account setup above. JetStream will create its // own system account if one is not present. if opts.JetStream { // Make sure someone is not trying to enable on the system account. if sa := s.SystemAccount(); sa != nil && sa.jsLimits != nil { s.Fatalf("Not allowed to enable JetStream on the system account") } cfg := &JetStreamConfig{ StoreDir: opts.StoreDir, MaxMemory: opts.JetStreamMaxMemory, MaxStore: opts.JetStreamMaxStore, } if err := s.EnableJetStream(cfg); err != nil { s.Fatalf("Can't start JetStream: %v", err) return } } else { // Check to see if any configured accounts have JetStream enabled. s.accounts.Range(func(k, v interface{}) bool { acc := v.(*Account) acc.mu.RLock() hasJs := acc.jsLimits != nil acc.mu.RUnlock() if hasJs { s.checkJetStreamExports() acc.enableAllJetStreamServiceImports() } return true }) } // Start monitoring if needed if err := s.StartMonitoring(); err != nil { s.Fatalf("Can't start monitoring: %v", err) return } // Start up gateway if needed. Do this before starting the routes, because // we want to resolve the gateway host:port so that this information can // be sent to other routes. if opts.Gateway.Port != 0 { s.startGateways() } // Start websocket server if needed. Do this before starting the routes, and // leaf node because we want to resolve the gateway host:port so that this // information can be sent to other routes. if opts.Websocket.Port != 0 { s.startWebsocketServer() } // Start up listen if we want to accept leaf node connections. if opts.LeafNode.Port != 0 { // Will resolve or assign the advertise address for the leafnode listener. // We need that in StartRouting(). s.startLeafNodeAcceptLoop() } // Solicit remote servers for leaf node connections. if len(opts.LeafNode.Remotes) > 0 { s.solicitLeafNodeRemotes(opts.LeafNode.Remotes) } // TODO (ik): I wanted to refactor this by starting the client // accept loop first, that is, it would resolve listen spec // in place, but start the accept-for-loop in a different go // routine. This would get rid of the synchronization between // this function and StartRouting, which I also would have wanted // to refactor, but both AcceptLoop() and StartRouting() have // been exported and not sure if that would break users using them. // We could mark them as deprecated and remove in a release or two... 
// The Routing routine needs to wait for the client listen // port to be opened and potential ephemeral port selected. clientListenReady := make(chan struct{}) // MQTT if opts.MQTT.Port != 0 { s.startMQTT() } // Start up routing as well if needed. if opts.Cluster.Port != 0 { s.startGoRoutine(func() { s.StartRouting(clientListenReady) }) } // Pprof http endpoint for the profiler. if opts.ProfPort != 0 { s.StartProfiler() } if opts.PortsFileDir != _EMPTY_ { s.logPorts() } // Wait for clients. s.AcceptLoop(clientListenReady) } // Shutdown will shutdown the server instance by kicking out the AcceptLoop // and closing all associated clients. func (s *Server) Shutdown() { // Transfer off any raft nodes that we are a leader by shutting them all down. s.shutdownRaftNodes() // This is for clustered JetStream and ephemeral consumers. // No-op if not clustered or not running JetStream. s.migrateEphemerals() // Shutdown the eventing system as needed. // This is done first to send out any messages for // account status. We will also clean up any // eventing items associated with accounts. s.shutdownEventing() s.mu.Lock() // Prevent issues with multiple calls. if s.shutdown { s.mu.Unlock() return } s.Noticef("Initiating Shutdown...") if s.accResolver != nil { s.accResolver.Close() } opts := s.getOpts() s.shutdown = true s.running = false s.grMu.Lock() s.grRunning = false s.grMu.Unlock() s.mu.Unlock() // Now check jetstream. s.shutdownJetStream() s.mu.Lock() conns := make(map[uint64]*client) // Copy off the clients for i, c := range s.clients { conns[i] = c } // Copy off the connections that are not yet registered // in s.routes, but for which the readLoop has started s.grMu.Lock() for i, c := range s.grTmpClients { conns[i] = c } s.grMu.Unlock() // Copy off the routes for i, r := range s.routes { conns[i] = r } // Copy off the gateways s.getAllGatewayConnections(conns) // Copy off the leaf nodes for i, c := range s.leafs { conns[i] = c } // Number of done channel responses we expect. doneExpected := 0 // Kick client AcceptLoop() if s.listener != nil { doneExpected++ s.listener.Close() s.listener = nil } // Kick websocket server if s.websocket.server != nil { doneExpected++ s.websocket.server.Close() s.websocket.server = nil s.websocket.listener = nil } // Kick MQTT accept loop if s.mqtt.listener != nil { doneExpected++ s.mqtt.listener.Close() s.mqtt.listener = nil } // Kick leafnodes AcceptLoop() if s.leafNodeListener != nil { doneExpected++ s.leafNodeListener.Close() s.leafNodeListener = nil } // Kick route AcceptLoop() if s.routeListener != nil { doneExpected++ s.routeListener.Close() s.routeListener = nil } // Kick Gateway AcceptLoop() if s.gatewayListener != nil { doneExpected++ s.gatewayListener.Close() s.gatewayListener = nil } // Kick HTTP monitoring if its running if s.http != nil { doneExpected++ s.http.Close() s.http = nil } // Kick Profiling if its running if s.profiler != nil { doneExpected++ s.profiler.Close() } s.mu.Unlock() // Release go routines that wait on that channel close(s.quitCh) // Close client and route connections for _, c := range conns { c.setNoReconnect() c.closeConnection(ServerShutdown) } // Block until the accept loops exit for doneExpected > 0 { <-s.done doneExpected-- } // Wait for go routines to be done. s.grWG.Wait() if opts.PortsFileDir != _EMPTY_ { s.deletePortsFile(opts.PortsFileDir) } s.Noticef("Server Exiting..") // Close logger if applicable. It allows tests on Windows // to be able to do proper cleanup (delete log file). 
s.logging.RLock() log := s.logging.logger s.logging.RUnlock() if log != nil { if l, ok := log.(*logger.Logger); ok { l.Close() } } // Notify that the shutdown is complete close(s.shutdownComplete) } // WaitForShutdown will block until the server has been fully shutdown. func (s *Server) WaitForShutdown() { <-s.shutdownComplete } // AcceptLoop is exported for easier testing. func (s *Server) AcceptLoop(clr chan struct{}) { // If we were to exit before the listener is setup properly, // make sure we close the channel. defer func() { if clr != nil { close(clr) } }() // Snapshot server options. opts := s.getOpts() // Setup state that can enable shutdown s.mu.Lock() if s.shutdown { s.mu.Unlock() return } hp := net.JoinHostPort(opts.Host, strconv.Itoa(opts.Port)) l, e := natsListen("tcp", hp) if e != nil { s.mu.Unlock() s.Fatalf("Error listening on port: %s, %q", hp, e) return } s.Noticef("Listening for client connections on %s", net.JoinHostPort(opts.Host, strconv.Itoa(l.Addr().(*net.TCPAddr).Port))) // Alert of TLS enabled. if opts.TLSConfig != nil { s.Noticef("TLS required for client connections") } // If server was started with RANDOM_PORT (-1), opts.Port would be equal // to 0 at the beginning this function. So we need to get the actual port if opts.Port == 0 { // Write resolved port back to options. opts.Port = l.Addr().(*net.TCPAddr).Port } // Now that port has been set (if it was set to RANDOM), set the // server's info Host/Port with either values from Options or // ClientAdvertise. if err := s.setInfoHostPort(); err != nil { s.Fatalf("Error setting server INFO with ClientAdvertise value of %s, err=%v", s.opts.ClientAdvertise, err) l.Close() s.mu.Unlock() return } // Keep track of client connect URLs. We may need them later. s.clientConnectURLs = s.getClientConnectURLs() s.listener = l go s.acceptConnections(l, "Client", func(conn net.Conn) { s.createClient(conn) }, func(_ error) bool { if s.isLameDuckMode() { // Signal that we are not accepting new clients s.ldmCh <- true // Now wait for the Shutdown... <-s.quitCh return true } return false }) s.mu.Unlock() // Let the caller know that we are ready close(clr) clr = nil } func (s *Server) acceptConnections(l net.Listener, acceptName string, createFunc func(conn net.Conn), errFunc func(err error) bool) { tmpDelay := ACCEPT_MIN_SLEEP for { conn, err := l.Accept() if err != nil { if errFunc != nil && errFunc(err) { return } if tmpDelay = s.acceptError(acceptName, err, tmpDelay); tmpDelay < 0 { break } continue } tmpDelay = ACCEPT_MIN_SLEEP if !s.startGoRoutine(func() { createFunc(conn) s.grWG.Done() }) { conn.Close() } } s.Debugf(acceptName + " accept loop exiting..") s.done <- true } // This function sets the server's info Host/Port based on server Options. // Note that this function may be called during config reload, this is why // Host/Port may be reset to original Options if the ClientAdvertise option // is not set (since it may have previously been). func (s *Server) setInfoHostPort() error { // When this function is called, opts.Port is set to the actual listen // port (if option was originally set to RANDOM), even during a config // reload. So use of s.opts.Port is safe. if s.opts.ClientAdvertise != "" { h, p, err := parseHostPort(s.opts.ClientAdvertise, s.opts.Port) if err != nil { return err } s.info.Host = h s.info.Port = p } else { s.info.Host = s.opts.Host s.info.Port = s.opts.Port } return nil } // StartProfiler is called to enable dynamic profiling. func (s *Server) StartProfiler() { // Snapshot server options. 
opts := s.getOpts() port := opts.ProfPort // Check for Random Port if port == -1 { port = 0 } s.mu.Lock() if s.shutdown { s.mu.Unlock() return } hp := net.JoinHostPort(opts.Host, strconv.Itoa(port)) l, err := net.Listen("tcp", hp) if err != nil { s.mu.Unlock() s.Fatalf("error starting profiler: %s", err) return } s.Noticef("profiling port: %d", l.Addr().(*net.TCPAddr).Port) srv := &http.Server{ Addr: hp, Handler: http.DefaultServeMux, MaxHeaderBytes: 1 << 20, } s.profiler = l s.profilingServer = srv // Enable blocking profile runtime.SetBlockProfileRate(1) go func() { // if this errors out, it's probably because the server is being shutdown err := srv.Serve(l) if err != nil { s.mu.Lock() shutdown := s.shutdown s.mu.Unlock() if !shutdown { s.Fatalf("error starting profiler: %s", err) } } srv.Close() s.done <- true }() s.mu.Unlock() } // StartHTTPMonitoring will enable the HTTP monitoring port. // DEPRECATED: Should use StartMonitoring. func (s *Server) StartHTTPMonitoring() { s.startMonitoring(false) } // StartHTTPSMonitoring will enable the HTTPS monitoring port. // DEPRECATED: Should use StartMonitoring. func (s *Server) StartHTTPSMonitoring() { s.startMonitoring(true) } // StartMonitoring starts the HTTP or HTTPs server if needed. func (s *Server) StartMonitoring() error { // Snapshot server options. opts := s.getOpts() // Specifying both HTTP and HTTPS ports is a misconfiguration if opts.HTTPPort != 0 && opts.HTTPSPort != 0 { return fmt.Errorf("can't specify both HTTP (%v) and HTTPs (%v) ports", opts.HTTPPort, opts.HTTPSPort) } var err error if opts.HTTPPort != 0 { err = s.startMonitoring(false) } else if opts.HTTPSPort != 0 { if opts.TLSConfig == nil { return fmt.Errorf("TLS cert and key required for HTTPS") } err = s.startMonitoring(true) } return err } // HTTP endpoints const ( RootPath = "/" VarzPath = "/varz" ConnzPath = "/connz" RoutezPath = "/routez" GatewayzPath = "/gatewayz" LeafzPath = "/leafz" SubszPath = "/subsz" StackszPath = "/stacksz" AccountzPath = "/accountz" JszPath = "/jsz" ) func (s *Server) basePath(p string) string { return path.Join(s.httpBasePath, p) } // Start the monitoring server func (s *Server) startMonitoring(secure bool) error { // Snapshot server options. 
opts := s.getOpts() // Used to track HTTP requests s.httpReqStats = map[string]uint64{ RootPath: 0, VarzPath: 0, ConnzPath: 0, RoutezPath: 0, GatewayzPath: 0, SubszPath: 0, } var ( hp string err error httpListener net.Listener port int ) monitorProtocol := "http" if secure { monitorProtocol += "s" port = opts.HTTPSPort if port == -1 { port = 0 } hp = net.JoinHostPort(opts.HTTPHost, strconv.Itoa(port)) config := opts.TLSConfig.Clone() config.ClientAuth = tls.NoClientCert httpListener, err = tls.Listen("tcp", hp, config) } else { port = opts.HTTPPort if port == -1 { port = 0 } hp = net.JoinHostPort(opts.HTTPHost, strconv.Itoa(port)) httpListener, err = net.Listen("tcp", hp) } if err != nil { return fmt.Errorf("can't listen to the monitor port: %v", err) } s.Noticef("Starting %s monitor on %s", monitorProtocol, net.JoinHostPort(opts.HTTPHost, strconv.Itoa(httpListener.Addr().(*net.TCPAddr).Port))) mux := http.NewServeMux() // Root mux.HandleFunc(s.basePath(RootPath), s.HandleRoot) // Varz mux.HandleFunc(s.basePath(VarzPath), s.HandleVarz) // Connz mux.HandleFunc(s.basePath(ConnzPath), s.HandleConnz) // Routez mux.HandleFunc(s.basePath(RoutezPath), s.HandleRoutez) // Gatewayz mux.HandleFunc(s.basePath(GatewayzPath), s.HandleGatewayz) // Leafz mux.HandleFunc(s.basePath(LeafzPath), s.HandleLeafz) // Subz mux.HandleFunc(s.basePath(SubszPath), s.HandleSubsz) // Subz alias for backwards compatibility mux.HandleFunc(s.basePath("/subscriptionsz"), s.HandleSubsz) // Stacksz mux.HandleFunc(s.basePath(StackszPath), s.HandleStacksz) // Accountz mux.HandleFunc(s.basePath(AccountzPath), s.HandleAccountz) // Jsz mux.HandleFunc(s.basePath(JszPath), s.HandleJsz) // Do not set a WriteTimeout because it could cause cURL/browser // to return empty response or unable to display page if the // server needs more time to build the response. srv := &http.Server{ Addr: hp, Handler: mux, MaxHeaderBytes: 1 << 20, } s.mu.Lock() if s.shutdown { httpListener.Close() s.mu.Unlock() return nil } s.http = httpListener s.httpHandler = mux s.monitoringServer = srv s.mu.Unlock() go func() { if err := srv.Serve(httpListener); err != nil { s.mu.Lock() shutdown := s.shutdown s.mu.Unlock() if !shutdown { s.Fatalf("Error starting monitor on %q: %v", hp, err) } } srv.Close() srv.Handler = nil s.mu.Lock() s.httpHandler = nil s.mu.Unlock() s.done <- true }() return nil } // HTTPHandler returns the http.Handler object used to handle monitoring // endpoints. It will return nil if the server is not configured for // monitoring, or if the server has not been started yet (Server.Start()). func (s *Server) HTTPHandler() http.Handler { s.mu.Lock() defer s.mu.Unlock() return s.httpHandler } // Perform a conditional deep copy due to reference nature of [Client|WS]ConnectURLs. // If updates are made to Info, this function should be consulted and updated. // Assume lock is held. func (s *Server) copyInfo() Info { info := s.info if len(info.ClientConnectURLs) > 0 { info.ClientConnectURLs = append([]string(nil), s.info.ClientConnectURLs...) } if len(info.WSConnectURLs) > 0 { info.WSConnectURLs = append([]string(nil), s.info.WSConnectURLs...) } return info } // tlsMixConn is used when we can receive both TLS and non-TLS connections on same port. type tlsMixConn struct { net.Conn pre *bytes.Buffer } // Read for our mixed multi-reader. 
func (c *tlsMixConn) Read(b []byte) (int, error) { if c.pre != nil { n, err := c.pre.Read(b) if c.pre.Len() == 0 { c.pre = nil } return n, err } return c.Conn.Read(b) } func (s *Server) createClient(conn net.Conn) *client { // Snapshot server options. opts := s.getOpts() maxPay := int32(opts.MaxPayload) maxSubs := int32(opts.MaxSubs) // For system, maxSubs of 0 means unlimited, so re-adjust here. if maxSubs == 0 { maxSubs = -1 } now := time.Now().UTC() c := &client{srv: s, nc: conn, opts: defaultOpts, mpay: maxPay, msubs: maxSubs, start: now, last: now} c.registerWithAccount(s.globalAccount()) var info Info var authRequired bool s.mu.Lock() // Grab JSON info string info = s.copyInfo() if s.nonceRequired() { // Nonce handling var raw [nonceLen]byte nonce := raw[:] s.generateNonce(nonce) info.Nonce = string(nonce) } c.nonce = []byte(info.Nonce) authRequired = info.AuthRequired s.totalClients++ s.mu.Unlock() // Grab lock c.mu.Lock() if authRequired { c.flags.set(expectConnect) } // Initialize c.initClient() c.Debugf("Client connection created") // Send our information. // Need to be sent in place since writeLoop cannot be started until // TLS handshake is done (if applicable). c.sendProtoNow(c.generateClientInfoJSON(info)) // Unlock to register c.mu.Unlock() // Register with the server. s.mu.Lock() // If server is not running, Shutdown() may have already gathered the // list of connections to close. It won't contain this one, so we need // to bail out now otherwise the readLoop started down there would not // be interrupted. Skip also if in lame duck mode. if !s.running || s.ldm { // There are some tests that create a server but don't start it, // and use "async" clients and perform the parsing manually. Such // clients would branch here (since server is not running). However, // when a server was really running and has been shutdown, we must // close this connection. if s.shutdown { conn.Close() } s.mu.Unlock() return c } // If there is a max connections specified, check that adding // this new client would not push us over the max if opts.MaxConn > 0 && len(s.clients) >= opts.MaxConn { s.mu.Unlock() c.maxConnExceeded() return nil } s.clients[c.cid] = c tlsRequired := info.TLSRequired s.mu.Unlock() // Re-Grab lock c.mu.Lock() // Connection could have been closed while sending the INFO proto. isClosed := c.isClosed() var pre []byte // If we have both TLS and non-TLS allowed we need to see which // one the client wants. if !isClosed && opts.TLSConfig != nil && opts.AllowNonTLS { pre = make([]byte, 4) c.nc.SetReadDeadline(time.Now().Add(secondsToDuration(opts.TLSTimeout))) n, _ := io.ReadFull(c.nc, pre[:]) c.nc.SetReadDeadline(time.Time{}) pre = pre[:n] if n > 0 && pre[0] == 0x16 { tlsRequired = true } else { tlsRequired = false } } // Check for TLS if !isClosed && tlsRequired { // If we have a prebuffer create a multi-reader. if len(pre) > 0 { c.nc = &tlsMixConn{c.nc, bytes.NewBuffer(pre)} // Clear pre so it is not parsed. pre = nil } // Performs server-side TLS handshake. if err := c.doTLSServerHandshake(_EMPTY_, opts.TLSConfig, opts.TLSTimeout); err != nil { c.mu.Unlock() return nil } } // If connection is marked as closed, bail out. if isClosed { c.mu.Unlock() // Connection could have been closed due to TLS timeout or while trying // to send the INFO protocol. We need to call closeConnection() to make // sure that proper cleanup is done. c.closeConnection(WriteError) return nil } // Check for Auth. 
We schedule this timer after the TLS handshake to avoid // the race where the timer fires during the handshake and causes the // server to write bad data to the socket. See issue #432. if authRequired { c.setAuthTimer(secondsToDuration(opts.AuthTimeout)) } // Do final client initialization // Set the Ping timer. Will be reset once connect was received. c.setPingTimer() // Spin up the read loop. s.startGoRoutine(func() { c.readLoop(pre) }) // Spin up the write loop. s.startGoRoutine(func() { c.writeLoop() }) if tlsRequired { c.Debugf("TLS handshake complete") cs := c.nc.(*tls.Conn).ConnectionState() c.Debugf("TLS version %s, cipher suite %s", tlsVersion(cs.Version), tlsCipher(cs.CipherSuite)) } c.mu.Unlock() return c } // This will save off a closed client in a ring buffer such that // /connz can inspect. Useful for debugging, etc. func (s *Server) saveClosedClient(c *client, nc net.Conn, reason ClosedState) { now := time.Now().UTC() s.accountDisconnectEvent(c, now, reason.String()) c.mu.Lock() cc := &closedClient{} cc.fill(c, nc, now) cc.Stop = &now cc.Reason = reason.String() // Do subs, do not place by default in main ConnInfo if len(c.subs) > 0 { cc.subs = make([]SubDetail, 0, len(c.subs)) for _, sub := range c.subs { cc.subs = append(cc.subs, newSubDetail(sub)) } } // Hold user as well. cc.user = c.opts.Username // Hold account name if not the global account. if c.acc != nil && c.acc.Name != globalAccountName { cc.acc = c.acc.Name } cc.JWT = c.opts.JWT cc.IssuerKey = issuerForClient(c) cc.Tags = c.tags cc.NameTag = c.nameTag c.mu.Unlock() // Place in the ring buffer s.mu.Lock() if s.closed != nil { s.closed.append(cc) } s.mu.Unlock() } // Adds to the list of client and websocket clients connect URLs. // If there was a change, an INFO protocol is sent to registered clients // that support async INFO protocols. func (s *Server) addConnectURLsAndSendINFOToClients(curls, wsurls []string) { s.updateServerINFOAndSendINFOToClients(curls, wsurls, true) } // Removes from the list of client and websocket clients connect URLs. // If there was a change, an INFO protocol is sent to registered clients // that support async INFO protocols. func (s *Server) removeConnectURLsAndSendINFOToClients(curls, wsurls []string) { s.updateServerINFOAndSendINFOToClients(curls, wsurls, false) } // Updates the list of client and websocket clients connect URLs and if any change // sends an async INFO update to clients that support it. func (s *Server) updateServerINFOAndSendINFOToClients(curls, wsurls []string, add bool) { s.mu.Lock() defer s.mu.Unlock() remove := !add // Will return true if we need alter the server's Info object. updateMap := func(urls []string, m refCountedUrlSet) bool { wasUpdated := false for _, url := range urls { if add && m.addUrl(url) { wasUpdated = true } else if remove && m.removeUrl(url) { wasUpdated = true } } return wasUpdated } cliUpdated := updateMap(curls, s.clientConnectURLsMap) wsUpdated := updateMap(wsurls, s.websocket.connectURLsMap) updateInfo := func(infoURLs *[]string, urls []string, m refCountedUrlSet) { // Recreate the info's slice from the map *infoURLs = (*infoURLs)[:0] // Add this server client connect ULRs first... *infoURLs = append(*infoURLs, urls...) 
// Then the ones from the map for url := range m { *infoURLs = append(*infoURLs, url) } } if cliUpdated { updateInfo(&s.info.ClientConnectURLs, s.clientConnectURLs, s.clientConnectURLsMap) } if wsUpdated { updateInfo(&s.info.WSConnectURLs, s.websocket.connectURLs, s.websocket.connectURLsMap) } if cliUpdated || wsUpdated { // Update the time of this update s.lastCURLsUpdate = time.Now().UnixNano() // Send to all registered clients that support async INFO protocols. s.sendAsyncInfoToClients(cliUpdated, wsUpdated) } } // Handle closing down a connection when the handshake has timedout. func tlsTimeout(c *client, conn *tls.Conn) { c.mu.Lock() closed := c.isClosed() c.mu.Unlock() // Check if already closed if closed { return } cs := conn.ConnectionState() if !cs.HandshakeComplete { c.Errorf("TLS handshake timeout") c.sendErr("Secure Connection - TLS Required") c.closeConnection(TLSHandshakeError) } } // Seems silly we have to write these func tlsVersion(ver uint16) string { switch ver { case tls.VersionTLS10: return "1.0" case tls.VersionTLS11: return "1.1" case tls.VersionTLS12: return "1.2" case tls.VersionTLS13: return "1.3" } return fmt.Sprintf("Unknown [0x%x]", ver) } // We use hex here so we don't need multiple versions func tlsCipher(cs uint16) string { name, present := cipherMapByID[cs] if present { return name } return fmt.Sprintf("Unknown [0x%x]", cs) } // Remove a client or route from our internal accounting. func (s *Server) removeClient(c *client) { // kind is immutable, so can check without lock switch c.kind { case CLIENT: c.mu.Lock() cid := c.cid updateProtoInfoCount := false if c.kind == CLIENT && c.opts.Protocol >= ClientProtoInfo { updateProtoInfoCount = true } c.mu.Unlock() s.mu.Lock() delete(s.clients, cid) if updateProtoInfoCount { s.cproto-- } s.mu.Unlock() case ROUTER: s.removeRoute(c) case GATEWAY: s.removeRemoteGatewayConnection(c) case LEAF: s.removeLeafNodeConnection(c) } } func (s *Server) removeFromTempClients(cid uint64) { s.grMu.Lock() delete(s.grTmpClients, cid) s.grMu.Unlock() } func (s *Server) addToTempClients(cid uint64, c *client) bool { added := false s.grMu.Lock() if s.grRunning { s.grTmpClients[cid] = c added = true } s.grMu.Unlock() return added } ///////////////////////////////////////////////////////////////// // These are some helpers for accounting in functional tests. ///////////////////////////////////////////////////////////////// // NumRoutes will report the number of registered routes. func (s *Server) NumRoutes() int { s.mu.Lock() nr := len(s.routes) s.mu.Unlock() return nr } // NumRemotes will report number of registered remotes. func (s *Server) NumRemotes() int { s.mu.Lock() defer s.mu.Unlock() return len(s.remotes) } // NumLeafNodes will report number of leaf node connections. func (s *Server) NumLeafNodes() int { s.mu.Lock() defer s.mu.Unlock() return len(s.leafs) } // NumClients will report the number of registered clients. func (s *Server) NumClients() int { s.mu.Lock() defer s.mu.Unlock() return len(s.clients) } // GetClient will return the client associated with cid. func (s *Server) GetClient(cid uint64) *client { return s.getClient(cid) } // getClient will return the client associated with cid. func (s *Server) getClient(cid uint64) *client { s.mu.Lock() defer s.mu.Unlock() return s.clients[cid] } // GetLeafNode returns the leafnode associated with the cid. func (s *Server) GetLeafNode(cid uint64) *client { s.mu.Lock() defer s.mu.Unlock() return s.leafs[cid] } // NumSubscriptions will report how many subscriptions are active. 
func (s *Server) NumSubscriptions() uint32 { s.mu.Lock() defer s.mu.Unlock() return s.numSubscriptions() } // numSubscriptions will report how many subscriptions are active. // Lock should be held. func (s *Server) numSubscriptions() uint32 { var subs int s.accounts.Range(func(k, v interface{}) bool { acc := v.(*Account) if acc.sl != nil { subs += acc.TotalSubs() } return true }) return uint32(subs) } // NumSlowConsumers will report the number of slow consumers. func (s *Server) NumSlowConsumers() int64 { return atomic.LoadInt64(&s.slowConsumers) } // ConfigTime will report the last time the server configuration was loaded. func (s *Server) ConfigTime() time.Time { s.mu.Lock() defer s.mu.Unlock() return s.configTime } // Addr will return the net.Addr object for the current listener. func (s *Server) Addr() net.Addr { s.mu.Lock() defer s.mu.Unlock() if s.listener == nil { return nil } return s.listener.Addr() } // MonitorAddr will return the net.Addr object for the monitoring listener. func (s *Server) MonitorAddr() *net.TCPAddr { s.mu.Lock() defer s.mu.Unlock() if s.http == nil { return nil } return s.http.Addr().(*net.TCPAddr) } // ClusterAddr returns the net.Addr object for the route listener. func (s *Server) ClusterAddr() *net.TCPAddr { s.mu.Lock() defer s.mu.Unlock() if s.routeListener == nil { return nil } return s.routeListener.Addr().(*net.TCPAddr) } // ProfilerAddr returns the net.Addr object for the profiler listener. func (s *Server) ProfilerAddr() *net.TCPAddr { s.mu.Lock() defer s.mu.Unlock() if s.profiler == nil { return nil } return s.profiler.Addr().(*net.TCPAddr) } // ReadyForConnections returns `true` if the server is ready to accept clients // and, if routing is enabled, route connections. If after the duration // `dur` the server is still not ready, returns `false`. func (s *Server) ReadyForConnections(dur time.Duration) bool { // Snapshot server options. opts := s.getOpts() end := time.Now().Add(dur) for time.Now().Before(end) { s.mu.Lock() ok := s.listener != nil && (opts.Cluster.Port == 0 || s.routeListener != nil) && (opts.Gateway.Name == "" || s.gatewayListener != nil) && (opts.LeafNode.Port == 0 || s.leafNodeListener != nil) && (opts.Websocket.Port == 0 || s.websocket.listener != nil) s.mu.Unlock() if ok { return true } time.Sleep(25 * time.Millisecond) } return false } // Quick utility to function to tell if the server supports headers. func (s *Server) supportsHeaders() bool { if s == nil { return false } return !(s.getOpts().NoHeaderSupport) } // ID returns the server's ID func (s *Server) ID() string { return s.info.ID } // NodeName returns the node name for this server. func (s *Server) NodeName() string { return string(getHash(s.info.Name)) } // Name returns the server's name. This will be the same as the ID if it was not set. func (s *Server) Name() string { return s.info.Name } func (s *Server) String() string { return s.info.Name } func (s *Server) startGoRoutine(f func()) bool { var started bool s.grMu.Lock() if s.grRunning { s.grWG.Add(1) go f() started = true } s.grMu.Unlock() return started } func (s *Server) numClosedConns() int { s.mu.Lock() defer s.mu.Unlock() return s.closed.len() } func (s *Server) totalClosedConns() uint64 { s.mu.Lock() defer s.mu.Unlock() return s.closed.totalConns() } func (s *Server) closedClients() []*closedClient { s.mu.Lock() defer s.mu.Unlock() return s.closed.closedClients() } // getClientConnectURLs returns suitable URLs for clients to connect to the listen // port based on the server options' Host and Port. 
If the Host corresponds to // "any" interfaces, this call returns the list of resolved IP addresses. // If ClientAdvertise is set, returns the client advertise host and port. // The server lock is assumed held on entry. func (s *Server) getClientConnectURLs() []string { // Snapshot server options. opts := s.getOpts() // Ignore error here since we know that if there is client advertise, the // parseHostPort is correct because we did it right before calling this // function in Server.New(). urls, _ := s.getConnectURLs(opts.ClientAdvertise, opts.Host, opts.Port) return urls } // Generic version that will return an array of URLs based on the given // advertise, host and port values. func (s *Server) getConnectURLs(advertise, host string, port int) ([]string, error) { urls := make([]string, 0, 1) // short circuit if advertise is set if advertise != "" { h, p, err := parseHostPort(advertise, port) if err != nil { return nil, err } urls = append(urls, net.JoinHostPort(h, strconv.Itoa(p))) } else { sPort := strconv.Itoa(port) _, ips, err := s.getNonLocalIPsIfHostIsIPAny(host, true) for _, ip := range ips { urls = append(urls, net.JoinHostPort(ip, sPort)) } if err != nil || len(urls) == 0 { // We are here if s.opts.Host is not "0.0.0.0" nor "::", or if for some // reason we could not add any URL in the loop above. // We had a case where a Windows VM was hosed and would have err == nil // and not add any address in the array in the loop above, and we // ended-up returning 0.0.0.0, which is problematic for Windows clients. // Check for 0.0.0.0 or :: specifically, and ignore if that's the case. if host == "0.0.0.0" || host == "::" { s.Errorf("Address %q can not be resolved properly", host) } else { urls = append(urls, net.JoinHostPort(host, sPort)) } } } return urls, nil } // Returns an array of non local IPs if the provided host is // 0.0.0.0 or ::. It returns the first resolved if `all` is // false. // The boolean indicate if the provided host was 0.0.0.0 (or ::) // so that if the returned array is empty caller can decide // what to do next. func (s *Server) getNonLocalIPsIfHostIsIPAny(host string, all bool) (bool, []string, error) { ip := net.ParseIP(host) // If this is not an IP, we are done if ip == nil { return false, nil, nil } // If this is not 0.0.0.0 or :: we have nothing to do. 
if !ip.IsUnspecified() { return false, nil, nil } s.Debugf("Get non local IPs for %q", host) var ips []string ifaces, _ := net.Interfaces() for _, i := range ifaces { addrs, _ := i.Addrs() for _, addr := range addrs { switch v := addr.(type) { case *net.IPNet: ip = v.IP case *net.IPAddr: ip = v.IP } ipStr := ip.String() // Skip non global unicast addresses if !ip.IsGlobalUnicast() || ip.IsUnspecified() { ip = nil continue } s.Debugf(" ip=%s", ipStr) ips = append(ips, ipStr) if !all { break } } } return true, ips, nil } // if the ip is not specified, attempt to resolve it func resolveHostPorts(addr net.Listener) []string { hostPorts := make([]string, 0) hp := addr.Addr().(*net.TCPAddr) port := strconv.Itoa(hp.Port) if hp.IP.IsUnspecified() { var ip net.IP ifaces, _ := net.Interfaces() for _, i := range ifaces { addrs, _ := i.Addrs() for _, addr := range addrs { switch v := addr.(type) { case *net.IPNet: ip = v.IP hostPorts = append(hostPorts, net.JoinHostPort(ip.String(), port)) case *net.IPAddr: ip = v.IP hostPorts = append(hostPorts, net.JoinHostPort(ip.String(), port)) default: continue } } } } else { hostPorts = append(hostPorts, net.JoinHostPort(hp.IP.String(), port)) } return hostPorts } // format the address of a net.Listener with a protocol func formatURL(protocol string, addr net.Listener) []string { hostports := resolveHostPorts(addr) for i, hp := range hostports { hostports[i] = fmt.Sprintf("%s://%s", protocol, hp) } return hostports } // Ports describes URLs that the server can be contacted in type Ports struct { Nats []string `json:"nats,omitempty"` Monitoring []string `json:"monitoring,omitempty"` Cluster []string `json:"cluster,omitempty"` Profile []string `json:"profile,omitempty"` WebSocket []string `json:"websocket,omitempty"` } // PortsInfo attempts to resolve all the ports. If after maxWait the ports are not // resolved, it returns nil. Otherwise it returns a Ports struct // describing ports where the server can be contacted func (s *Server) PortsInfo(maxWait time.Duration) *Ports { if s.readyForListeners(maxWait) { opts := s.getOpts() s.mu.Lock() tls := s.info.TLSRequired listener := s.listener httpListener := s.http clusterListener := s.routeListener profileListener := s.profiler wsListener := s.websocket.listener wss := s.websocket.tls s.mu.Unlock() ports := Ports{} if listener != nil { natsProto := "nats" if tls { natsProto = "tls" } ports.Nats = formatURL(natsProto, listener) } if httpListener != nil { monProto := "http" if opts.HTTPSPort != 0 { monProto = "https" } ports.Monitoring = formatURL(monProto, httpListener) } if clusterListener != nil { clusterProto := "nats" if opts.Cluster.TLSConfig != nil { clusterProto = "tls" } ports.Cluster = formatURL(clusterProto, clusterListener) } if profileListener != nil { ports.Profile = formatURL("http", profileListener) } if wsListener != nil { protocol := wsSchemePrefix if wss { protocol = wsSchemePrefixTLS } ports.WebSocket = formatURL(protocol, wsListener) } return &ports } return nil } // Returns the portsFile. If a non-empty dirHint is provided, the dirHint // path is used instead of the server option value func (s *Server) portFile(dirHint string) string { dirname := s.getOpts().PortsFileDir if dirHint != "" { dirname = dirHint } if dirname == _EMPTY_ { return _EMPTY_ } return filepath.Join(dirname, fmt.Sprintf("%s_%d.ports", filepath.Base(os.Args[0]), os.Getpid())) } // Delete the ports file. 
If a non-empty dirHint is provided, the dirHint // path is used instead of the server option value func (s *Server) deletePortsFile(hintDir string) { portsFile := s.portFile(hintDir) if portsFile != "" { if err := os.Remove(portsFile); err != nil { s.Errorf("Error cleaning up ports file %s: %v", portsFile, err) } } } // Writes a file with a serialized Ports to the specified ports_file_dir. // The name of the file is `exename_pid.ports`, typically nats-server_pid.ports. // if ports file is not set, this function has no effect func (s *Server) logPorts() { opts := s.getOpts() portsFile := s.portFile(opts.PortsFileDir) if portsFile != _EMPTY_ { go func() { info := s.PortsInfo(5 * time.Second) if info == nil { s.Errorf("Unable to resolve the ports in the specified time") return } data, err := json.Marshal(info) if err != nil { s.Errorf("Error marshaling ports file: %v", err) return } if err := ioutil.WriteFile(portsFile, data, 0666); err != nil { s.Errorf("Error writing ports file (%s): %v", portsFile, err) return } }() } } // waits until a calculated list of listeners is resolved or a timeout func (s *Server) readyForListeners(dur time.Duration) bool { end := time.Now().Add(dur) for time.Now().Before(end) { s.mu.Lock() listeners := s.serviceListeners() s.mu.Unlock() if len(listeners) == 0 { return false } ok := true for _, l := range listeners { if l == nil { ok = false break } } if ok { return true } select { case <-s.quitCh: return false case <-time.After(25 * time.Millisecond): // continue - unable to select from quit - we are still running } } return false } // returns a list of listeners that are intended for the process // if the entry is nil, the interface is yet to be resolved func (s *Server) serviceListeners() []net.Listener { listeners := make([]net.Listener, 0) opts := s.getOpts() listeners = append(listeners, s.listener) if opts.Cluster.Port != 0 { listeners = append(listeners, s.routeListener) } if opts.HTTPPort != 0 || opts.HTTPSPort != 0 { listeners = append(listeners, s.http) } if opts.ProfPort != 0 { listeners = append(listeners, s.profiler) } if opts.Websocket.Port != 0 { listeners = append(listeners, s.websocket.listener) } return listeners } // Returns true if in lame duck mode. func (s *Server) isLameDuckMode() bool { s.mu.Lock() defer s.mu.Unlock() return s.ldm } // This function will close the client listener then close the clients // at some interval to avoid a reconnecting storm. func (s *Server) lameDuckMode() { s.mu.Lock() // Check if there is actually anything to do if s.shutdown || s.ldm || s.listener == nil { s.mu.Unlock() return } s.Noticef("Entering lame duck mode, stop accepting new clients") s.ldm = true expected := 1 s.listener.Close() s.listener = nil if s.websocket.server != nil { expected++ s.websocket.server.Close() s.websocket.server = nil s.websocket.listener = nil } s.ldmCh = make(chan bool, expected) opts := s.getOpts() gp := opts.LameDuckGracePeriod // For tests, we want the grace period to be in some cases bigger // than the ldm duration, so to by-pass the validateOptions() check, // we use negative number and flip it here. if gp < 0 { gp *= -1 } s.mu.Unlock() // If we are running any raftNodes transfer leaders. if hadTransfers := s.transferRaftLeaders(); hadTransfers { // They will tranfer leadership quickly, but wait here for a second. 
select { case <-time.After(time.Second): case <-s.quitCh: return } } // Wait for accept loops to be done to make sure that no new // client can connect for i := 0; i < expected; i++ { <-s.ldmCh } s.mu.Lock() // Need to recheck few things if s.shutdown || len(s.clients) == 0 { s.mu.Unlock() // If there is no client, we need to call Shutdown() to complete // the LDMode. If server has been shutdown while lock was released, // calling Shutdown() should be no-op. s.Shutdown() return } dur := int64(opts.LameDuckDuration) dur -= int64(gp) if dur <= 0 { dur = int64(time.Second) } numClients := int64(len(s.clients)) batch := 1 // Sleep interval between each client connection close. si := dur / numClients if si < 1 { // Should not happen (except in test with very small LD duration), but // if there are too many clients, batch the number of close and // use a tiny sleep interval that will result in yield likely. si = 1 batch = int(numClients / dur) } else if si > int64(time.Second) { // Conversely, there is no need to sleep too long between clients // and spread say 10 clients for the 2min duration. Sleeping no // more than 1sec. si = int64(time.Second) } // Now capture all clients clients := make([]*client, 0, len(s.clients)) for _, client := range s.clients { clients = append(clients, client) } // Now that we know that no new client can be accepted, // send INFO to routes and clients to notify this state. s.sendLDMToRoutes() s.sendLDMToClients() s.mu.Unlock() t := time.NewTimer(gp) // Delay start of closing of client connections in case // we have several servers that we want to signal to enter LD mode // and not have their client reconnect to each other. select { case <-t.C: s.Noticef("Closing existing clients") case <-s.quitCh: t.Stop() return } for i, client := range clients { client.closeConnection(ServerShutdown) if i == len(clients)-1 { break } if batch == 1 || i%batch == 0 { // We pick a random interval which will be at least si/2 v := rand.Int63n(si) if v < si/2 { v = si / 2 } t.Reset(time.Duration(v)) // Sleep for given interval or bail out if kicked by Shutdown(). select { case <-t.C: case <-s.quitCh: t.Stop() return } } } s.Shutdown() } // Send an INFO update to routes with the indication that this server is in LDM mode. // Server lock is held on entry. func (s *Server) sendLDMToRoutes() { s.routeInfo.LameDuckMode = true s.generateRouteInfoJSON() for _, r := range s.routes { r.mu.Lock() r.enqueueProto(s.routeInfoJSON) r.mu.Unlock() } // Clear now so that we notify only once, should we have to send other INFOs. s.routeInfo.LameDuckMode = false } // Send an INFO update to clients with the indication that this server is in // LDM mode and with only URLs of other nodes. // Server lock is held on entry. func (s *Server) sendLDMToClients() { s.info.LameDuckMode = true // Clear this so that if there are further updates, we don't send our URLs. s.clientConnectURLs = s.clientConnectURLs[:0] if s.websocket.connectURLs != nil { s.websocket.connectURLs = s.websocket.connectURLs[:0] } // Reset content first. s.info.ClientConnectURLs = s.info.ClientConnectURLs[:0] s.info.WSConnectURLs = s.info.WSConnectURLs[:0] // Only add the other nodes if we are allowed to. if !s.getOpts().Cluster.NoAdvertise { for url := range s.clientConnectURLsMap { s.info.ClientConnectURLs = append(s.info.ClientConnectURLs, url) } for url := range s.websocket.connectURLsMap { s.info.WSConnectURLs = append(s.info.WSConnectURLs, url) } } // Send to all registered clients that support async INFO protocols. 
s.sendAsyncInfoToClients(true, true) // We now clear the info.LameDuckMode flag so that if there are // cluster updates and we send the INFO, we don't have the boolean // set which would cause multiple LDM notifications to clients. s.info.LameDuckMode = false } // If given error is a net.Error and is temporary, sleeps for the given // delay and double it, but cap it to ACCEPT_MAX_SLEEP. The sleep is // interrupted if the server is shutdown. // An error message is displayed depending on the type of error. // Returns the new (or unchanged) delay, or a negative value if the // server has been or is being shutdown. func (s *Server) acceptError(acceptName string, err error, tmpDelay time.Duration) time.Duration { if !s.isRunning() { return -1 } if ne, ok := err.(net.Error); ok && ne.Temporary() { s.Errorf("Temporary %s Accept Error(%v), sleeping %dms", acceptName, ne, tmpDelay/time.Millisecond) select { case <-time.After(tmpDelay): case <-s.quitCh: return -1 } tmpDelay *= 2 if tmpDelay > ACCEPT_MAX_SLEEP { tmpDelay = ACCEPT_MAX_SLEEP } } else { s.Errorf("%s Accept error: %v", acceptName, err) } return tmpDelay } var errNoIPAvail = errors.New("no IP available") func (s *Server) getRandomIP(resolver netResolver, url string, excludedAddresses map[string]struct{}) (string, error) { host, port, err := net.SplitHostPort(url) if err != nil { return "", err } // If already an IP, skip. if net.ParseIP(host) != nil { return url, nil } ips, err := resolver.LookupHost(context.Background(), host) if err != nil { return "", fmt.Errorf("lookup for host %q: %v", host, err) } if len(excludedAddresses) > 0 { for i := 0; i < len(ips); i++ { ip := ips[i] addr := net.JoinHostPort(ip, port) if _, excluded := excludedAddresses[addr]; excluded { if len(ips) == 1 { ips = nil break } ips[i] = ips[len(ips)-1] ips = ips[:len(ips)-1] i-- } } if len(ips) == 0 { return "", errNoIPAvail } } var address string if len(ips) == 0 { s.Warnf("Unable to get IP for %s, will try with %s: %v", host, url, err) address = url } else { var ip string if len(ips) == 1 { ip = ips[0] } else { ip = ips[rand.Int31n(int32(len(ips)))] } // add the port address = net.JoinHostPort(ip, port) } return address, nil } // Returns true for the first attempt and depending on the nature // of the attempt (first connect or a reconnect), when the number // of attempts is equal to the configured report attempts. func (s *Server) shouldReportConnectErr(firstConnect bool, attempts int) bool { opts := s.getOpts() if firstConnect { if attempts == 1 || attempts%opts.ConnectErrorReports == 0 { return true } return false } if attempts == 1 || attempts%opts.ReconnectErrorReports == 0 { return true } return false } // Invoked for route, leaf and gateway connections. Set the very first // PING to a lower interval to capture the initial RTT. // After that the PING interval will be set to the user defined value. // Client lock should be held. func (s *Server) setFirstPingTimer(c *client) { opts := s.getOpts() d := opts.PingInterval if !opts.DisableShortFirstPing { if c.kind != CLIENT { if d > firstPingInterval { d = firstPingInterval } if c.kind == GATEWAY { d = adjustPingIntervalForGateway(d) } } else if d > firstClientPingInterval { d = firstClientPingInterval } } // We randomize the first one by an offset up to 20%, e.g. 2m ~= max 24s. addDelay := rand.Int63n(int64(d / 5)) d += time.Duration(addDelay) c.ping.tmr = time.AfterFunc(d, c.processPingTimer) }
1
12,917
I think this may be a RW lock.
nats-io-nats-server
go
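The review comment for this row ("I think this may be a RW lock.") points at the server mutex that the read-only accessors in the file above take (NumClients, NumRoutes, Addr, and so on). The sketch below is only an illustration of that suggestion, not the actual nats-server change: a stand-in Server type with just enough fields to compile, switched from sync.Mutex to sync.RWMutex so readers can proceed concurrently while writers still take the exclusive lock.

// Sketch only: a minimal stand-in for the Server type above, assuming the
// reviewer's RWMutex suggestion. Not the real nats-server patch.
package main

import (
	"fmt"
	"sync"
)

type client struct{ cid uint64 }

type Server struct {
	mu      sync.RWMutex // was sync.Mutex; RWMutex lets readers run concurrently
	clients map[uint64]*client
}

// Writers keep the exclusive lock.
func (s *Server) addClient(c *client) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.clients[c.cid] = c
}

// Read-only accessors such as NumClients switch to the shared lock.
func (s *Server) NumClients() int {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return len(s.clients)
}

func main() {
	s := &Server{clients: make(map[uint64]*client)}
	s.addClient(&client{cid: 1})
	fmt.Println(s.NumClients()) // prints 1
}

Note that an RWMutex only pays off when reads dominate and the critical sections are short; otherwise the extra bookkeeping can make it slower than a plain Mutex.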
@@ -1,4 +1,4 @@
-package pluginhelper
+package helpers
 
 import (
 	"bytes"
1
package pluginhelper

import (
	"bytes"
	"io/ioutil"

	"github.com/hashicorp/hcl"
	"github.com/hashicorp/hcl/hcl/ast"
	"github.com/hashicorp/hcl/hcl/printer"
)

// PluginConfig is the plugin config data
type Config interface {
	ParseConfig(file string) error
	setConfig(data interface{}) error
}

type PluginConfig struct {
	Version        string
	PluginName     string
	PluginCmd      string
	PluginChecksum string
	PluginData     string
	PluginType     string
	Enabled        bool
}

func (c *PluginConfig) ParseConfig(file string) (err error) {
	result, err := parseHCLfile(file)
	if err != nil {
		return err
	}

	c.setConfig(result)
	if err != nil {
		return err
	}
	return err
}

func (c *PluginConfig) setConfig(hclData *HCLData) (err error) {
	c.Version = hclData.Version
	c.PluginName = hclData.PluginName
	c.PluginCmd = hclData.PluginCmd
	c.PluginChecksum = hclData.PluginChecksum
	c.Enabled = hclData.Enabled
	c.PluginType = hclData.PluginType

	// Re-encode plugin-specific data
	var buf bytes.Buffer
	if err = printer.DefaultConfig.Fprint(&buf, hclData.PluginData); err != nil {
		return err
	}
	c.PluginData = buf.String()

	return err
}

type NodeAgentConfig struct {
	Version            string
	WorkloadAPIAddress string
}

func (c *NodeAgentConfig) ParseConfig(file string) error {
	result, err := parseHCLfile(file)
	if err != nil {
		return err
	}
	if err = c.setConfig(result); err != nil {
		return err
	}
	return nil
}

func (c *NodeAgentConfig) setConfig(hclData *HCLData) error {
	c.Version = hclData.Version
	c.WorkloadAPIAddress = hclData.WorkloadAPIAddress
	return nil
}

type ControlPlaneConfig struct {
	Version                string
	NodeAPIAddress         string
	RegistrationAPIAddress string
}

func (c *ControlPlaneConfig) ParseConfig(file string) error {
	result, err := parseHCLfile(file)
	if err != nil {
		return err
	}
	if err = c.setConfig(result); err != nil {
		return err
	}
	return nil
}

func (c *ControlPlaneConfig) setConfig(hclData *HCLData) error {
	c.Version = hclData.Version
	c.NodeAPIAddress = hclData.NodeAPIAddress
	c.RegistrationAPIAddress = hclData.RegistrationAPIAddress
	return nil
}

// HCL config data
type HCLData struct {
	//Common config
	Version string `hcl:version`

	//Plugin Config
	PluginName     string   `hcl:pluginName`
	PluginCmd      string   `hcl:"pluginCmd"`
	PluginChecksum string   `hcl:"pluginChecksum"`
	PluginData     ast.Node `hcl:"pluginData"`
	PluginType     string   `hcl:"pluginType"`
	Enabled        bool     `hcl:enabled`

	WorkloadAPIAddress string `hcl:workloadAPIAddress`

	NodeAPIAddress         string `hcl:nodeAPIAddress`
	RegistrationAPIAddress string `hcl:registrationAPIAddress`
}

func parseHCLfile(file string) (*HCLData, error) {
	hclData := &HCLData{}

	// Read HCL file
	dat, err := ioutil.ReadFile(file)
	if err != nil {
		return nil, err
	}
	hclText := string(dat)

	// Parse HCL
	hclParseTree, err := hcl.Parse(hclText)
	if err != nil {
		return nil, err
	}

	if err := hcl.DecodeObject(&hclData, hclParseTree); err != nil {
		return nil, err
	}

	return hclData, nil
}
1
8,205
Perhaps we should tuck this away into a dedicated subdir and name it `config` or something similar? Or maybe it would be happy living in `common`?
spiffe-spire
go
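The review comment for this row is about where these HCL helpers should live rather than what they do; wherever they end up (a config subdir, common, or elsewhere), the core flow stays the same: read the file, parse the HCL, and decode it into a tagged struct. Below is a minimal, self-contained sketch of that flow using github.com/hashicorp/hcl (v1), with an inline document instead of a file on disk; the struct fields and values are illustrative and only mirror a subset of HCLData, not SPIRE's actual configuration.

// Minimal sketch of the parse/decode flow that parseHCLfile wraps.
// Assumes github.com/hashicorp/hcl v1; struct fields are illustrative.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

type pluginConf struct {
	Version    string `hcl:"version"`
	PluginName string `hcl:"pluginName"`
	Enabled    bool   `hcl:"enabled"`
}

func main() {
	const doc = `
version    = "1.0"
pluginName = "join_token"
enabled    = true
`
	var c pluginConf
	// hcl.Decode combines hcl.Parse and hcl.DecodeObject in one call.
	if err := hcl.Decode(&c, doc); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", c) // {Version:1.0 PluginName:join_token Enabled:true}
}

Keeping the decode target in one shared struct, as HCLData does, is what makes the package easy to relocate: callers depend only on the ParseConfig API, not on where the parsing code lives.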
@@ -810,6 +810,14 @@ void Client::SendTradeskillSearchResults(
 				continue;
 			}
 		}
+
+		//Check if we need to learn it before sending them the recipe..
+		DBTradeskillRecipe_Struct spec;
+		if (content_db.GetTradeRecipe(recipe_id, objtype, someid, this->CharacterID(), &spec)) {
+			if ((spec.must_learn & 0xf) && !spec.has_learnt) {
+				continue;
+			}
+		}
 
 		auto outapp = new EQApplicationPacket(OP_RecipeReply, sizeof(RecipeReply_Struct));
 		RecipeReply_Struct *reply = (RecipeReply_Struct *) outapp->pBuffer;
1
/* EQEMu: Everquest Server Emulator Copyright (C) 2001-2004 EQEMu Development Team (http://eqemulator.net) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY except by those people which sell it, which are required to give you total support for your newly bought product; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "../common/global_define.h" #include <stdlib.h> #include <list> #ifndef WIN32 #include <netinet/in.h> //for htonl #endif #include "../common/rulesys.h" #include "../common/string_util.h" #include "queryserv.h" #include "quest_parser_collection.h" #include "string_ids.h" #include "titles.h" #include "zonedb.h" #include "zone_store.h" #include "../common/repositories/character_recipe_list_repository.h" #include "../common/repositories/tradeskill_recipe_repository.h" extern QueryServ* QServ; static const EQ::skills::SkillType TradeskillUnknown = EQ::skills::Skill1HBlunt; /* an arbitrary non-tradeskill */ void Object::HandleAugmentation(Client* user, const AugmentItem_Struct* in_augment, Object *worldo) { if (!user || !in_augment) { LogError("Client or AugmentItem_Struct not set in Object::HandleAugmentation"); return; } EQ::ItemInstance* container = nullptr; if (worldo) { container = worldo->m_inst; } else { // Check to see if they have an inventory container type 53 that is used for this. EQ::InventoryProfile& user_inv = user->GetInv(); EQ::ItemInstance* inst = nullptr; inst = user_inv.GetItem(in_augment->container_slot); if (inst) { const EQ::ItemData* item = inst->GetItem(); if (item && inst->IsType(EQ::item::ItemClassBag) && item->BagType == 53) { // We have found an appropriate inventory augmentation sealer container = inst; // Verify that no more than two items are in container to guarantee no inadvertant wipes. 
uint8 itemsFound = 0; for (uint8 i = EQ::invbag::SLOT_BEGIN; i < EQ::invtype::WORLD_SIZE; i++) { const EQ::ItemInstance* inst = container->GetItem(i); if (inst) { itemsFound++; } } if (itemsFound != 2) { user->Message(Chat::Red, "Error: Too many/few items in augmentation container."); return; } } } } if(!container) { LogError("Player tried to augment an item without a container set"); user->Message(Chat::Red, "Error: This item is not a container!"); return; } EQ::ItemInstance *tobe_auged = nullptr, *auged_with = nullptr; int8 slot=-1; // Verify 2 items in the augmentation device if (container->GetItem(0) && container->GetItem(1)) { // Verify 1 item is augmentable and the other is not if (container->GetItem(0)->IsAugmentable() && !container->GetItem(1)->IsAugmentable()) { tobe_auged = container->GetItem(0); auged_with = container->GetItem(1); } else if (!container->GetItem(0)->IsAugmentable() && container->GetItem(1)->IsAugmentable()) { tobe_auged = container->GetItem(1); auged_with = container->GetItem(0); } else { // Either 2 augmentable items found or none found // This should never occur due to client restrictions, but prevent in case of a hack user->Message(Chat::Red, "Error: Must be 1 augmentable item in the sealer"); return; } } else { // This happens if the augment button is clicked more than once quickly while augmenting if (!container->GetItem(0)) { user->Message(Chat::Red, "Error: No item in slot 0 of sealer"); } if (!container->GetItem(1)) { user->Message(Chat::Red, "Error: No item in slot 1 of sealer"); } return; } bool deleteItems = false; EQ::ItemInstance *itemOneToPush = nullptr, *itemTwoToPush = nullptr; // Adding augment if (in_augment->augment_slot == -1) { if (((slot=tobe_auged->AvailableAugmentSlot(auged_with->GetAugmentType()))!=-1) && (tobe_auged->AvailableWearSlot(auged_with->GetItem()->Slots))) { tobe_auged->PutAugment(slot, *auged_with); EQ::ItemInstance *aug = tobe_auged->GetAugment(slot); if(aug) { std::vector<EQ::Any> args; args.push_back(aug); parse->EventItem(EVENT_AUGMENT_ITEM, user, tobe_auged, nullptr, "", slot, &args); args.assign(1, tobe_auged); parse->EventItem(EVENT_AUGMENT_INSERT, user, aug, nullptr, "", slot, &args); } itemOneToPush = tobe_auged->Clone(); deleteItems = true; } else { user->Message(Chat::Red, "Error: No available slot for augment"); } } else { EQ::ItemInstance *old_aug = nullptr; bool isSolvent = auged_with->GetItem()->ItemType == EQ::item::ItemTypeAugmentationSolvent; if (!isSolvent && auged_with->GetItem()->ItemType != EQ::item::ItemTypeAugmentationDistiller) { LogError("Player tried to remove an augment without a solvent or distiller"); user->Message(Chat::Red, "Error: Missing an augmentation solvent or distiller for removing this augment."); return; } EQ::ItemInstance *aug = tobe_auged->GetAugment(in_augment->augment_slot); if (aug) { if (!isSolvent && auged_with->GetItem()->ID != aug->GetItem()->AugDistiller) { LogError("Player tried to safely remove an augment with the wrong distiller (item [{}] vs expected [{}])", auged_with->GetItem()->ID, aug->GetItem()->AugDistiller); user->Message(Chat::Red, "Error: Wrong augmentation distiller for safely removing this augment."); return; } std::vector<EQ::Any> args; args.push_back(aug); parse->EventItem(EVENT_UNAUGMENT_ITEM, user, tobe_auged, nullptr, "", slot, &args); args.assign(1, tobe_auged); args.push_back(&isSolvent); parse->EventItem(EVENT_AUGMENT_REMOVE, user, aug, nullptr, "", slot, &args); } if (isSolvent) tobe_auged->DeleteAugment(in_augment->augment_slot); else old_aug = 
tobe_auged->RemoveAugment(in_augment->augment_slot); itemOneToPush = tobe_auged->Clone(); if (old_aug) itemTwoToPush = old_aug->Clone(); deleteItems = true; } if (deleteItems) { if (worldo) { container->Clear(); auto outapp = new EQApplicationPacket(OP_ClearObject, sizeof(ClearObject_Struct)); ClearObject_Struct *cos = (ClearObject_Struct *)outapp->pBuffer; cos->Clear = 1; user->QueuePacket(outapp); safe_delete(outapp); database.DeleteWorldContainer(worldo->m_id, zone->GetZoneID()); } else { // Delete items in our inventory container... for (uint8 i = EQ::invbag::SLOT_BEGIN; i < EQ::invtype::WORLD_SIZE; i++) { const EQ::ItemInstance* inst = container->GetItem(i); if (inst) { user->DeleteItemInInventory(EQ::InventoryProfile::CalcSlotId(in_augment->container_slot, i), 0, true); } } // Explicitly mark container as cleared. container->Clear(); } } // Must push items after the items in inventory are deleted - necessary due to lore items... if (itemOneToPush) { user->PushItemOnCursor(*itemOneToPush, true); } if (itemTwoToPush) { user->PushItemOnCursor(*itemTwoToPush, true); } } // Perform tradeskill combine void Object::HandleCombine(Client* user, const NewCombine_Struct* in_combine, Object *worldo) { if (!user || !in_combine) { LogError("Client or NewCombine_Struct not set in Object::HandleCombine"); return; } LogTradeskills( "[HandleCombine] container_slot [{}] guildtribute_slot [{}]", in_combine->container_slot, in_combine->guildtribute_slot ); EQ::InventoryProfile &user_inv = user->GetInv(); PlayerProfile_Struct &user_pp = user->GetPP(); EQ::ItemInstance *container = nullptr; EQ::ItemInstance *inst = nullptr; uint8 c_type = 0xE8; uint32 some_id = 0; bool worldcontainer = false; if (in_combine->container_slot == EQ::invslot::SLOT_TRADESKILL_EXPERIMENT_COMBINE) { if(!worldo) { user->Message( Chat::Red, "Error: Server is not aware of the tradeskill container you are attempting to use" ); return; } c_type = worldo->m_type; inst = worldo->m_inst; worldcontainer = true; // if we're a world container with an item, use that too if (inst) { const EQ::ItemData *item = inst->GetItem(); if (item) { some_id = item->ID; } } } else { inst = user_inv.GetItem(in_combine->container_slot); if (inst) { const EQ::ItemData* item = inst->GetItem(); if (item && inst->IsType(EQ::item::ItemClassBag)) { c_type = item->BagType; some_id = item->ID; } } } if (!inst || !inst->IsType(EQ::item::ItemClassBag)) { user->Message(Chat::Red, "Error: Server does not recognize specified tradeskill container"); return; } container = inst; if (container->GetItem() && container->GetItem()->BagType == EQ::item::BagTypeTransformationmold) { const EQ::ItemInstance *inst = container->GetItem(0); bool AllowAll = RuleB(Inventory, AllowAnyWeaponTransformation); if (inst && EQ::ItemInstance::CanTransform(inst->GetItem(), container->GetItem(), AllowAll)) { const EQ::ItemData *new_weapon = inst->GetItem(); user->DeleteItemInInventory(EQ::InventoryProfile::CalcSlotId(in_combine->container_slot, 0), 0, true); container->Clear(); user->SummonItem( new_weapon->ID, inst->GetCharges(), inst->GetAugmentItemID(0), inst->GetAugmentItemID(1), inst->GetAugmentItemID(2), inst->GetAugmentItemID(3), inst->GetAugmentItemID(4), inst->GetAugmentItemID(5), inst->IsAttuned(), EQ::invslot::slotCursor, container->GetItem()->Icon, atoi(container->GetItem()->IDFile + 2) ); user->MessageString(Chat::LightBlue, TRANSFORM_COMPLETE, inst->GetItem()->Name); if (RuleB(Inventory, DeleteTransformationMold)) { user->DeleteItemInInventory(in_combine->container_slot, 0, true); 
} } else if (inst) { user->MessageString(Chat::LightBlue, TRANSFORM_FAILED, inst->GetItem()->Name); } auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); LogTradeskills( "[HandleCombine] inst_item [{}] container_item [{}]", inst->GetItem()->ID, container->GetItem()->ID ); return; } if (container->GetItem() && container->GetItem()->BagType == EQ::item::BagTypeDetransformationmold) { LogTradeskillsDetail("[HandleCombine] Check 1"); const EQ::ItemInstance* inst = container->GetItem(0); if (inst && inst->GetOrnamentationIcon() && inst->GetOrnamentationIcon()) { const EQ::ItemData* new_weapon = inst->GetItem(); user->DeleteItemInInventory(EQ::InventoryProfile::CalcSlotId(in_combine->container_slot, 0), 0, true); container->Clear(); user->SummonItem(new_weapon->ID, inst->GetCharges(), inst->GetAugmentItemID(0), inst->GetAugmentItemID(1), inst->GetAugmentItemID(2), inst->GetAugmentItemID(3), inst->GetAugmentItemID(4), inst->GetAugmentItemID(5), inst->IsAttuned(), EQ::invslot::slotCursor, 0, 0); user->MessageString(Chat::LightBlue, TRANSFORM_COMPLETE, inst->GetItem()->Name); } else if (inst) { user->MessageString(Chat::LightBlue, DETRANSFORM_FAILED, inst->GetItem()->Name); } auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); return; } DBTradeskillRecipe_Struct spec; if (parse->EventPlayer(EVENT_COMBINE, user, std::to_string(in_combine->container_slot), 0) == 1) { auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); return; } if (!content_db.GetTradeRecipe(container, c_type, some_id, user->CharacterID(), &spec)) { LogTradeskillsDetail("[HandleCombine] Check 2"); user->MessageString(Chat::Emote,TRADESKILL_NOCOMBINE); auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); return; } // Character hasn't learnt the recipe yet. // must_learn: // bit 1 (0x01): recipe can't be experimented // bit 2 (0x02): can try to experiment but not useable for auto-combine until learnt // bit 5 (0x10): no learn message, use unlisted flag to prevent it showing up on search // bit 6 (0x20): unlisted recipe flag if ((spec.must_learn & 0xF) == 1 && !spec.has_learnt) { LogTradeskillsDetail("[HandleCombine] Check 3"); // Made up message for the client. Just giving a DNC is the other option. user->Message(Chat::LightBlue, "You need to learn how to combine these first."); auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); return; } // Character does not have the required skill. if(spec.skill_needed > 0 && user->GetSkill(spec.tradeskill) < spec.skill_needed ) { LogTradeskillsDetail("Check 4"); // Notify client. 
user->Message(Chat::LightBlue, "You are not skilled enough."); auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); return; } //changing from a switch to string of if's since we don't need to iterate through all of the skills in the SkillType enum if (spec.tradeskill == EQ::skills::SkillAlchemy) { if (user_pp.class_ != SHAMAN) { user->Message(Chat::Red, "This tradeskill can only be performed by a shaman."); return; } else if (user_pp.level < MIN_LEVEL_ALCHEMY) { user->Message(Chat::Red, "You cannot perform alchemy until you reach level %i.", MIN_LEVEL_ALCHEMY); return; } } else if (spec.tradeskill == EQ::skills::SkillTinkering) { if (user_pp.race != GNOME) { user->Message(Chat::Red, "Only gnomes can tinker."); return; } } else if (spec.tradeskill == EQ::skills::SkillMakePoison) { if (user_pp.class_ != ROGUE) { user->Message(Chat::Red, "Only rogues can mix poisons."); return; } } // final check for any additional quest requirements .. "check_zone" in this case - exported as variable [validate_type] if (parse->EventPlayer(EVENT_COMBINE_VALIDATE, user, fmt::format("check_zone {}", zone->GetZoneID()), spec.recipe_id) != 0) { user->Message(Chat::Emote, "You cannot make this combine because the location requirement has not been met."); auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); return; } // Send acknowledgement packets to client auto outapp = new EQApplicationPacket(OP_TradeSkillCombine, 0); user->QueuePacket(outapp); safe_delete(outapp); //now clean out the containers. if(worldcontainer){ container->Clear(); outapp = new EQApplicationPacket(OP_ClearObject, sizeof(ClearObject_Struct)); ClearObject_Struct *cos = (ClearObject_Struct *)outapp->pBuffer; cos->Clear = 1; user->QueuePacket(outapp); safe_delete(outapp); database.DeleteWorldContainer(worldo->m_id, zone->GetZoneID()); } else{ for (uint8 i = EQ::invbag::SLOT_BEGIN; i < EQ::invtype::WORLD_SIZE; i++) { const EQ::ItemInstance* inst = container->GetItem(i); if (inst) { user->DeleteItemInInventory(EQ::InventoryProfile::CalcSlotId(in_combine->container_slot, i), 0, true); } } container->Clear(); } //do the check and send results... bool success = user->TradeskillExecute(&spec); // Learn new recipe message // Update Made count if (success) { if (!spec.has_learnt && ((spec.must_learn&0x10) != 0x10)) { user->MessageString(Chat::LightBlue, TRADESKILL_LEARN_RECIPE, spec.name.c_str()); } database.UpdateRecipeMadecount(spec.recipe_id, user->CharacterID(), spec.madecount+1); } // Replace the container on success if required. // if (success && spec.replace_container) { if (worldcontainer) { //should report this error, but we dont have the recipe ID, so its not very useful LogError("Replace container combine executed in a world container"); } else { user->DeleteItemInInventory(in_combine->container_slot, 0, true); } } if (success) { parse->EventPlayer(EVENT_COMBINE_SUCCESS, user, spec.name.c_str(), spec.recipe_id); } else { parse->EventPlayer(EVENT_COMBINE_FAILURE, user, spec.name.c_str(), spec.recipe_id); } } void Object::HandleAutoCombine(Client* user, const RecipeAutoCombine_Struct* rac) { //get our packet ready, gotta send one no matter what... 
auto outapp = new EQApplicationPacket(OP_RecipeAutoCombine, sizeof(RecipeAutoCombine_Struct)); RecipeAutoCombine_Struct *outp = (RecipeAutoCombine_Struct *)outapp->pBuffer; outp->object_type = rac->object_type; outp->some_id = rac->some_id; outp->unknown1 = rac->unknown1; outp->recipe_id = rac->recipe_id; outp->reply_code = 0xFFFFFFF5; //default fail. //ask the database for the recipe to make sure it exists... DBTradeskillRecipe_Struct spec; if (!content_db.GetTradeRecipe(rac->recipe_id, rac->object_type, rac->some_id, user->CharacterID(), &spec)) { LogError("Unknown recipe for HandleAutoCombine: [{}]\n", rac->recipe_id); user->QueuePacket(outapp); safe_delete(outapp); return; } // Character hasn't learnt the recipe yet. // This shouldn't happen. if ((spec.must_learn&0xf) && !spec.has_learnt) { // Made up message for the client. Just giving a DNC is the other option. user->Message(Chat::LightBlue, "You need to learn how to combine these first."); user->QueuePacket(outapp); safe_delete(outapp); return; } //pull the list of components std::string query = StringFormat("SELECT tre.item_id, tre.componentcount " "FROM tradeskill_recipe_entries AS tre " "WHERE tre.componentcount > 0 AND tre.recipe_id = %u", rac->recipe_id); auto results = content_db.QueryDatabase(query); if (!results.Success()) { user->QueuePacket(outapp); safe_delete(outapp); return; } if(results.RowCount() < 1) { LogError("Error in HandleAutoCombine: no components returned"); user->QueuePacket(outapp); safe_delete(outapp); return; } if(results.RowCount() > 10) { LogError("Error in HandleAutoCombine: too many components returned ([{}])", results.RowCount()); user->QueuePacket(outapp); safe_delete(outapp); return; } uint32 items[10]; memset(items, 0, sizeof(items)); uint8 counts[10]; memset(counts, 0, sizeof(counts)); //search for all the items in their inventory EQ::InventoryProfile& user_inv = user->GetInv(); uint8 count = 0; uint8 needcount = 0; std::list<int> MissingItems; uint8 needItemIndex = 0; for (auto row = results.begin(); row != results.end(); ++row, ++needItemIndex) { uint32 item = (uint32)atoi(row[0]); uint8 num = (uint8) atoi(row[1]); needcount += num; //because a HasItem on items with num > 1 only returns the //last-most slot... the results of this are useless to us //when we go to delete them because we cannot assume it is in a single stack. if (user_inv.HasItem(item, num, invWherePersonal) != INVALID_INDEX) count += num; else MissingItems.push_back(item); //dont start deleting anything until we have found it all. items[needItemIndex] = item; counts[needItemIndex] = num; } //make sure we found it all... if(count != needcount) { user->QueuePacket(outapp); safe_delete(outapp); user->MessageString(Chat::Skills, TRADESKILL_MISSING_COMPONENTS); for (auto it = MissingItems.begin(); it != MissingItems.end(); ++it) { const EQ::ItemData* item = database.GetItem(*it); if(item) user->MessageString(Chat::Skills, TRADESKILL_MISSING_ITEM, item->Name); } return; } //now we know they have everything... //remove all the items from the players inventory, with updates... int16 slot; for(uint8 r = 0; r < results.RowCount(); r++) { if(items[r] == 0 || counts[r] == 0) continue; //skip empties, could prolly break here //we have to loop here to delete 1 at a time in case its in multiple stacks. for(uint8 k = 0; k < counts[r]; k++) { slot = user_inv.HasItem(items[r], 1, invWherePersonal); if (slot == INVALID_INDEX) { //WTF... I just checked this above, but just to be sure... //we cant undo the previous deletes without a lot of work. 
//so just call it quits, this shouldent ever happen anyways. user->QueuePacket(outapp); safe_delete(outapp); return; } const EQ::ItemInstance* inst = user_inv.GetItem(slot); if (inst && !inst->IsStackable()) user->DeleteItemInInventory(slot, 0, true); else user->DeleteItemInInventory(slot, 1, true); } } //otherwise, we found it all... outp->reply_code = 0x00000000; //success for finding it... user->QueuePacket(outapp); safe_delete(outapp); //now actually try to make something... bool success = user->TradeskillExecute(&spec); if (success) { if (!spec.has_learnt && ((spec.must_learn & 0x10) != 0x10)) { user->MessageString(Chat::LightBlue, TRADESKILL_LEARN_RECIPE, spec.name.c_str()); } database.UpdateRecipeMadecount(spec.recipe_id, user->CharacterID(), spec.madecount+1); } //TODO: find in-pack containers in inventory, make sure they are really //there, and then use that slot to handle replace_container too. if(success && spec.replace_container) { // user->DeleteItemInInventory(in_combine->container_slot, 0, true); } if (success) parse->EventPlayer(EVENT_COMBINE_SUCCESS, user, spec.name.c_str(), spec.recipe_id); else parse->EventPlayer(EVENT_COMBINE_FAILURE, user, spec.name.c_str(), spec.recipe_id); } EQ::skills::SkillType Object::TypeToSkill(uint32 type) { switch(type) { // grouped and ordered by SkillUseTypes name - new types need to be verified for proper SkillUseTypes and use /*SkillAlchemy*/ case EQ::item::BagTypeMedicineBag: return EQ::skills::SkillAlchemy; /*SkillBaking*/ //case EQ::item::BagTypeMixingBowl: // No idea... case EQ::item::BagTypeOven: return EQ::skills::SkillBaking; /*SkillBlacksmithing*/ case EQ::item::BagTypeForge: //case EQ::item::BagTypeKoadaDalForge: case EQ::item::BagTypeTeirDalForge: case EQ::item::BagTypeOggokForge: case EQ::item::BagTypeStormguardForge: //case EQ::item::BagTypeAkanonForge: //case EQ::item::BagTypeNorthmanForge: //case EQ::item::BagTypeCabilisForge: //case EQ::item::BagTypeFreeportForge: //case EQ::item::BagTypeRoyalQeynosForge: //case EQ::item::BagTypeTrollForge: case EQ::item::BagTypeFierDalForge: case EQ::item::BagTypeValeForge: //case EQ::item::BagTypeErudForge: //case EQ::item::BagTypeGuktaForge: return EQ::skills::SkillBlacksmithing; /*SkillBrewing*/ //case EQ::item::BagTypeIceCreamChurn: // No idea... case EQ::item::BagTypeBrewBarrel: return EQ::skills::SkillBrewing; /*SkillFishing*/ case EQ::item::BagTypeTackleBox: return EQ::skills::SkillFishing; /*SkillFletching*/ case EQ::item::BagTypeFletchingKit: //case EQ::item::BagTypeFierDalFletchingKit: return EQ::skills::SkillFletching; /*SkillJewelryMaking*/ case EQ::item::BagTypeJewelersKit: return EQ::skills::SkillJewelryMaking; /*SkillMakePoison*/ // This is a guess and needs to be verified... 
(Could be SkillAlchemy) //case EQ::item::BagTypeMortar: // return SkillMakePoison; /*SkillPottery*/ case EQ::item::BagTypePotteryWheel: case EQ::item::BagTypeKiln: //case EQ::item::BagTypeIksarPotteryWheel: return EQ::skills::SkillPottery; /*SkillResearch*/ //case EQ::item::BagTypeLexicon: case EQ::item::BagTypeWizardsLexicon: case EQ::item::BagTypeMagesLexicon: case EQ::item::BagTypeNecromancersLexicon: case EQ::item::BagTypeEnchantersLexicon: //case EQ::item::BagTypeConcordanceofResearch: return EQ::skills::SkillResearch; /*SkillTailoring*/ case EQ::item::BagTypeSewingKit: //case EQ::item::BagTypeHalflingTailoringKit: //case EQ::item::BagTypeErudTailoringKit: //case EQ::item::BagTypeFierDalTailoringKit: return EQ::skills::SkillTailoring; /*SkillTinkering*/ case EQ::item::BagTypeToolBox: return EQ::skills::SkillTinkering; /*Undefined*/ default: return TradeskillUnknown; } } void Client::SendTradeskillSearchResults( const std::string &query, unsigned long objtype, unsigned long someid ) { auto results = content_db.QueryDatabase(query); if (!results.Success()) { return; } if (results.RowCount() < 1) { return; } auto character_learned_recipe_list = CharacterRecipeListRepository::GetLearnedRecipeList(CharacterID()); for (auto row = results.begin(); row != results.end(); ++row) { if (row == nullptr || row[0] == nullptr || row[1] == nullptr || row[2] == nullptr || row[3] == nullptr || row[5] == nullptr) { continue; } uint32 recipe_id = (uint32) atoi(row[0]); const char *name = row[1]; uint32 trivial = (uint32) atoi(row[2]); uint32 comp_count = (uint32) atoi(row[3]); uint32 tradeskill = (uint16) atoi(row[5]); // Skip the recipes that exceed the threshold in skill difference // Recipes that have either been made before or were // explicitly learned are excempt from that limit if (RuleB(Skills, UseLimitTradeskillSearchSkillDiff) && ((int32) trivial - (int32) GetSkill((EQ::skills::SkillType) tradeskill)) > RuleI(Skills, MaxTradeskillSearchSkillDiff)) { LogTradeskills("Checking limit recipe_id [{}] name [{}]", recipe_id, name); auto character_learned_recipe = CharacterRecipeListRepository::GetRecipe( character_learned_recipe_list, recipe_id ); if (character_learned_recipe.made_count == 0) { continue; } } auto outapp = new EQApplicationPacket(OP_RecipeReply, sizeof(RecipeReply_Struct)); RecipeReply_Struct *reply = (RecipeReply_Struct *) outapp->pBuffer; reply->object_type = objtype; reply->some_id = someid; reply->component_count = comp_count; reply->recipe_id = recipe_id; reply->trivial = trivial; strn0cpy(reply->recipe_name, name, sizeof(reply->recipe_name)); FastQueuePacket(&outapp); } } void Client::SendTradeskillDetails(uint32 recipe_id) { std::string query = StringFormat("SELECT tre.item_id,tre.componentcount,i.icon,i.Name " "FROM tradeskill_recipe_entries AS tre " "LEFT JOIN items AS i ON tre.item_id = i.id " "WHERE tre.componentcount > 0 AND tre.recipe_id = %u", recipe_id); auto results = content_db.QueryDatabase(query); if (!results.Success()) { return; } if(results.RowCount() < 1) { LogError("Error in SendTradeskillDetails: no components returned"); return; } if(results.RowCount() > 10) { LogError("Error in SendTradeskillDetails: too many components returned ([{}])", results.RowCount()); return; } //biggest this packet can ever be: // 64 * 10 + 8 * 10 + 4 + 4 * 10 = 764 auto buf = new char[775]; // dynamic so we can just give it to EQApplicationPacket uint8 r,k; uint32 *header = (uint32 *) buf; //Hell if I know why this is in the wrong byte order.... 
*header = htonl(recipe_id); char *startblock = buf; startblock += sizeof(uint32); uint32 *ffff_start = (uint32 *) startblock; //fill in the FFFF's as if there were 0 items for(r = 0; r < 10; r++) { // world:item container size related? *ffff_start = 0xFFFFFFFF; ffff_start++; } char * datastart = (char *) ffff_start; char * cblock = (char *) ffff_start; uint32 *itemptr; uint32 *iconptr; uint32 len; uint32 datalen = 0; uint8 count = 0; for(auto row = results.begin(); row != results.end(); ++row) { //watch for references to items which are not in the //items table, which the left join will make nullptr... if(row[2] == nullptr || row[3] == nullptr) continue; uint32 item = (uint32)atoi(row[0]); uint8 num = (uint8) atoi(row[1]); uint32 icon = (uint32) atoi(row[2]); const char *name = row[3]; len = strlen(name); if(len > 63) len = 63; //Hell if I know why these are in the wrong byte order.... item = htonl(item); icon = htonl(icon); //if we get more than 10 items, just start skipping them... for(k = 0; k < num && count < 10; k++) { // world:item container size related? itemptr = (uint32 *) cblock; cblock += sizeof(uint32); datalen += sizeof(uint32); iconptr = (uint32 *) cblock; cblock += sizeof(uint32); datalen += sizeof(uint32); *itemptr = item; *iconptr = icon; strncpy(cblock, name, len); cblock[len] = '\0'; //just making sure. cblock += len + 1; //get the null datalen += len + 1; //get the null count++; } } //now move the item data over top of the FFFFs uint8 dist = sizeof(uint32) * (10 - count); startblock += dist; memmove(startblock, datastart, datalen); uint32 total = sizeof(uint32) + dist + datalen; auto outapp = new EQApplicationPacket(OP_RecipeDetails); outapp->size = total; outapp->pBuffer = (uchar*) buf; QueuePacket(outapp); DumpPacket(outapp); safe_delete(outapp); } //returns true on success bool Client::TradeskillExecute(DBTradeskillRecipe_Struct *spec) { if(spec == nullptr) return(false); uint16 user_skill = GetSkill(spec->tradeskill); float chance = 0.0; float skillup_modifier = 0.0; int16 thirdstat = 0; int16 stat_modifier = 15; uint16 success_modifier = 0; // Rework based on the info on eqtraders.com // http://mboards.eqtraders.com/eq/showthread.php?t=22246 // 09/10/2006 v0.1 (eq4me) // 09/11/2006 v0.2 (eq4me) // Todo: // Implementing AAs // Success modifiers based on recipes // Skillup modifiers based on the rarity of the ingredients // Some tradeskills are more eqal then others. ;-) // If you want to customize the stage1 success rate do it here. // Remember: skillup_modifier is (float). 
Lower is better switch(spec->tradeskill) { case EQ::skills::SkillFletching: skillup_modifier = RuleI(Character, TradeskillUpFletching); break; case EQ::skills::SkillAlchemy: skillup_modifier = RuleI(Character, TradeskillUpAlchemy); break; case EQ::skills::SkillJewelryMaking: skillup_modifier = RuleI(Character, TradeskillUpJewelcrafting); break; case EQ::skills::SkillPottery: skillup_modifier = RuleI(Character, TradeskillUpPottery); break; case EQ::skills::SkillBaking: skillup_modifier = RuleI(Character, TradeskillUpBaking); break; case EQ::skills::SkillBrewing: skillup_modifier = RuleI(Character, TradeskillUpBrewing); break; case EQ::skills::SkillBlacksmithing: skillup_modifier = RuleI(Character, TradeskillUpBlacksmithing); break; case EQ::skills::SkillResearch: skillup_modifier = RuleI(Character, TradeskillUpResearch); break; case EQ::skills::SkillMakePoison: skillup_modifier = RuleI(Character, TradeskillUpMakePoison); break; case EQ::skills::SkillTinkering: skillup_modifier = RuleI(Character, TradeskillUpTinkering); break; default: skillup_modifier = 2; break; } // Some tradeskills take the higher of one additional stat beside INT and WIS // to determine the skillup rate. Additionally these tradeskills do not have an // -15 modifier on their statbonus. if (spec->tradeskill == EQ::skills::SkillFletching || spec->tradeskill == EQ::skills::SkillMakePoison) { thirdstat = GetDEX(); stat_modifier = 0; } else if (spec->tradeskill == EQ::skills::SkillBlacksmithing) { thirdstat = GetSTR(); stat_modifier = 0; } int16 higher_from_int_wis = (GetINT() > GetWIS()) ? GetINT() : GetWIS(); int16 bonusstat = (higher_from_int_wis > thirdstat) ? higher_from_int_wis : thirdstat; std::vector< std::pair<uint32,uint8> >::iterator itr; //calculate the base success chance // For trivials over 68 the chance is (skill - 0.75*trivial) +51.5 // For trivial up to 68 the chance is (skill - trivial) + 66 if (spec->trivial >= 68) { chance = (user_skill - (0.75*spec->trivial)) + 51.5; } else { chance = (user_skill - spec->trivial) + 66; } int16 over_trivial = (int16)GetRawSkill(spec->tradeskill) - (int16)spec->trivial; //handle caps if(spec->nofail) { chance = 100; //cannot fail. LogTradeskills("This combine cannot fail"); } else if(over_trivial >= 0) { // At reaching trivial the chance goes to 95% going up an additional // percent for every 40 skillpoints above the trivial. // The success rate is not modified through stats. // Mastery AAs are unaccounted for so far. // chance_AA = chance + ((100 - chance) * mastery_modifier) // But the 95% limit with an additional 1% for every 40 skill points // above critical still stands. // Mastery modifier is: 10%/25%/50% for rank one/two/three chance = 95.0f + (float(user_skill - spec->trivial) / 40.0f); MessageString(Chat::Emote, TRADESKILL_TRIVIAL); } else if(chance < 5) { // Minimum chance is always 5 chance = 5; } else if(chance > 95) { //cap is 95, shouldent reach this before trivial, but just in case. 
chance = 95; } LogTradeskills("Current skill: [{}] , Trivial: [{}] , Success chance: [{}] percent", user_skill , spec->trivial , chance); LogTradeskills("Bonusstat: [{}] , INT: [{}] , WIS: [{}] , DEX: [{}] , STR: [{}]", bonusstat , GetINT() , GetWIS() , GetDEX() , GetSTR()); float res = zone->random.Real(0, 99); int aa_chance = 0; aa_chance = spellbonuses.ReduceTradeskillFail[spec->tradeskill] + itembonuses.ReduceTradeskillFail[spec->tradeskill] + aabonuses.ReduceTradeskillFail[spec->tradeskill]; const EQ::ItemData* item = nullptr; chance = mod_tradeskill_chance(chance, spec); if (((spec->tradeskill==75) || GetGM() || (chance > res)) || zone->random.Roll(aa_chance)) { success_modifier = 1; if (over_trivial < 0) { CheckIncreaseTradeskill(bonusstat, stat_modifier, skillup_modifier, success_modifier, spec->tradeskill); } MessageString(Chat::LightBlue, TRADESKILL_SUCCEED, spec->name.c_str()); LogTradeskills("Tradeskill success"); itr = spec->onsuccess.begin(); while(itr != spec->onsuccess.end() && !spec->quest) { SummonItem(itr->first, itr->second); item = database.GetItem(itr->first); if (item) { if (GetGroup()) { entity_list.MessageGroup(this, true, Chat::Skills, "%s has successfully fashioned %s!", GetName(), item->Name); } } else { Log( Logs::General, Logs::Tradeskills, StringFormat( "Failure (null item pointer [id: %u, qty: %u]) :: recipe_id:%i tskillid:%i trivial:%i chance:%4.2f in zoneid:%i instid:%i", itr->first, itr->second, spec->recipe_id, spec->tradeskill, spec->trivial, chance, this->GetZoneID(), this->GetInstanceID() ).c_str() ); } /* QS: Player_Log_Trade_Skill_Events */ if (RuleB(QueryServ, PlayerLogTradeSkillEvents)) { std::string event_desc = StringFormat("Success :: fashioned recipe_id:%i tskillid:%i trivial:%i chance:%4.2f in zoneid:%i instid:%i", spec->recipe_id, spec->tradeskill, spec->trivial, chance, this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Trade_Skill_Events, this->CharacterID(), event_desc); } if (RuleB(TaskSystem, EnableTaskSystem)) { UpdateTasksForItem(TaskActivityType::TradeSkill, itr->first, itr->second); } ++itr; } return(true); } /* Tradeskill Fail */ else { success_modifier = 2; // Halves the chance if(over_trivial < 0) CheckIncreaseTradeskill(bonusstat, stat_modifier, skillup_modifier, success_modifier, spec->tradeskill); MessageString(Chat::Emote,TRADESKILL_FAILED); LogTradeskills("Tradeskill failed"); if (this->GetGroup()) { entity_list.MessageGroup(this, true, Chat::Skills,"%s was unsuccessful in %s tradeskill attempt.",GetName(),this->GetGender() == 0 ? "his" : this->GetGender() == 1 ? "her" : "its"); } /* QS: Player_Log_Trade_Skill_Events */ if (RuleB(QueryServ, PlayerLogTradeSkillEvents)){ std::string event_desc = StringFormat("Failed :: recipe_id:%i tskillid:%i trivial:%i chance:%4.2f in zoneid:%i instid:%i", spec->recipe_id, spec->tradeskill, spec->trivial, chance, this->GetZoneID(), this->GetInstanceID()); QServ->PlayerLogEvent(Player_Log_Trade_Skill_Events, this->CharacterID(), event_desc); } itr = spec->onfail.begin(); while(itr != spec->onfail.end()) { //should we check these arguments? 
SummonItem(itr->first, itr->second); ++itr; } /* Salvage Item rolls */ // Rolls on each item, it is possible to return everything int SalvageChance = aabonuses.SalvageChance + itembonuses.SalvageChance + spellbonuses.SalvageChance; // Skip check if not a normal TS or if a quest recipe; these should be nofail, but check anyways if(SalvageChance && spec->tradeskill != 75 && !spec->quest) { itr = spec->salvage.begin(); uint8 sc = 0; while(itr != spec->salvage.end()) { for(sc = 0; sc < itr->second; sc++) if(zone->random.Roll(SalvageChance)) SummonItem(itr->first, 1); ++itr; } } } return(false); } void Client::CheckIncreaseTradeskill(int16 bonusstat, int16 stat_modifier, float skillup_modifier, uint16 success_modifier, EQ::skills::SkillType tradeskill) { uint16 current_raw_skill = GetRawSkill(tradeskill); if(!CanIncreaseTradeskill(tradeskill)) return; //not allowed to go higher. float chance_stage2 = 0; //A successful combine doubles the stage1 chance for a skillup //Some tradeskills are harder than others. See above for more. float chance_stage1 = (bonusstat - stat_modifier) / (skillup_modifier * success_modifier); //In stage2 the only thing that matters is your current unmodified skill. //If you want to customize here you probably need to implement your own //formula instead of tweaking the below one. if (chance_stage1 > zone->random.Real(0, 99)) { if (current_raw_skill < 15) { //Always succeed chance_stage2 = 100; } else if (current_raw_skill < 175) { //From skill 16 to 174 your chance of success falls linearly from 92% to 13%. chance_stage2 = (200 - current_raw_skill) / 2; } else { //At skill 175, your chance of success falls linearly from 12.5% to 2.5% at skill 300. chance_stage2 = 12.5 - (.08 * (current_raw_skill - 175)); } } chance_stage2 = mod_tradeskill_skillup(chance_stage2); if (chance_stage2 > zone->random.Real(0, 99)) { //Only if stage1 and stage2 succeeded you get a skillup. SetSkill(tradeskill, current_raw_skill + 1); if(title_manager.IsNewTradeSkillTitleAvailable(tradeskill, current_raw_skill + 1)) NotifyNewTitlesAvailable(); } LogTradeskills("[CheckIncreaseTradeskill] skillup_modifier: [{}] , success_modifier: [{}] , stat modifier: [{}]", skillup_modifier , success_modifier , stat_modifier); LogTradeskills("[CheckIncreaseTradeskill] Stage1 chance was: [{}] percent", chance_stage1); LogTradeskills("[CheckIncreaseTradeskill] Stage2 chance was: [{}] percent. 0 percent means stage1 failed", chance_stage2); } bool ZoneDatabase::GetTradeRecipe( const EQ::ItemInstance *container, uint8 c_type, uint32 some_id, uint32 char_id, DBTradeskillRecipe_Struct *spec ) { if (container == nullptr) { LogTradeskills("[GetTradeRecipe] Container null"); return false; } std::string containers;// make where clause segment for container(s) if (some_id == 0) { containers = StringFormat("= %u", c_type); // world combiner so no item number } else { containers = StringFormat("IN (%u,%u)", c_type, some_id); } // container in inventory //Could prolly watch for stacks in this loop and handle them properly...
//just increment sum and count accordingly bool first = true; std::string buf2; uint32 count = 0; uint32 sum = 0; for (uint8 i = 0; i < 10; i++) { // <watch> TODO: need to determine if this is bound to world/item container size LogTradeskills("[GetTradeRecipe] Fetching item [{}]", i); const EQ::ItemInstance *inst = container->GetItem(i); if (!inst) { continue; } const EQ::ItemData *item = database.GetItem(inst->GetItem()->ID); if (!item) { LogTradeskills("[GetTradeRecipe] item [{}] not found!", inst->GetItem()->ID); continue; } if (first) { buf2 += StringFormat("%d", item->ID); first = false; } else { buf2 += StringFormat(",%d", item->ID); } sum += item->ID; count++; LogTradeskills( "[GetTradeRecipe] Item in container index [{}] item [{}] found [{}]", i, item->ID, count ); } //no items == no recipe if (count == 0) { return false; } std::string query = StringFormat("SELECT tre.recipe_id " "FROM tradeskill_recipe_entries AS tre " "INNER JOIN tradeskill_recipe AS tr ON (tre.recipe_id = tr.id) " "WHERE tr.enabled AND (( tre.item_id IN(%s) AND tre.componentcount > 0) " "OR ( tre.item_id %s AND tre.iscontainer=1 ))" "GROUP BY tre.recipe_id HAVING sum(tre.componentcount) = %u " "AND sum(tre.item_id * tre.componentcount) = %u", buf2.c_str(), containers.c_str(), count, sum); auto results = QueryDatabase(query); if (!results.Success()) { LogError("Error in GetTradeRecipe search, query: [{}]", query.c_str()); LogError("Error in GetTradeRecipe search, error: [{}]", results.ErrorMessage().c_str()); return false; } if (results.RowCount() > 1) { //multiple recipes, partial match... do an extra query to get it exact. //this happens when combining components for a smaller recipe //which is completely contained within another recipe first = true; uint32 index = 0; buf2 = ""; for (auto row = results.begin(); row != results.end(); ++row, ++index) { uint32 recipeid = (uint32)atoi(row[0]); if(first) { buf2 += StringFormat("%u", recipeid); first = false; } else buf2 += StringFormat(",%u", recipeid); //length limit on buf2 if(index == 214) { //Maximum number of recipe matches (19 * 215 = 4096) LogError("GetTradeRecipe warning: Too many matches. Unable to search all recipe entries. Searched [{}] of [{}] possible entries", index + 1, results.RowCount()); break; } } query = StringFormat("SELECT tre.recipe_id " "FROM tradeskill_recipe_entries AS tre " "WHERE tre.recipe_id IN (%s) " "GROUP BY tre.recipe_id HAVING sum(tre.componentcount) = %u " "AND sum(tre.item_id * tre.componentcount) = %u", buf2.c_str(), count, sum ); results = QueryDatabase(query); if (!results.Success()) { LogError("Error in GetTradeRecipe, re-query: [{}]", query.c_str()); LogError("Error in GetTradeRecipe, error: [{}]", results.ErrorMessage().c_str()); return false; } } if (results.RowCount() < 1) return false; if (results.RowCount() > 1) { //The recipe is not unique, so we need to compare the container were using. 
uint32 containerId = 0; if (some_id) { //Standard container containerId = some_id; } else if (c_type) {//World container containerId = c_type; } else { //Invalid container return false; } query = StringFormat( "SELECT tre.recipe_id " "FROM tradeskill_recipe_entries AS tre " "WHERE tre.recipe_id IN (%s) " "AND tre.item_id = %u;", buf2.c_str(), containerId ); results = QueryDatabase(query); if (!results.Success()) { LogError("Error in GetTradeRecipe, re-query: [{}]", query.c_str()); LogError("Error in GetTradeRecipe, error: [{}]", results.ErrorMessage().c_str()); return false; } if (results.RowCount() == 0) { //Recipe contents matched more than 1 recipe, but not in this container LogError("Combine error: Incorrect container is being used!"); return false; } if (results.RowCount() > 1) { //Recipe contents matched more than 1 recipe in this container LogError( "Combine error: Recipe is not unique! [{}] matches found for container [{}]. Continuing with first recipe match", results.RowCount(), containerId ); } } auto row = results.begin(); uint32 recipe_id = (uint32)atoi(row[0]); //Right here we verify that we actually have ALL of the tradeskill components.. //instead of part which is possible with experimentation. //This is here because something's up with the query above.. it needs to be rethought out bool has_components = true; query = StringFormat("SELECT item_id, componentcount " "FROM tradeskill_recipe_entries " "WHERE recipe_id = %i AND componentcount > 0", recipe_id); results = QueryDatabase(query); if (!results.Success()) { return GetTradeRecipe(recipe_id, c_type, some_id, char_id, spec); } if (results.RowCount() == 0) { return GetTradeRecipe(recipe_id, c_type, some_id, char_id, spec); } for (auto row = results.begin(); row != results.end(); ++row) { int component_count = 0; for (int x = EQ::invbag::SLOT_BEGIN; x < EQ::invtype::WORLD_SIZE; x++) { const EQ::ItemInstance* inst = container->GetItem(x); if(!inst) continue; const EQ::ItemData* item = database.GetItem(inst->GetItem()->ID); if (!item) continue; if (item->ID == atoi(row[0])) { component_count++; } LogTradeskills( "[GetTradeRecipe] Component count loop [{}] item [{}] recipe component_count [{}]", component_count, item->ID, atoi(row[1]) ); } if (component_count != atoi(row[1])) { return false; } } return GetTradeRecipe(recipe_id, c_type, some_id, char_id, spec); } bool ZoneDatabase::GetTradeRecipe( uint32 recipe_id, uint8 c_type, uint32 some_id, uint32 char_id, DBTradeskillRecipe_Struct *spec ) { std::string container_where_filter; if (some_id == 0) { // world combiner so no item number container_where_filter = StringFormat("= %u", c_type); } else { // container in inventory container_where_filter = StringFormat("IN (%u,%u)", c_type, some_id); } std::string query = StringFormat( SQL ( SELECT tradeskill_recipe.id, tradeskill_recipe.tradeskill, tradeskill_recipe.skillneeded, tradeskill_recipe.trivial, tradeskill_recipe.nofail, tradeskill_recipe.replace_container, tradeskill_recipe.name, tradeskill_recipe.must_learn, tradeskill_recipe.quest FROM tradeskill_recipe INNER JOIN tradeskill_recipe_entries ON tradeskill_recipe.id = tradeskill_recipe_entries.recipe_id WHERE tradeskill_recipe.id = %lu AND tradeskill_recipe_entries.item_id %s AND tradeskill_recipe.enabled GROUP BY tradeskill_recipe.id ) , (unsigned long) recipe_id, container_where_filter.c_str() ); auto results = QueryDatabase(query); if (!results.Success()) { LogError("Error in GetTradeRecipe, query: [{}]", query.c_str()); LogError("Error in GetTradeRecipe, error: [{}]", 
results.ErrorMessage().c_str()); return false; } if (results.RowCount() != 1) { return false; } auto row = results.begin(); spec->tradeskill = (EQ::skills::SkillType) atoi(row[1]); spec->skill_needed = (int16) atoi(row[2]); spec->trivial = (uint16) atoi(row[3]); spec->nofail = atoi(row[4]) ? true : false; spec->replace_container = atoi(row[5]) ? true : false; spec->name = row[6]; spec->must_learn = (uint8) atoi(row[7]); spec->quest = atoi(row[8]) ? true : false; spec->has_learnt = false; spec->madecount = 0; spec->recipe_id = recipe_id; auto character_learned_recipe_list = CharacterRecipeListRepository::GetLearnedRecipeList(char_id); auto character_learned_recipe = CharacterRecipeListRepository::GetRecipe( character_learned_recipe_list, recipe_id ); if (character_learned_recipe.made_count > 0) { LogTradeskills("[GetTradeRecipe] made_count [{}]", character_learned_recipe.made_count); spec->has_learnt = true; spec->madecount = (uint32)character_learned_recipe.made_count; } //Pull the on-success items... query = StringFormat("SELECT item_id,successcount FROM tradeskill_recipe_entries " "WHERE successcount > 0 AND recipe_id = %u", recipe_id); results = QueryDatabase(query); if (!results.Success()) { return false; } if(results.RowCount() < 1 && !spec->quest) { LogError("Error in GetTradeRecipe success: no success items returned"); return false; } spec->onsuccess.clear(); for(auto row = results.begin(); row != results.end(); ++row) { uint32 item = (uint32)atoi(row[0]); uint8 num = (uint8) atoi(row[1]); spec->onsuccess.push_back(std::pair<uint32,uint8>(item, num)); } spec->onfail.clear(); //Pull the on-fail items... query = StringFormat( "SELECT item_id, failcount FROM tradeskill_recipe_entries " "WHERE failcount > 0 AND recipe_id = %u", recipe_id ); results = QueryDatabase(query); if (results.Success()) { for (auto row = results.begin(); row != results.end(); ++row) { uint32 item = (uint32) atoi(row[0]); uint8 num = (uint8) atoi(row[1]); spec->onfail.push_back(std::pair<uint32, uint8>(item, num)); } } spec->salvage.clear(); // Don't bother with the query if TS is nofail if (spec->nofail) { return true; } // Pull the salvage list query = StringFormat( "SELECT item_id, salvagecount " "FROM tradeskill_recipe_entries " "WHERE salvagecount > 0 AND recipe_id = %u", recipe_id ); results = QueryDatabase(query); if (results.Success()) { for (auto row = results.begin(); row != results.end(); ++row) { uint32 item = (uint32) atoi(row[0]); uint8 num = (uint8) atoi(row[1]); spec->salvage.push_back(std::pair<uint32, uint8>(item, num)); } } return true; } void ZoneDatabase::UpdateRecipeMadecount(uint32 recipe_id, uint32 char_id, uint32 madeCount) { std::string query = StringFormat("INSERT INTO char_recipe_list " "SET recipe_id = %u, char_id = %u, madecount = %u " "ON DUPLICATE KEY UPDATE madecount = %u;", recipe_id, char_id, madeCount, madeCount); QueryDatabase(query); } void Client::LearnRecipe(uint32 recipe_id) { std::string query = fmt::format( SQL( select char_id, recipe_id, madecount from char_recipe_list where char_id = {} and recipe_id = {} LIMIT 1 ), CharacterID(), recipe_id ); auto results = database.QueryDatabase(query); if (!results.Success()) { return; } auto tradeskill_recipe = TradeskillRecipeRepository::FindOne(content_db, recipe_id); if (tradeskill_recipe.id == 0) { LogError("Invalid recipe [{}]", recipe_id); return; } LogTradeskills( "[LearnRecipe] recipe_id [{}] name [{}] learned [{}]", recipe_id, tradeskill_recipe.name, results.RowCount() ); auto row = results.begin(); if
(results.RowCount() > 0) { return; } MessageString(Chat::LightBlue, TRADESKILL_LEARN_RECIPE, tradeskill_recipe.name.c_str()); database.QueryDatabase( fmt::format( "REPLACE INTO char_recipe_list (recipe_id, char_id, madecount) VALUES ({}, {}, 0)", recipe_id, CharacterID() ) ); } bool Client::CanIncreaseTradeskill(EQ::skills::SkillType tradeskill) { uint32 rawskill = GetRawSkill(tradeskill); uint16 maxskill = MaxSkill(tradeskill); if (rawskill >= maxskill) //Max skill sanity check return false; uint8 Baking = (GetRawSkill(EQ::skills::SkillBaking) > 200) ? 1 : 0; uint8 Smithing = (GetRawSkill(EQ::skills::SkillBlacksmithing) > 200) ? 1 : 0; uint8 Brewing = (GetRawSkill(EQ::skills::SkillBrewing) > 200) ? 1 : 0; uint8 Fletching = (GetRawSkill(EQ::skills::SkillFletching) > 200) ? 1 : 0; uint8 Jewelry = (GetRawSkill(EQ::skills::SkillJewelryMaking) > 200) ? 1 : 0; uint8 Pottery = (GetRawSkill(EQ::skills::SkillPottery) > 200) ? 1 : 0; uint8 Tailoring = (GetRawSkill(EQ::skills::SkillTailoring) > 200) ? 1 : 0; uint8 SkillTotal = Baking + Smithing + Brewing + Fletching + Jewelry + Pottery + Tailoring; //Tradeskills above 200 //New Tanaan AA: Each level allows an additional tradeskill above 200 (first one is free) uint8 aaLevel = spellbonuses.TradeSkillMastery + itembonuses.TradeSkillMastery + aabonuses.TradeSkillMastery; switch (tradeskill) { case EQ::skills::SkillBaking: case EQ::skills::SkillBlacksmithing: case EQ::skills::SkillBrewing: case EQ::skills::SkillFletching: case EQ::skills::SkillJewelryMaking: case EQ::skills::SkillPottery: case EQ::skills::SkillTailoring: if (aaLevel == 6) break; //Maxed AA if (SkillTotal == 0) break; //First tradeskill freebie if ((SkillTotal == (aaLevel + 1)) && (rawskill > 200)) break; //One of the tradeskills already allowed to go over 200 if ((SkillTotal >= (aaLevel + 1)) && (rawskill >= 200)) return false; //One or more tradeskills already at or beyond limit break; default: break; //Other skills unchecked and ability to increase assumed true } return true; } bool ZoneDatabase::EnableRecipe(uint32 recipe_id) { std::string query = StringFormat("UPDATE tradeskill_recipe SET enabled = 1 " "WHERE id = %u;", recipe_id); auto results = QueryDatabase(query); if (!results.Success()) return false; return results.RowsAffected() > 0; } bool ZoneDatabase::DisableRecipe(uint32 recipe_id) { std::string query = StringFormat("UPDATE tradeskill_recipe SET enabled = 0 " "WHERE id = %u;", recipe_id); auto results = QueryDatabase(query); if (!results.Success()) return false; return results.RowsAffected() > 0; }
1
10,826
Doing a SELECT query inside of a loop like this is not ideal. We should fetch the recipes once and then loop through them in memory to perform this check (see the sketch below).
EQEmu-Server
cpp
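A minimal sketch of the refactor suggested in the review comment above, kept deliberately generic: RecipeComponent, HasAllComponents, and the example values are hypothetical illustrations, not part of the EQEmu-Server codebase. The only point is the pattern of issuing one query up front and doing the per-item verification against an in-memory structure rather than running a SELECT on every loop iteration.

#include <cstdint>
#include <unordered_map>
#include <vector>

// Hypothetical shape of one tradeskill_recipe_entries row (item_id, componentcount),
// fetched once by a single query such as
// "SELECT item_id, componentcount FROM tradeskill_recipe_entries WHERE recipe_id = ?".
struct RecipeComponent {
    uint32_t item_id;
    int      component_count;
};

// Validates a container purely in memory: 'required' is the result set of the
// single up-front query, 'container_items' maps item_id -> count found in the
// container. No database round trip happens inside the loop.
bool HasAllComponents(const std::vector<RecipeComponent>& required,
                      const std::unordered_map<uint32_t, int>& container_items)
{
    for (const auto& component : required) {
        auto it = container_items.find(component.item_id);
        if (it == container_items.end() || it->second != component.component_count) {
            return false; // component missing or wrong count
        }
    }
    return true;
}

int main()
{
    // Pretend these rows came back from the one SELECT.
    std::vector<RecipeComponent> required = {{1001, 2}, {1002, 1}};
    std::unordered_map<uint32_t, int> container = {{1001, 2}, {1002, 1}};
    return HasAllComponents(required, container) ? 0 : 1;
}

Keeping the database access in one place also makes it straightforward to log, reuse, or cache the fetched component list if the same check has to run more than once per combine.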
@@ -386,6 +386,8 @@ class Index(IndexOpsMixin): array([0, 1, 2, 3]) >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[[1, 2, 3], [4, 5, 6]]).index.to_numpy() array([(1, 4), (2, 5), (3, 6)], dtype=object) + >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[[1, 2, 3], [4, 5, 6]]).index.to_numpy(copy=True) + array([(1, 4), (2, 5), (3, 6)], dtype=object) """ result = np.asarray(self.to_pandas()._values, dtype=dtype) if copy:
1
# # Copyright (C) 2019 Databricks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Wrappers for Indexes to behave similar to pandas Index, MultiIndex. """ from collections import OrderedDict from distutils.version import LooseVersion from functools import partial from typing import Any, List, Optional, Tuple, Union import warnings import pandas as pd import numpy as np from pandas.api.types import ( is_list_like, is_interval_dtype, is_bool_dtype, is_categorical_dtype, is_integer_dtype, is_float_dtype, is_numeric_dtype, is_object_dtype, ) from pandas.io.formats.printing import pprint_thing import pyspark from pyspark import sql as spark from pyspark.sql import functions as F, Window from pyspark.sql.types import BooleanType, NumericType, StringType, TimestampType from databricks import koalas as ks # For running doctests and reference resolution in PyCharm. from databricks.koalas.config import get_option, option_context from databricks.koalas.exceptions import PandasNotImplementedError from databricks.koalas.base import IndexOpsMixin from databricks.koalas.frame import DataFrame from databricks.koalas.missing.indexes import _MissingPandasLikeIndex, _MissingPandasLikeMultiIndex from databricks.koalas.series import Series, _col from databricks.koalas.utils import ( compare_allow_null, compare_disallow_null, compare_null_first, compare_null_last, default_session, name_like_string, scol_for, verify_temp_column_name, validate_bool_kwarg, ) from databricks.koalas.internal import _InternalFrame, NATURAL_ORDER_COLUMN_NAME class Index(IndexOpsMixin): """ Koalas Index that corresponds to Pandas Index logically. This might hold Spark Column internally. :ivar _kdf: The parent dataframe :type _kdf: DataFrame :ivar _scol: Spark Column instance :type _scol: pyspark.Column Parameters ---------- data : DataFrame or list Index can be created by DataFrame or list dtype : dtype, default None Data type to force. Only a single dtype is allowed. If None, infer name : name of index, hashable See Also -------- MultiIndex : A multi-level, or hierarchical, Index. Examples -------- >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[1, 2, 3]).index Int64Index([1, 2, 3], dtype='int64') >>> ks.DataFrame({'a': [1, 2, 3]}, index=list('abc')).index Index(['a', 'b', 'c'], dtype='object') >>> Index([1, 2, 3]) Int64Index([1, 2, 3], dtype='int64') >>> Index(list('abc')) Index(['a', 'b', 'c'], dtype='object') """ def __init__(self, data: Union[DataFrame, list], dtype=None, name=None) -> None: if isinstance(data, DataFrame): assert dtype is None assert name is None kdf = data else: kdf = DataFrame(index=pd.Index(data=data, dtype=dtype, name=name)) internal = kdf._internal.copy( spark_column=kdf._internal.index_spark_columns[0], column_labels=kdf._internal.index_names, column_label_names=None, ) IndexOpsMixin.__init__(self, internal, kdf) def _with_new_scol(self, scol: spark.Column) -> "Index": """ Copy Koalas Index with the new Spark Column. 
:param scol: the new Spark Column :return: the copied Index """ sdf = self._internal.spark_frame.select(scol) internal = _InternalFrame( spark_frame=sdf, index_map=OrderedDict(zip(sdf.columns, self._internal.index_names)) ) return DataFrame(internal).index # This method is used via `DataFrame.info` API internally. def _summary(self, name=None): """ Return a summarized representation. Parameters ---------- name : str name to use in the summary representation Returns ------- String with a summarized representation of the index """ head, tail, total_count = self._kdf._sdf.select( F.first(self._scol), F.last(self._scol), F.count(F.expr("*")) ).first() if total_count > 0: index_summary = ", %s to %s" % (pprint_thing(head), pprint_thing(tail)) else: index_summary = "" if name is None: name = type(self).__name__ return "%s: %s entries%s" % (name, total_count, index_summary) @property def size(self) -> int: """ Return an int representing the number of elements in this object. Examples -------- >>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... columns=['dogs', 'cats'], ... index=list('abcd')) >>> df.index.size 4 >>> df.set_index('dogs', append=True).index.size 4 """ return len(self._kdf) # type: ignore @property def shape(self) -> tuple: """ Return a tuple of the shape of the underlying data. Examples -------- >>> idx = ks.Index(['a', 'b', 'c']) >>> idx Index(['a', 'b', 'c'], dtype='object') >>> idx.shape (3,) >>> midx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')]) >>> midx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y'), ('c', 'z')], ) >>> midx.shape (3,) """ return (len(self._kdf),) def identical(self, other): """ Similar to equals, but check that other comparable attributes are also equal. Returns ------- bool If two Index objects have equal elements and same type True, otherwise False. Examples -------- >>> from databricks.koalas.config import option_context >>> idx = ks.Index(['a', 'b', 'c']) >>> midx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')]) For Index >>> idx.identical(idx) True >>> with option_context('compute.ops_on_diff_frames', True): ... idx.identical(ks.Index(['a', 'b', 'c'])) True >>> with option_context('compute.ops_on_diff_frames', True): ... idx.identical(ks.Index(['b', 'b', 'a'])) False >>> idx.identical(midx) False For MultiIndex >>> midx.identical(midx) True >>> with option_context('compute.ops_on_diff_frames', True): ... midx.identical(ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')])) True >>> with option_context('compute.ops_on_diff_frames', True): ... midx.identical(ks.MultiIndex.from_tuples([('c', 'z'), ('b', 'y'), ('a', 'x')])) False >>> midx.identical(idx) False """ self_name = self.names if isinstance(self, MultiIndex) else self.name other_name = other.names if isinstance(other, MultiIndex) else other.name return (self is other) or ( type(self) == type(other) and self_name == other_name # to support non-index comparison by short-circuiting. and self.equals(other) ) def equals(self, other): """ Determine if two Index objects contain the same elements. Returns ------- bool True if "other" is an Index and it has the same elements as calling index; False otherwise. 
Examples -------- >>> from databricks.koalas.config import option_context >>> idx = ks.Index(['a', 'b', 'c']) >>> idx.name = "name" >>> midx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')]) >>> midx.names = ("nameA", "nameB") For Index >>> idx.equals(idx) True >>> with option_context('compute.ops_on_diff_frames', True): ... idx.equals(ks.Index(['a', 'b', 'c'])) True >>> with option_context('compute.ops_on_diff_frames', True): ... idx.equals(ks.Index(['b', 'b', 'a'])) False >>> idx.equals(midx) False For MultiIndex >>> midx.equals(midx) True >>> with option_context('compute.ops_on_diff_frames', True): ... midx.equals(ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')])) True >>> with option_context('compute.ops_on_diff_frames', True): ... midx.equals(ks.MultiIndex.from_tuples([('c', 'z'), ('b', 'y'), ('a', 'x')])) False >>> midx.equals(idx) False """ # TODO: avoid using default index? with option_context("compute.default_index_type", "distributed-sequence"): # Directly using Series from both self and other seems causing # some exceptions when 'compute.ops_on_diff_frames' is enabled. # Working around for now via using frame. return (self is other) or ( type(self) == type(other) and ( self.to_series().rename("self").to_frame().reset_index()["self"] == other.to_series().rename("other").to_frame().reset_index()["other"] ).all() ) def transpose(self): """ Return the transpose, For index, It will be index itself. Examples -------- >>> idx = ks.Index(['a', 'b', 'c']) >>> idx Index(['a', 'b', 'c'], dtype='object') >>> idx.transpose() Index(['a', 'b', 'c'], dtype='object') For MultiIndex >>> midx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')]) >>> midx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y'), ('c', 'z')], ) >>> midx.transpose() # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y'), ('c', 'z')], ) """ return self T = property(transpose) def to_pandas(self) -> pd.Index: """ Return a pandas Index. .. note:: This method should only be used if the resulting Pandas object is expected to be small, as all the data is loaded into the driver's memory. Examples -------- >>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... columns=['dogs', 'cats'], ... index=list('abcd')) >>> df['dogs'].index.to_pandas() Index(['a', 'b', 'c', 'd'], dtype='object') """ return self._internal.to_pandas_frame.index toPandas = to_pandas def to_numpy(self, dtype=None, copy=False): """ A NumPy ndarray representing the values in this Index or MultiIndex. .. note:: This method should only be used if the resulting NumPy ndarray is expected to be small, as all the data is loaded into the driver's memory. Parameters ---------- dtype : str or numpy.dtype, optional The dtype to pass to :meth:`numpy.asarray` copy : bool, default False Whether to ensure that the returned value is a not a view on another array. Note that ``copy=False`` does not *ensure* that ``to_numpy()`` is no-copy. Rather, ``copy=True`` ensure that a copy is made, even if not strictly necessary. Returns ------- numpy.ndarray Examples -------- >>> ks.Series([1, 2, 3, 4]).index.to_numpy() array([0, 1, 2, 3]) >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[[1, 2, 3], [4, 5, 6]]).index.to_numpy() array([(1, 4), (2, 5), (3, 6)], dtype=object) """ result = np.asarray(self.to_pandas()._values, dtype=dtype) if copy: result = result.copy() return result @property def values(self): """ Return an array representing the data in the Index. .. warning:: We recommend using `Index.to_numpy()` instead. .. 
note:: This method should only be used if the resulting NumPy ndarray is expected to be small, as all the data is loaded into the driver's memory. Returns ------- numpy.ndarray Examples -------- >>> ks.Series([1, 2, 3, 4]).index.values array([0, 1, 2, 3]) >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[[1, 2, 3], [4, 5, 6]]).index.values array([(1, 4), (2, 5), (3, 6)], dtype=object) """ warnings.warn("We recommend using `{}.to_numpy()` instead.".format(type(self).__name__)) return self.to_numpy() @property def spark_type(self): """ Returns the data type as defined by Spark, as a Spark DataType object.""" return self.to_series().spark_type @property def has_duplicates(self) -> bool: """ If index has duplicates, return True, otherwise False. Examples -------- >>> kdf = ks.DataFrame({'a': [1, 2, 3]}, index=list('aac')) >>> kdf.index.has_duplicates True >>> kdf = ks.DataFrame({'a': [1, 2, 3]}, index=[list('abc'), list('def')]) >>> kdf.index.has_duplicates False >>> kdf = ks.DataFrame({'a': [1, 2, 3]}, index=[list('aac'), list('eef')]) >>> kdf.index.has_duplicates True """ df = self._kdf._sdf.select(self._scol) col = df.columns[0] return df.select(F.count(col) != F.countDistinct(col)).first()[0] @property def name(self) -> Union[str, Tuple[str, ...]]: """Return name of the Index.""" return self.names[0] @name.setter def name(self, name: Union[str, Tuple[str, ...]]) -> None: self.names = [name] @property def names(self) -> List[Union[str, Tuple[str, ...]]]: """Return names of the Index.""" return [ name if name is None or len(name) > 1 else name[0] for name in self._internal.index_names ] @names.setter def names(self, names: List[Union[str, Tuple[str, ...]]]) -> None: if not is_list_like(names): raise ValueError("Names must be a list-like") self.rename(names, inplace=True) @property def nlevels(self) -> int: """ Number of levels in Index & MultiIndex. Examples -------- >>> kdf = ks.DataFrame({"a": [1, 2, 3]}, index=pd.Index(['a', 'b', 'c'], name="idx")) >>> kdf.index.nlevels 1 >>> kdf = ks.DataFrame({'a': [1, 2, 3]}, index=[list('abc'), list('def')]) >>> kdf.index.nlevels 2 """ return len(self._kdf._internal.index_spark_column_names) def rename( self, name: Union[str, Tuple[str, ...], List[Union[str, Tuple[str, ...]]]], inplace: bool = False, ): """ Alter Index or MultiIndex name. Able to set new names without level. Defaults to returning new index. Parameters ---------- name : label or list of labels Name(s) to set. inplace : boolean, default False Modifies the object directly, instead of creating a new Index or MultiIndex. Returns ------- Index or MultiIndex The same type as the caller or None if inplace is True. Examples -------- >>> df = ks.DataFrame({'a': ['A', 'C'], 'b': ['A', 'B']}, columns=['a', 'b']) >>> df.index.rename("c") Int64Index([0, 1], dtype='int64', name='c') >>> df.set_index("a", inplace=True) >>> df.index.rename("d") Index(['A', 'C'], dtype='object', name='d') You can also change the index name in place. 
>>> df.index.rename("e", inplace=True) >>> df.index Index(['A', 'C'], dtype='object', name='e') >>> df # doctest: +NORMALIZE_WHITESPACE b e A A C B Support for MultiIndex >>> kidx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y')]) >>> kidx.names = ['hello', 'koalas'] >>> kidx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y')], names=['hello', 'koalas']) >>> kidx.rename(['aloha', 'databricks']) # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y')], names=['aloha', 'databricks']) """ names = self._verify_for_rename(name) if inplace: kdf = self._kdf else: kdf = self._kdf.copy() kdf._internal = kdf._internal.copy( index_map=OrderedDict(zip(kdf._internal.index_spark_column_names, names)) ) idx = kdf.index idx._internal._scol = self._scol if inplace: self._internal = idx._internal else: return idx def _verify_for_rename(self, name): if name is None or isinstance(name, tuple): return [name] elif isinstance(name, str): return [(name,)] elif is_list_like(name): if len(self._internal.index_map) != len(name): raise ValueError( "Length of new names must be {}, got {}".format( len(self._internal.index_map), len(name) ) ) return [n if n is None or isinstance(n, tuple) else (n,) for n in name] else: raise TypeError("name must be a hashable type") # TODO: add downcast parameter for fillna function def fillna(self, value): """ Fill NA/NaN values with the specified value. Parameters ---------- value : scalar Scalar value to use to fill holes (e.g. 0). This value cannot be a list-likes. Returns ------- Index : filled with value Examples -------- >>> ki = ks.DataFrame({'a': ['a', 'b', 'c']}, index=[1, 2, None]).index >>> ki Float64Index([1.0, 2.0, nan], dtype='float64') >>> ki.fillna(0) Float64Index([1.0, 2.0, 0.0], dtype='float64') """ if not isinstance(value, (float, int, str, bool)): raise TypeError("Unsupported type %s" % type(value)) sdf = self._internal.spark_frame.fillna(value) result = DataFrame(self._kdf._internal.with_new_sdf(sdf)).index return result # TODO: ADD keep parameter def drop_duplicates(self): """ Return Index with duplicate values removed. Returns ------- deduplicated : Index See Also -------- Series.drop_duplicates : Equivalent method on Series. DataFrame.drop_duplicates : Equivalent method on DataFrame. Examples -------- Generate an pandas.Index with duplicate values. >>> idx = ks.Index(['lama', 'cow', 'lama', 'beetle', 'lama', 'hippo']) >>> idx.drop_duplicates().sort_values() Index(['beetle', 'cow', 'hippo', 'lama'], dtype='object') """ sdf = self._internal.spark_frame.select( self._internal.index_spark_columns ).drop_duplicates() internal = _InternalFrame(spark_frame=sdf, index_map=self._kdf._internal.index_map) result = DataFrame(internal).index return result def to_series(self, name: Union[str, Tuple[str, ...]] = None) -> Series: """ Create a Series with both index and values equal to the index keys useful with map for returning an indexer based on an index. Parameters ---------- name : string, optional name of resulting Series. If None, defaults to name of original index Returns ------- Series : dtype will be based on the type of the Index values. Examples -------- >>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... columns=['dogs', 'cats'], ... 
index=list('abcd')) >>> df['dogs'].index.to_series() a a b b c c d d Name: 0, dtype: object """ kdf = self._kdf scol = self._scol if name is not None: scol = scol.alias(name_like_string(name)) column_labels = [None] if len(kdf._internal.index_map) > 1 else kdf._internal.index_names return Series( kdf._internal.copy( spark_column=scol, column_labels=column_labels, column_label_names=None ), anchor=kdf, ) def to_frame(self, index=True, name=None) -> DataFrame: """ Create a DataFrame with a column containing the Index. Parameters ---------- index : boolean, default True Set the index of the returned DataFrame as the original Index. name : object, default None The passed name should substitute for the index name (if it has one). Returns ------- DataFrame DataFrame containing the original Index data. See Also -------- Index.to_series : Convert an Index to a Series. Series.to_frame : Convert Series to DataFrame. Examples -------- >>> idx = ks.Index(['Ant', 'Bear', 'Cow'], name='animal') >>> idx.to_frame() # doctest: +NORMALIZE_WHITESPACE animal animal Ant Ant Bear Bear Cow Cow By default, the original Index is reused. To enforce a new Index: >>> idx.to_frame(index=False) animal 0 Ant 1 Bear 2 Cow To override the name of the resulting column, specify `name`: >>> idx.to_frame(name='zoo') # doctest: +NORMALIZE_WHITESPACE zoo animal Ant Ant Bear Bear Cow Cow """ if name is None: if self._internal.index_names[0] is None: name = ("0",) else: name = self._internal.index_names[0] elif isinstance(name, str): name = (name,) scol = self._scol.alias(name_like_string(name)) sdf = self._internal.spark_frame.select(scol, NATURAL_ORDER_COLUMN_NAME) if index: index_map = OrderedDict({name_like_string(name): self._internal.index_names[0]}) else: index_map = None # type: ignore internal = _InternalFrame( spark_frame=sdf, index_map=index_map, column_labels=[name], data_spark_columns=[scol_for(sdf, name_like_string(name))], ) return DataFrame(internal) def is_boolean(self): """ Return if the current index type is a boolean type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=[True]).index.is_boolean() True """ return is_bool_dtype(self.dtype) def is_categorical(self): """ Return if the current index type is a categorical type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=[1]).index.is_categorical() False """ return is_categorical_dtype(self.dtype) def is_floating(self): """ Return if the current index type is a floating type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=[1]).index.is_floating() False """ return is_float_dtype(self.dtype) def is_integer(self): """ Return if the current index type is a integer type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=[1]).index.is_integer() True """ return is_integer_dtype(self.dtype) def is_interval(self): """ Return if the current index type is an interval type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=[1]).index.is_interval() False """ return is_interval_dtype(self.dtype) def is_numeric(self): """ Return if the current index type is a numeric type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=[1]).index.is_numeric() True """ return is_numeric_dtype(self.dtype) def is_object(self): """ Return if the current index type is a object type. Examples -------- >>> ks.DataFrame({'a': [1]}, index=["a"]).index.is_object() True """ return is_object_dtype(self.dtype) def dropna(self): """ Return Index or MultiIndex without NA/NaN values Examples -------- >>> df = ks.DataFrame([[1, 2], [4, 5], [7, 8]], ... 
index=['cobra', 'viper', None], ... columns=['max_speed', 'shield']) >>> df max_speed shield cobra 1 2 viper 4 5 NaN 7 8 >>> df.index.dropna() Index(['cobra', 'viper'], dtype='object') Also support for MultiIndex >>> midx = pd.MultiIndex([['lama', 'cow', 'falcon'], ... [None, 'weight', 'length']], ... [[0, 1, 1, 1, 1, 1, 2, 2, 2], ... [0, 1, 1, 0, 1, 2, 1, 1, 2]]) >>> s = ks.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, None], ... index=midx) >>> s lama NaN 45.0 cow weight 200.0 weight 1.2 NaN 30.0 weight 250.0 length 1.5 falcon weight 320.0 weight 1.0 length NaN Name: 0, dtype: float64 >>> s.index.dropna() # doctest: +SKIP MultiIndex([( 'cow', 'weight'), ( 'cow', 'weight'), ( 'cow', 'weight'), ( 'cow', 'length'), ('falcon', 'weight'), ('falcon', 'weight'), ('falcon', 'length')], ) """ kdf = self._kdf.copy() sdf = kdf._internal.spark_frame.select(self._internal.index_spark_columns).dropna() internal = _InternalFrame(spark_frame=sdf, index_map=self._internal.index_map) return DataFrame(internal).index def unique(self, level=None): """ Return unique values in the index. Be aware the order of unique values might be different than pandas.Index.unique :param level: int or str, optional, default is None :return: Index without deuplicates Examples -------- >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[1, 1, 3]).index.unique().sort_values() Int64Index([1, 3], dtype='int64') >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=['d', 'e', 'e']).index.unique().sort_values() Index(['d', 'e'], dtype='object') MultiIndex >>> ks.MultiIndex.from_tuples([("A", "X"), ("A", "Y"), ("A", "X")]).unique() ... # doctest: +SKIP MultiIndex([('A', 'X'), ('A', 'Y')], ) """ if level is not None: self._validate_index_level(level) scols = self._internal.index_spark_columns scol_names = self._internal.index_spark_column_names scols = [scol.alias(scol_name) for scol, scol_name in zip(scols, scol_names)] sdf = self._kdf._sdf.select(scols).distinct() return DataFrame( _InternalFrame(spark_frame=sdf, index_map=self._kdf._internal.index_map) ).index # TODO: add error parameter def drop(self, labels): """ Make new Index with passed list of labels deleted. Parameters ---------- labels : array-like Returns ------- dropped : Index Examples -------- >>> index = ks.Index([1, 2, 3]) >>> index Int64Index([1, 2, 3], dtype='int64') >>> index.drop([1]) Int64Index([2, 3], dtype='int64') """ if not isinstance(labels, (tuple, list)): labels = [labels] sdf = self._internal.spark_frame[~self._internal.index_spark_columns[0].isin(labels)] return Index( DataFrame(_InternalFrame(spark_frame=sdf, index_map=self._kdf._internal.index_map)) ) def _validate_index_level(self, level): """ Validate index level. For single-level Index getting level number is a no-op, but some verification must be done like in MultiIndex. """ if isinstance(level, int): if level < 0 and level != -1: raise IndexError( "Too many levels: Index has only 1 level," " %d is not a valid level number" % (level,) ) elif level > 0: raise IndexError("Too many levels:" " Index has only 1 level, not %d" % (level + 1)) elif level != self.name: raise KeyError( "Requested level ({}) does not match index name ({})".format(level, self.name) ) def copy(self, name=None): """ Make a copy of this object. name sets those attributes on the new object. Parameters ---------- name : string, optional to set name of index Examples -------- >>> df = ks.DataFrame([[1, 2], [4, 5], [7, 8]], ... index=['cobra', 'viper', 'sidewinder'], ... 
columns=['max_speed', 'shield']) >>> df max_speed shield cobra 1 2 viper 4 5 sidewinder 7 8 >>> df.index Index(['cobra', 'viper', 'sidewinder'], dtype='object') Copy index >>> df.index.copy() Index(['cobra', 'viper', 'sidewinder'], dtype='object') Copy index with name >>> df.index.copy(name='snake') Index(['cobra', 'viper', 'sidewinder'], dtype='object', name='snake') """ result = Index(self._kdf.copy()) if name: result.name = name return result def droplevel(self, level): """ Return index with requested level(s) removed. If resulting index has only 1 level left, the result will be of Index type, not MultiIndex. Parameters ---------- level : int, str, tuple, or list-like, default 0 If a string is given, must be the name of a level If list-like, elements must be names or indexes of levels. Returns ------- Index or MultiIndex Examples -------- >>> midx = ks.DataFrame({'a': ['a', 'b']}, index=[['a', 'x'], ['b', 'y'], [1, 2]]).index >>> midx # doctest: +SKIP MultiIndex([('a', 'b', 1), ('x', 'y', 2)], ) >>> midx.droplevel([0, 1]) # doctest: +SKIP Int64Index([1, 2], dtype='int64') >>> midx.droplevel(0) # doctest: +SKIP MultiIndex([('b', 1), ('y', 2)], ) >>> midx.names = [("a", "b"), "b", "c"] >>> midx.droplevel([('a', 'b')]) # doctest: +SKIP MultiIndex([('b', 1), ('y', 2)], names=['b', 'c']) """ names = self.names nlevels = self.nlevels if not isinstance(level, (tuple, list)): level = [level] for n in level: if isinstance(n, int) and (n > nlevels - 1): raise IndexError( "Too many levels: Index has only {} levels, not {}".format(nlevels, n + 1) ) if isinstance(n, (str, tuple)) and (n not in names): raise KeyError("Level {} not found".format(n)) if len(level) >= nlevels: raise ValueError( "Cannot remove {} levels from an index with {} " "levels: at least one level must be " "left.".format(len(level), nlevels) ) int_level = [n if isinstance(n, int) else names.index(n) for n in level] index_map = list(self._internal.index_map.items()) index_map = OrderedDict(index_map[c] for c in range(0, nlevels) if c not in int_level) sdf = self._internal.spark_frame sdf = sdf.select(*index_map.keys()) result = _InternalFrame(spark_frame=sdf, index_map=index_map) return DataFrame(result).index def symmetric_difference(self, other, result_name=None, sort=None): """ Compute the symmetric difference of two Index objects. Parameters ---------- other : Index or array-like result_name : str sort : True or None, default None Whether to sort the resulting index. * True : Attempt to sort the result. * None : Do not sort the result. Returns ------- symmetric_difference : Index Notes ----- ``symmetric_difference`` contains elements that appear in either ``idx1`` or ``idx2`` but not both. Equivalent to the Index created by ``idx1.difference(idx2) | idx2.difference(idx1)`` with duplicates dropped. Examples -------- >>> s1 = ks.Series([1, 2, 3, 4], index=[1, 2, 3, 4]) >>> s2 = ks.Series([1, 2, 3, 4], index=[2, 3, 4, 5]) >>> s1.index.symmetric_difference(s2.index) Int64Index([5, 1], dtype='int64') You can set name of result Index. >>> s1.index.symmetric_difference(s2.index, result_name='koalas') Int64Index([5, 1], dtype='int64', name='koalas') You can set sort to `True`, if you want to sort the resulting index. 
>>> s1.index.symmetric_difference(s2.index, sort=True) Int64Index([1, 5], dtype='int64') You can also use the ``^`` operator: >>> s1.index ^ s2.index Int64Index([5, 1], dtype='int64') """ if type(self) != type(other): raise NotImplementedError( "Doesn't support symmetric_difference between Index & MultiIndex for now" ) sdf_self = self._kdf._sdf.select(self._internal.index_spark_columns) sdf_other = other._kdf._sdf.select(other._internal.index_spark_columns) sdf_symdiff = sdf_self.union(sdf_other).subtract(sdf_self.intersect(sdf_other)) if sort: sdf_symdiff = sdf_symdiff.sort(self._internal.index_spark_columns) internal = _InternalFrame(spark_frame=sdf_symdiff, index_map=self._internal.index_map) result = Index(DataFrame(internal)) if result_name: result.name = result_name return result # TODO: return_indexer def sort_values(self, ascending=True): """ Return a sorted copy of the index. .. note:: This method is not supported for pandas when index has NaN value. pandas raises unexpected TypeError, but we support treating NaN as the smallest value. Parameters ---------- ascending : bool, default True Should the index values be sorted in an ascending order. Returns ------- sorted_index : ks.Index or ks.MultiIndex Sorted copy of the index. See Also -------- Series.sort_values : Sort values of a Series. DataFrame.sort_values : Sort values in a DataFrame. Examples -------- >>> idx = ks.Index([10, 100, 1, 1000]) >>> idx Int64Index([10, 100, 1, 1000], dtype='int64') Sort values in ascending order (default behavior). >>> idx.sort_values() Int64Index([1, 10, 100, 1000], dtype='int64') Sort values in descending order. >>> idx.sort_values(ascending=False) Int64Index([1000, 100, 10, 1], dtype='int64') Support for MultiIndex. >>> kidx = ks.MultiIndex.from_tuples([('a', 'x', 1), ('c', 'y', 2), ('b', 'z', 3)]) >>> kidx # doctest: +SKIP MultiIndex([('a', 'x', 1), ('c', 'y', 2), ('b', 'z', 3)], ) >>> kidx.sort_values() # doctest: +SKIP MultiIndex([('a', 'x', 1), ('b', 'z', 3), ('c', 'y', 2)], ) >>> kidx.sort_values(ascending=False) # doctest: +SKIP MultiIndex([('c', 'y', 2), ('b', 'z', 3), ('a', 'x', 1)], ) """ sdf = self._internal.spark_frame sdf = sdf.orderBy(self._internal.index_spark_columns, ascending=ascending) internal = _InternalFrame( spark_frame=sdf.select(self._internal.index_spark_columns), index_map=self._internal.index_map, ) return DataFrame(internal).index def sort(self, *args, **kwargs): """ Use sort_values instead. """ raise TypeError("cannot sort an Index object in-place, use sort_values instead") def min(self): """ Return the minimum value of the Index. Returns ------- scalar Minimum value. See Also -------- Index.max : Return the maximum value of the object. Series.min : Return the minimum value in a Series. DataFrame.min : Return the minimum values in a DataFrame. Examples -------- >>> idx = ks.Index([3, 2, 1]) >>> idx.min() 1 >>> idx = ks.Index(['c', 'b', 'a']) >>> idx.min() 'a' For a MultiIndex, the maximum is determined lexicographically. >>> idx = ks.MultiIndex.from_tuples([('a', 'x', 1), ('b', 'y', 2)]) >>> idx.min() ('a', 'x', 1) """ sdf = self._internal.spark_frame min_row = sdf.select(F.min(F.struct(self._internal.index_spark_columns))).head() result = tuple(min_row[0]) return result if len(result) > 1 else result[0] def max(self): """ Return the maximum value of the Index. Returns ------- scalar Maximum value. See Also -------- Index.min : Return the minimum value in an Index. Series.max : Return the maximum value in a Series. 
DataFrame.max : Return the maximum values in a DataFrame. Examples -------- >>> idx = pd.Index([3, 2, 1]) >>> idx.max() 3 >>> idx = pd.Index(['c', 'b', 'a']) >>> idx.max() 'c' For a MultiIndex, the maximum is determined lexicographically. >>> idx = ks.MultiIndex.from_tuples([('a', 'x', 1), ('b', 'y', 2)]) >>> idx.max() ('b', 'y', 2) """ sdf = self._internal.spark_frame max_row = sdf.select(F.max(F.struct(self._internal.index_spark_columns))).head() result = tuple(max_row[0]) return result if len(result) > 1 else result[0] def append(self, other): """ Append a collection of Index options together. Parameters ---------- other : Index Returns ------- appended : Index Examples -------- >>> kidx = ks.Index([10, 5, 0, 5, 10, 5, 0, 10]) >>> kidx Int64Index([10, 5, 0, 5, 10, 5, 0, 10], dtype='int64') >>> kidx.append(kidx) Int64Index([10, 5, 0, 5, 10, 5, 0, 10, 10, 5, 0, 5, 10, 5, 0, 10], dtype='int64') Support for MiltiIndex >>> kidx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y')]) >>> kidx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y')], ) >>> kidx.append(kidx) # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y'), ('a', 'x'), ('b', 'y')], ) """ if type(self) is not type(other): raise NotImplementedError( "append() between Index & MultiIndex currently is not supported" ) sdf_self = self._internal.spark_frame.select(self._internal.index_spark_columns) sdf_other = other._internal.spark_frame.select(other._internal.index_spark_columns) sdf_appended = sdf_self.union(sdf_other) # names should be kept when MultiIndex, but Index wouldn't keep its name. if isinstance(self, MultiIndex): index_map = self._internal.index_map else: index_map = OrderedDict( (idx_col, None) for idx_col in self._internal.index_spark_column_names ) internal = _InternalFrame(spark_frame=sdf_appended, index_map=index_map) return DataFrame(internal).index def argmax(self): """ Return a maximum argument indexer. Parameters ---------- skipna : bool, default True Returns ------- maximum argument indexer Examples -------- >>> kidx = ks.Index([10, 9, 8, 7, 100, 5, 4, 3, 100, 3]) >>> kidx Int64Index([10, 9, 8, 7, 100, 5, 4, 3, 100, 3], dtype='int64') >>> kidx.argmax() 4 """ sdf = self._internal.spark_frame.select(self._scol) sequence_col = verify_temp_column_name(sdf, "__distributed_sequence_column__") sdf = _InternalFrame.attach_distributed_sequence_column(sdf, column_name=sequence_col) # spark_frame here looks like below # +-----------------+---------------+ # |__index_level_0__|__index_value__| # +-----------------+---------------+ # | 0| 10| # | 4| 100| # | 2| 8| # | 3| 7| # | 6| 4| # | 5| 5| # | 7| 3| # | 8| 100| # | 1| 9| # +-----------------+---------------+ return sdf.orderBy(self._scol.desc(), F.col(sequence_col).asc()).first()[0] def argmin(self): """ Return a minimum argument indexer. Parameters ---------- skipna : bool, default True Returns ------- minimum argument indexer Examples -------- >>> kidx = ks.Index([10, 9, 8, 7, 100, 5, 4, 3, 100, 3]) >>> kidx Int64Index([10, 9, 8, 7, 100, 5, 4, 3, 100, 3], dtype='int64') >>> kidx.argmin() 7 """ sdf = self._internal.spark_frame.select(self._scol) sequence_col = verify_temp_column_name(sdf, "__distributed_sequence_column__") sdf = _InternalFrame.attach_distributed_sequence_column(sdf, column_name=sequence_col) return sdf.orderBy(self._scol.asc(), F.col(sequence_col).asc()).first()[0] def set_names(self, names, level=None, inplace=False): """ Set Index or MultiIndex name. Able to set new names partially and by level. 
Parameters ---------- names : label or list of label Name(s) to set. level : int, label or list of int or label, optional If the index is a MultiIndex, level(s) to set (None for all levels). Otherwise level must be None. inplace : bool, default False Modifies the object directly, instead of creating a new Index or MultiIndex. Returns ------- Index The same type as the caller or None if inplace is True. See Also -------- Index.rename : Able to set new names without level. Examples -------- >>> idx = ks.Index([1, 2, 3, 4]) >>> idx Int64Index([1, 2, 3, 4], dtype='int64') >>> idx.set_names('quarter') Int64Index([1, 2, 3, 4], dtype='int64', name='quarter') For MultiIndex >>> idx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y')]) >>> idx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y')], ) >>> idx.set_names(['kind', 'year'], inplace=True) >>> idx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y')], names=['kind', 'year']) >>> idx.set_names('species', level=0) # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y')], names=['species', 'year']) """ if isinstance(self, MultiIndex): if level is not None: self_names = self.names self_names[level] = names names = self_names return self.rename(name=names, inplace=inplace) def difference(self, other, sort=None): """ Return a new Index with elements from the index that are not in `other`. This is the set difference of two Index objects. Parameters ---------- other : Index or array-like sort : True or None, default None Whether to sort the resulting index. * True : Attempt to sort the result. * None : Do not sort the result. Returns ------- difference : Index Examples -------- >>> idx1 = ks.Index([2, 1, 3, 4]) >>> idx2 = ks.Index([3, 4, 5, 6]) >>> idx1.difference(idx2, sort=True) Int64Index([1, 2], dtype='int64') MultiIndex >>> midx1 = ks.MultiIndex.from_tuples([('a', 'x', 1), ('b', 'y', 2), ('c', 'z', 3)]) >>> midx2 = ks.MultiIndex.from_tuples([('a', 'x', 1), ('b', 'z', 2), ('k', 'z', 3)]) >>> midx1.difference(midx2) # doctest: +SKIP MultiIndex([('b', 'y', 2), ('c', 'z', 3)], ) """ if not is_list_like(other): raise TypeError("Input must be Index or array-like") if not isinstance(sort, (type(None), type(True))): raise ValueError( "The 'sort' keyword only takes the values of None or True; {} was passed.".format( sort ) ) # Handling MultiIndex if isinstance(self, ks.MultiIndex) and not isinstance(other, ks.MultiIndex): if not all([isinstance(item, tuple) for item in other]): raise TypeError("other must be a MultiIndex or a list of tuples") other = ks.MultiIndex.from_tuples(other) if not isinstance(other, ks.Index): other = ks.Index(other) sdf_self = self._internal.spark_frame sdf_other = other._internal.spark_frame idx_self = self._internal.index_spark_columns idx_other = other._internal.index_spark_columns sdf_diff = sdf_self.select(idx_self).subtract(sdf_other.select(idx_other)) internal = _InternalFrame(spark_frame=sdf_diff, index_map=self._internal.index_map) result = DataFrame(internal).index # Name(s) will be kept when only name(s) of (Multi)Index are the same. if isinstance(self, type(other)) and isinstance(self, ks.MultiIndex): if self.names == other.names: result.names = self.names elif isinstance(self, type(other)) and not isinstance(self, ks.MultiIndex): if self.name == other.name: result.name = self.name return result if sort is None else result.sort_values() @property def is_all_dates(self): """ Return if all data types of the index are datetime. 
remember that since Koalas does not support multiple data types in an index, so it returns True if any type of data is datetime. Examples -------- >>> from datetime import datetime >>> idx = ks.Index([datetime(2019, 1, 1, 0, 0, 0), datetime(2019, 2, 3, 0, 0, 0)]) >>> idx DatetimeIndex(['2019-01-01', '2019-02-03'], dtype='datetime64[ns]', freq=None) >>> idx.is_all_dates True >>> idx = ks.Index([datetime(2019, 1, 1, 0, 0, 0), None]) >>> idx DatetimeIndex(['2019-01-01', 'NaT'], dtype='datetime64[ns]', freq=None) >>> idx.is_all_dates True >>> idx = ks.Index([0, 1, 2]) >>> idx Int64Index([0, 1, 2], dtype='int64') >>> idx.is_all_dates False """ return isinstance(self.spark_type, TimestampType) def repeat(self, repeats: int) -> "Index": """ Repeat elements of a Index/MultiIndex. Returns a new Index/MultiIndex where each element of the current Index/MultiIndex is repeated consecutively a given number of times. Parameters ---------- repeats : int The number of repetitions for each element. This should be a non-negative integer. Repeating 0 times will return an empty Index. Returns ------- repeated_index : Index/MultiIndex Newly created Index/MultiIndex with repeated elements. See Also -------- Series.repeat : Equivalent function for Series. Examples -------- >>> idx = ks.Index(['a', 'b', 'c']) >>> idx Index(['a', 'b', 'c'], dtype='object') >>> idx.repeat(2) Index(['a', 'b', 'c', 'a', 'b', 'c'], dtype='object') For MultiIndex, >>> midx = ks.MultiIndex.from_tuples([('x', 'a'), ('x', 'b'), ('y', 'c')]) >>> midx # doctest: +SKIP MultiIndex([('x', 'a'), ('x', 'b'), ('y', 'c')], ) >>> midx.repeat(2) # doctest: +SKIP MultiIndex([('x', 'a'), ('x', 'b'), ('y', 'c'), ('x', 'a'), ('x', 'b'), ('y', 'c')], ) >>> midx.repeat(0) # doctest: +SKIP MultiIndex([], ) """ if not isinstance(repeats, int): raise ValueError("`repeats` argument must be integer, but got {}".format(type(repeats))) elif repeats < 0: raise ValueError("negative dimensions are not allowed") sdf = self._internal.spark_frame.select(self._internal.index_spark_columns) internal = _InternalFrame( spark_frame=sdf, index_map=OrderedDict(zip(sdf.columns, self._internal.index_names)) ) kdf = DataFrame(internal) # type: DataFrame if repeats == 0: return DataFrame(kdf._internal.with_filter(F.lit(False))).index else: return ks.concat([kdf] * repeats).index def asof(self, label): """ Return the label from the index, or, if not present, the previous one. Assuming that the index is sorted, return the passed index label if it is in the index, or return the previous index label if the passed one is not in the index. .. note:: This API is dependent on :meth:`Index.is_monotonic_increasing` which can be expensive. Parameters ---------- label : object The label up to which the method returns the latest index label. Returns ------- object The passed label if it is in the index. The previous label if the passed label is not in the sorted index or `NaN` if there is no such label. Examples -------- `Index.asof` returns the latest index label up to the passed label. >>> idx = ks.Index(['2013-12-31', '2014-01-02', '2014-01-03']) >>> idx.asof('2014-01-01') '2013-12-31' If the label is in the index, the method returns the passed label. >>> idx.asof('2014-01-02') '2014-01-02' If all of the labels in the index are later than the passed label, NaN is returned. 
>>> idx.asof('1999-01-02') nan """ sdf = self._internal._sdf if self.is_monotonic_increasing: sdf = sdf.select(self._scol).where(self._scol <= label).select(F.max(self._scol)) elif self.is_monotonic_decreasing: sdf = sdf.select(self._scol).where(self._scol >= label).select(F.min(self._scol)) else: raise ValueError("index must be monotonic increasing or decreasing") result = sdf.head()[0] return result if result is not None else np.nan def union(self, other, sort=None): """ Form the union of two Index objects. Parameters ---------- other : Index or array-like sort : bool or None, default None Whether to sort the resulting Index. Returns ------- union : Index Examples -------- Index >>> idx1 = ks.Index([1, 2, 3, 4]) >>> idx2 = ks.Index([3, 4, 5, 6]) >>> idx1.union(idx2).sort_values() Int64Index([1, 2, 3, 4, 5, 6], dtype='int64') MultiIndex >>> midx1 = ks.MultiIndex.from_tuples([("x", "a"), ("x", "b"), ("x", "c"), ("x", "d")]) >>> midx2 = ks.MultiIndex.from_tuples([("x", "c"), ("x", "d"), ("x", "e"), ("x", "f")]) >>> midx1.union(midx2).sort_values() # doctest: +SKIP MultiIndex([('x', 'a'), ('x', 'b'), ('x', 'c'), ('x', 'd'), ('x', 'e'), ('x', 'f')], ) """ sort = True if sort is None else sort sort = validate_bool_kwarg(sort, "sort") if type(self) is not type(other): if isinstance(self, MultiIndex): if not isinstance(other, list) or not all( [isinstance(item, tuple) for item in other] ): raise TypeError("other must be a MultiIndex or a list of tuples") other = MultiIndex.from_tuples(other) else: if isinstance(other, MultiIndex): # TODO: We can't support different type of values in a single column for now. raise NotImplementedError( "Union between Index and MultiIndex is not yet supported" ) elif isinstance(other, Series): other = other.to_frame().set_index(other.name).index elif isinstance(other, DataFrame): raise ValueError("Index data must be 1-dimensional") else: other = Index(other) sdf_self = self._internal._sdf.select(self._internal.index_spark_columns) sdf_other = other._internal._sdf.select(other._internal.index_spark_columns) sdf = sdf_self.union(sdf_other.subtract(sdf_self)) if isinstance(self, MultiIndex): sdf = sdf.drop_duplicates() if sort: sdf = sdf.sort(self._internal.index_spark_columns) internal = _InternalFrame(spark_frame=sdf, index_map=self._internal.index_map) return DataFrame(internal).index def __getattr__(self, item: str) -> Any: if hasattr(_MissingPandasLikeIndex, item): property_or_func = getattr(_MissingPandasLikeIndex, item) if isinstance(property_or_func, property): return property_or_func.fget(self) # type: ignore else: return partial(property_or_func, self) raise AttributeError("'Index' object has no attribute '{}'".format(item)) def __repr__(self): max_display_count = get_option("display.max_rows") if max_display_count is None: return repr(self.to_pandas()) pindex = self._kdf.head(max_display_count + 1).index._with_new_scol(self._scol).to_pandas() pindex_length = len(pindex) repr_string = repr(pindex[:max_display_count]) if pindex_length > max_display_count: footer = "\nShowing only the first {}".format(max_display_count) return repr_string + footer return repr_string def __iter__(self): return _MissingPandasLikeIndex.__iter__(self) def __xor__(self, other): return self.symmetric_difference(other) def __len__(self): return self.size class MultiIndex(Index): """ Koalas MultiIndex that corresponds to Pandas MultiIndex logically. This might hold Spark Column internally. 
:ivar _kdf: The parent dataframe :type _kdf: DataFrame :ivar _scol: Spark Column instance :type _scol: pyspark.Column See Also -------- Index : A single-level Index. Examples -------- >>> ks.DataFrame({'a': ['a', 'b', 'c']}, index=[[1, 2, 3], [4, 5, 6]]).index # doctest: +SKIP MultiIndex([(1, 4), (2, 5), (3, 6)], ) >>> ks.DataFrame({'a': [1, 2, 3]}, index=[list('abc'), list('def')]).index # doctest: +SKIP MultiIndex([('a', 'd'), ('b', 'e'), ('c', 'f')], ) """ def __init__(self, kdf: DataFrame): assert len(kdf._internal._index_map) > 1 scol = F.struct(kdf._internal.index_spark_columns) data_columns = kdf._sdf.select(scol).columns internal = kdf._internal.copy( spark_column=scol, column_labels=[(col, None) for col in data_columns], column_label_names=None, ) IndexOpsMixin.__init__(self, internal, kdf) def _with_new_scol(self, scol: spark.Column): raise NotImplementedError("Not supported for type MultiIndex") def any(self, *args, **kwargs): raise TypeError("cannot perform any with this index type: MultiIndex") def all(self, *args, **kwargs): raise TypeError("cannot perform all with this index type: MultiIndex") @staticmethod def from_tuples(tuples, sortorder=None, names=None): """ Convert list of tuples to MultiIndex. Parameters ---------- tuples : list / sequence of tuple-likes Each tuple is the index of one row/column. sortorder : int or None Level of sortedness (must be lexicographically sorted by that level). names : list / sequence of str, optional Names for the levels in the index. Returns ------- index : MultiIndex Examples -------- >>> tuples = [(1, 'red'), (1, 'blue'), ... (2, 'red'), (2, 'blue')] >>> ks.MultiIndex.from_tuples(tuples, names=('number', 'color')) # doctest: +SKIP MultiIndex([(1, 'red'), (1, 'blue'), (2, 'red'), (2, 'blue')], names=['number', 'color']) """ return DataFrame( index=pd.MultiIndex.from_tuples(tuples=tuples, sortorder=sortorder, names=names) ).index @staticmethod def from_arrays(arrays, sortorder=None, names=None): """ Convert arrays to MultiIndex. Parameters ---------- arrays: list / sequence of array-likes Each array-like gives one level’s value for each data point. len(arrays) is the number of levels. sortorder: int or None Level of sortedness (must be lexicographically sorted by that level). names: list / sequence of str, optional Names for the levels in the index. Returns ------- index: MultiIndex Examples -------- >>> arrays = [[1, 1, 2, 2], ['red', 'blue', 'red', 'blue']] >>> ks.MultiIndex.from_arrays(arrays, names=('number', 'color')) # doctest: +SKIP MultiIndex([(1, 'red'), (1, 'blue'), (2, 'red'), (2, 'blue')], names=['number', 'color']) """ return DataFrame( index=pd.MultiIndex.from_arrays(arrays=arrays, sortorder=sortorder, names=names) ).index @staticmethod def from_product(iterables, sortorder=None, names=None): """ Make a MultiIndex from the cartesian product of multiple iterables. Parameters ---------- iterables : list / sequence of iterables Each iterable has unique labels for each level of the index. sortorder : int or None Level of sortedness (must be lexicographically sorted by that level). names : list / sequence of str, optional Names for the levels in the index. Returns ------- index : MultiIndex See Also -------- MultiIndex.from_arrays : Convert list of arrays to MultiIndex. MultiIndex.from_tuples : Convert list of tuples to MultiIndex. Examples -------- >>> numbers = [0, 1, 2] >>> colors = ['green', 'purple'] >>> ks.MultiIndex.from_product([numbers, colors], ... 
names=['number', 'color']) # doctest: +SKIP MultiIndex([(0, 'green'), (0, 'purple'), (1, 'green'), (1, 'purple'), (2, 'green'), (2, 'purple')], names=['number', 'color']) """ return DataFrame( index=pd.MultiIndex.from_product(iterables=iterables, sortorder=sortorder, names=names) ).index @property def name(self) -> str: raise PandasNotImplementedError(class_name="pd.MultiIndex", property_name="name") @name.setter def name(self, name: str) -> None: raise PandasNotImplementedError(class_name="pd.MultiIndex", property_name="name") def _verify_for_rename(self, name): if is_list_like(name): if len(self._internal.index_map) != len(name): raise ValueError( "Length of new names must be {}, got {}".format( len(self._internal.index_map), len(name) ) ) return [n if n is None or isinstance(n, tuple) else (n,) for n in name] else: raise TypeError("Must pass list-like as `names`.") def swaplevel(self, i=-2, j=-1): """ Swap level i with level j. Calling this method does not change the ordering of the values. Parameters ---------- i : int, str, default -2 First level of index to be swapped. Can pass level name as string. Type of parameters can be mixed. j : int, str, default -1 Second level of index to be swapped. Can pass level name as string. Type of parameters can be mixed. Returns ------- MultiIndex A new MultiIndex. Examples -------- >>> midx = ks.MultiIndex.from_arrays([['a', 'b'], [1, 2]], names = ['word', 'number']) >>> midx # doctest: +SKIP MultiIndex([('a', 1), ('b', 2)], names=['word', 'number']) >>> midx.swaplevel(0, 1) # doctest: +SKIP MultiIndex([(1, 'a'), (2, 'b')], names=['number', 'word']) >>> midx.swaplevel('number', 'word') # doctest: +SKIP MultiIndex([(1, 'a'), (2, 'b')], names=['number', 'word']) """ for index in (i, j): if not isinstance(index, int) and index not in self.names: raise KeyError("Level %s not found" % index) i = i if isinstance(i, int) else self.names.index(i) j = j if isinstance(j, int) else self.names.index(j) for index in (i, j): if index >= len(self.names) or index < -len(self.names): raise IndexError( "Too many levels: Index has only %s levels, " "%s is not a valid level number" % (len(self.names), index) ) index_map = list(self._internal.index_map.items()) index_map[i], index_map[j], = index_map[j], index_map[i] result = DataFrame(self._kdf._internal.copy(index_map=OrderedDict(index_map))).index return result @property def levshape(self): """ A tuple with the length of each level. 
Examples -------- >>> midx = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')]) >>> midx # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y'), ('c', 'z')], ) >>> midx.levshape (3, 3) """ internal = self._internal result = internal._sdf.agg( *(F.countDistinct(c) for c in internal.index_spark_columns) ).collect()[0] return tuple(result) @staticmethod def _comparator_for_monotonic_increasing(data_type): if isinstance(data_type, BooleanType): return compare_allow_null else: return compare_null_last def _is_monotonic(self, order): if order == "increasing": return self._is_monotonic_increasing().all() else: return self._is_monotonic_decreasing().all() def _is_monotonic_increasing(self): scol = self._scol window = Window.orderBy(NATURAL_ORDER_COLUMN_NAME).rowsBetween(-1, -1) prev = F.lag(scol, 1).over(window) cond = F.lit(True) for field in self.spark_type[::-1]: left = scol.getField(field.name) right = prev.getField(field.name) compare = MultiIndex._comparator_for_monotonic_increasing(field.dataType) cond = F.when(left.eqNullSafe(right), cond).otherwise( compare(left, right, spark.Column.__gt__) ) cond = prev.isNull() | cond internal = _InternalFrame( spark_frame=self._internal.spark_frame.select( self._internal.index_spark_columns + [cond] ), index_map=self._internal.index_map, ) return _col(DataFrame(internal)) @staticmethod def _comparator_for_monotonic_decreasing(data_type): if isinstance(data_type, StringType): return compare_disallow_null elif isinstance(data_type, BooleanType): return compare_allow_null elif isinstance(data_type, NumericType): return compare_null_last else: return compare_null_first def _is_monotonic_decreasing(self): scol = self._scol window = Window.orderBy(NATURAL_ORDER_COLUMN_NAME).rowsBetween(-1, -1) prev = F.lag(scol, 1).over(window) cond = F.lit(True) for field in self.spark_type[::-1]: left = scol.getField(field.name) right = prev.getField(field.name) compare = MultiIndex._comparator_for_monotonic_decreasing(field.dataType) cond = F.when(left.eqNullSafe(right), cond).otherwise( compare(left, right, spark.Column.__lt__) ) cond = prev.isNull() | cond internal = _InternalFrame( spark_frame=self._internal.spark_frame.select( self._internal.index_spark_columns + [cond] ), index_map=self._internal.index_map, ) return _col(DataFrame(internal)) def to_frame(self, index=True, name=None) -> DataFrame: """ Create a DataFrame with the levels of the MultiIndex as columns. Column ordering is determined by the DataFrame constructor with data as a dict. Parameters ---------- index : boolean, default True Set the index of the returned DataFrame as the original MultiIndex. name : list / sequence of strings, optional The passed names should substitute index level names. Returns ------- DataFrame : a DataFrame containing the original MultiIndex data. See Also -------- DataFrame Examples -------- >>> tuples = [(1, 'red'), (1, 'blue'), ... (2, 'red'), (2, 'blue')] >>> idx = ks.MultiIndex.from_tuples(tuples, names=('number', 'color')) >>> idx # doctest: +SKIP MultiIndex([(1, 'red'), (1, 'blue'), (2, 'red'), (2, 'blue')], names=['number', 'color']) >>> idx.to_frame() # doctest: +NORMALIZE_WHITESPACE number color number color 1 red 1 red blue 1 blue 2 red 2 red blue 2 blue By default, the original Index is reused. 
To enforce a new Index: >>> idx.to_frame(index=False) number color 0 1 red 1 1 blue 2 2 red 3 2 blue To override the name of the resulting column, specify `name`: >>> idx.to_frame(name=['n', 'c']) # doctest: +NORMALIZE_WHITESPACE n c number color 1 red 1 red blue 1 blue 2 red 2 red blue 2 blue """ if name is None: name = [ name if name is not None else (str(i),) for i, name in enumerate(self._internal.index_names) ] elif is_list_like(name): if len(name) != len(self._internal.index_map): raise ValueError("'name' should have same length as number of levels on index.") name = [n if isinstance(n, tuple) else (n,) for n in name] else: raise TypeError("'name' must be a list / sequence of column names.") sdf = self._internal.spark_frame.select( [ scol.alias(name_like_string(label)) for scol, label in zip(self._internal.index_spark_columns, name) ] + [NATURAL_ORDER_COLUMN_NAME] ) if index: index_map = OrderedDict( (name_like_string(label), n) for label, n in zip(name, self._internal.index_names) ) else: index_map = None # type: ignore internal = _InternalFrame( spark_frame=sdf, index_map=index_map, column_labels=name, data_spark_columns=[scol_for(sdf, name_like_string(label)) for label in name], ) return DataFrame(internal) def to_pandas(self) -> pd.MultiIndex: """ Return a pandas MultiIndex. .. note:: This method should only be used if the resulting Pandas object is expected to be small, as all the data is loaded into the driver's memory. Examples -------- >>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... columns=['dogs', 'cats'], ... index=[list('abcd'), list('efgh')]) >>> df['dogs'].index.to_pandas() # doctest: +SKIP MultiIndex([('a', 'e'), ('b', 'f'), ('c', 'g'), ('d', 'h')], ) """ # TODO: We might need to handle internal state change. # So far, we don't have any functions to change the internal state of MultiIndex except for # series-like operations. In that case, it creates new Index object instead of MultiIndex. return self._kdf[[]]._to_internal_pandas().index toPandas = to_pandas def nunique(self, dropna=True): raise NotImplementedError("isna is not defined for MultiIndex") # TODO: add 'name' parameter after pd.MultiIndex.name is implemented def copy(self): """ Make a copy of this object. """ return MultiIndex(self._kdf.copy()) def symmetric_difference(self, other, result_name=None, sort=None): """ Compute the symmetric difference of two MultiIndex objects. Parameters ---------- other : Index or array-like result_name : list sort : True or None, default None Whether to sort the resulting index. * True : Attempt to sort the result. * None : Do not sort the result. Returns ------- symmetric_difference : MiltiIndex Notes ----- ``symmetric_difference`` contains elements that appear in either ``idx1`` or ``idx2`` but not both. Equivalent to the Index created by ``idx1.difference(idx2) | idx2.difference(idx1)`` with duplicates dropped. Examples -------- >>> midx1 = pd.MultiIndex([['lama', 'cow', 'falcon'], ... ['speed', 'weight', 'length']], ... [[0, 0, 0, 1, 1, 1, 2, 2, 2], ... [0, 0, 0, 0, 1, 2, 0, 1, 2]]) >>> midx2 = pd.MultiIndex([['koalas', 'cow', 'falcon'], ... ['speed', 'weight', 'length']], ... [[0, 0, 0, 1, 1, 1, 2, 2, 2], ... [0, 0, 0, 0, 1, 2, 0, 1, 2]]) >>> s1 = ks.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3], ... index=midx1) >>> s2 = ks.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3], ... index=midx2) >>> s1.index.symmetric_difference(s2.index) # doctest: +SKIP MultiIndex([('koalas', 'speed'), ( 'lama', 'speed')], ) You can set names of result Index. 
>>> s1.index.symmetric_difference(s2.index, result_name=['a', 'b']) # doctest: +SKIP MultiIndex([('koalas', 'speed'), ( 'lama', 'speed')], names=['a', 'b']) You can set sort to `True`, if you want to sort the resulting index. >>> s1.index.symmetric_difference(s2.index, sort=True) # doctest: +SKIP MultiIndex([('koalas', 'speed'), ( 'lama', 'speed')], ) You can also use the ``^`` operator: >>> s1.index ^ s2.index # doctest: +SKIP MultiIndex([('koalas', 'speed'), ( 'lama', 'speed')], ) """ if type(self) != type(other): raise NotImplementedError( "Doesn't support symmetric_difference between Index & MultiIndex for now" ) sdf_self = self._kdf._sdf.select(self._internal.index_spark_columns) sdf_other = other._kdf._sdf.select(other._internal.index_spark_columns) sdf_symdiff = sdf_self.union(sdf_other).subtract(sdf_self.intersect(sdf_other)) if sort: sdf_symdiff = sdf_symdiff.sort(self._internal.index_spark_columns) internal = _InternalFrame(spark_frame=sdf_symdiff, index_map=self._internal.index_map) result = MultiIndex(DataFrame(internal)) if result_name: result.names = result_name return result # TODO: ADD error parameter def drop(self, codes, level=None): """ Make new MultiIndex with passed list of labels deleted Parameters ---------- codes : array-like Must be a list of tuples level : int or level name, default None Returns ------- dropped : MultiIndex Examples -------- >>> index = ks.MultiIndex.from_tuples([('a', 'x'), ('b', 'y'), ('c', 'z')]) >>> index # doctest: +SKIP MultiIndex([('a', 'x'), ('b', 'y'), ('c', 'z')], ) >>> index.drop(['a']) # doctest: +SKIP MultiIndex([('b', 'y'), ('c', 'z')], ) >>> index.drop(['x', 'y'], level=1) # doctest: +SKIP MultiIndex([('c', 'z')], ) """ sdf = self._internal.spark_frame index_scols = self._internal.index_spark_columns if level is None: scol = index_scols[0] elif isinstance(level, int): scol = index_scols[level] else: spark_column_name = None for index_spark_column_name, index_name in self._internal.index_map.items(): if not isinstance(level, tuple): level = (level,) if level == index_name: if spark_column_name is not None: raise ValueError( "The name {} occurs multiple times, use a level number".format( name_like_string(level) ) ) spark_column_name = index_spark_column_name if spark_column_name is None: raise KeyError("Level {} not found".format(name_like_string(level))) scol = scol_for(sdf, spark_column_name) sdf = sdf[~scol.isin(codes)] return MultiIndex( DataFrame(_InternalFrame(spark_frame=sdf, index_map=self._kdf._internal.index_map)) ) def value_counts(self, normalize=False, sort=True, ascending=False, bins=None, dropna=True): if ( LooseVersion(pyspark.__version__) < LooseVersion("2.4") and default_session().conf.get("spark.sql.execution.arrow.enabled") == "true" and isinstance(self, MultiIndex) ): raise RuntimeError( "if you're using pyspark < 2.4, set conf " "'spark.sql.execution.arrow.enabled' to 'false' " "for using this function with MultiIndex" ) return super(MultiIndex, self).value_counts( normalize=normalize, sort=sort, ascending=ascending, bins=bins, dropna=dropna ) value_counts.__doc__ = IndexOpsMixin.value_counts.__doc__ def argmax(self): raise TypeError("reduction operation 'argmax' not allowed for this dtype") def argmin(self): raise TypeError("reduction operation 'argmin' not allowed for this dtype") def asof(self, label): raise NotImplementedError( "only the default get_loc method is currently supported for MultiIndex" ) @property def is_all_dates(self): """ is_all_dates always returns False for MultiIndex Examples -------- >>> 
from datetime import datetime >>> idx = ks.MultiIndex.from_tuples( ... [(datetime(2019, 1, 1, 0, 0, 0), datetime(2019, 1, 1, 0, 0, 0)), ... (datetime(2019, 1, 1, 0, 0, 0), datetime(2019, 1, 1, 0, 0, 0))]) >>> idx # doctest: +SKIP MultiIndex([('2019-01-01', '2019-01-01'), ('2019-01-01', '2019-01-01')], ) >>> idx.is_all_dates False """ return False def __getattr__(self, item: str) -> Any: if hasattr(_MissingPandasLikeMultiIndex, item): property_or_func = getattr(_MissingPandasLikeMultiIndex, item) if isinstance(property_or_func, property): return property_or_func.fget(self) # type: ignore else: return partial(property_or_func, self) raise AttributeError("'MultiIndex' object has no attribute '{}'".format(item)) def __repr__(self): max_display_count = get_option("display.max_rows") if max_display_count is None: return repr(self.to_pandas()) pindex = self._kdf.head(max_display_count + 1).index.to_pandas() pindex_length = len(pindex) repr_string = repr(pindex[:max_display_count]) if pindex_length > max_display_count: footer = "\nShowing only the first {}".format(max_display_count) return repr_string + footer return repr_string def __iter__(self): return _MissingPandasLikeMultiIndex.__iter__(self)
1
14,714
Too long here (104 > 100 characters).
databricks-koalas
py
@@ -210,7 +210,14 @@ namespace Nethermind.DataMarketplace.Subprotocols } Logger.Warn($"GETTING MESSAGE: ndm.{NdmMessageCode.GetDescription(message.PacketType)}"); - MessageHandlers[message.PacketType](message); + try + { + MessageHandlers[message.PacketType](message); + } + catch(KeyNotFoundException) + { + Logger.Error("$There is no handler registered for this message"); + } } protected virtual void Handle(HiMessage message)
1
// Copyright (c) 2018 Demerzel Solutions Limited // This file is part of the Nethermind library. // // The Nethermind library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The Nethermind library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the Nethermind. If not, see <http://www.gnu.org/licenses/>. using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Nethermind.Core; using Nethermind.Core.Crypto; using Nethermind.Crypto; using Nethermind.Logging; using Nethermind.DataMarketplace.Channels; using Nethermind.DataMarketplace.Consumers.Shared; using Nethermind.DataMarketplace.Core.Domain; using Nethermind.DataMarketplace.Core.Services; using Nethermind.DataMarketplace.Subprotocols.Messages; using Nethermind.Int256; using Nethermind.Network; using Nethermind.Network.P2P; using Nethermind.Network.P2P.Subprotocols; using Nethermind.Network.Rlpx; using Nethermind.Stats; using Nethermind.Stats.Model; using Nethermind.Wallet; using Metrics = Nethermind.DataMarketplace.Consumers.Metrics; namespace Nethermind.DataMarketplace.Subprotocols { public class NdmSubprotocol : ProtocolHandlerBase, INdmPeer { protected readonly IDictionary<int, Action<Packet>> MessageHandlers; protected int DisposedValue; protected int DisconnectedValue; protected readonly BlockingCollection<Request<GetDepositApprovalsMessage, DepositApproval[]>> DepositApprovalsRequests = new BlockingCollection<Request<GetDepositApprovalsMessage, DepositApproval[]>>(); protected readonly BlockingCollection<Request<RequestEthMessage, FaucetResponse>> RequestEthRequests = new BlockingCollection<Request<RequestEthMessage, FaucetResponse>>(); protected readonly BlockingCollection<Request<DataRequestMessage, DataRequestResult>> DataRequestResultRequests = new BlockingCollection<Request<DataRequestMessage, DataRequestResult>>(); protected readonly IEcdsa Ecdsa; protected readonly IWallet Wallet; protected readonly INdmFaucet Faucet; protected readonly PublicKey ConfiguredNodeId; protected readonly IConsumerService ConsumerService; protected readonly INdmConsumerChannelManager NdmConsumerChannelManager; protected Address ConfiguredProviderAddress; protected Address ConfiguredConsumerAddress; protected readonly bool VerifySignature; protected bool HiReceived; public override string Name => "ndm"; protected override TimeSpan InitTimeout => Timeouts.NdmHi; public override byte ProtocolVersion { get; protected set; } = 1; public override string ProtocolCode => Protocol.Ndm; public override int MessageIdSpaceSize => 0x1F; public override bool HasAvailableCapability(Capability capability) => false; public override bool HasAgreedCapability(Capability capability) => false; public override void AddSupportedCapability(Capability capability) { } public override event EventHandler<ProtocolInitializedEventArgs>? ProtocolInitialized; public override event EventHandler<ProtocolEventArgs> SubprotocolRequested { add { } remove { } } public PublicKey NodeId => Session.RemoteNodeId; public Address? 
ConsumerAddress { get; protected set; } public Address? ProviderAddress { get; protected set; } public bool IsConsumer => !(ConsumerAddress is null) && ConsumerAddress != Address.Zero; public bool IsProvider => !(ProviderAddress is null) && ProviderAddress != Address.Zero; public NdmSubprotocol( ISession p2PSession, INodeStatsManager nodeStatsManager, IMessageSerializationService serializer, ILogManager logManager, IConsumerService consumerService, INdmConsumerChannelManager ndmConsumerChannelManager, IEcdsa ecdsa, IWallet wallet, INdmFaucet faucet, PublicKey configuredNodeId, Address configuredProviderAddress, Address configuredConsumerAddress, bool verifySignature = true) : base(p2PSession, nodeStatsManager, serializer, logManager) { Ecdsa = ecdsa; Wallet = wallet; Faucet = faucet; ConfiguredNodeId = configuredNodeId; ConsumerService = consumerService; NdmConsumerChannelManager = ndmConsumerChannelManager; ConfiguredProviderAddress = configuredProviderAddress; ConfiguredConsumerAddress = configuredConsumerAddress; VerifySignature = verifySignature; MessageHandlers = InitMessageHandlers(); } private IDictionary<int, Action<Packet>> InitMessageHandlers() => new Dictionary<int, Action<Packet>> { [NdmMessageCode.Hi] = message => Handle(Deserialize<HiMessage>(message.Data)), [NdmMessageCode.DataAssets] = message => Handle(Deserialize<DataAssetsMessage>(message.Data)), [NdmMessageCode.DataAsset] = message => Handle(Deserialize<DataAssetMessage>(message.Data)), [NdmMessageCode.DataAssetStateChanged] = message => Handle(Deserialize<DataAssetStateChangedMessage>(message.Data)), [NdmMessageCode.DataAssetRemoved] = message => Handle(Deserialize<DataAssetRemovedMessage>(message.Data)), [NdmMessageCode.DataRequestResult] = message => Handle(Deserialize<DataRequestResultMessage>(message.Data)), [NdmMessageCode.DataAssetData] = message => Handle(Deserialize<DataAssetDataMessage>(message.Data)), [NdmMessageCode.InvalidData] = message => Handle(Deserialize<InvalidDataMessage>(message.Data)), [NdmMessageCode.SessionStarted] = message => Handle(Deserialize<SessionStartedMessage>(message.Data)), [NdmMessageCode.SessionFinished] = message => Handle(Deserialize<SessionFinishedMessage>(message.Data)), [NdmMessageCode.DataStreamEnabled] = message => Handle(Deserialize<DataStreamEnabledMessage>(message.Data)), [NdmMessageCode.DataStreamDisabled] = message => Handle(Deserialize<DataStreamDisabledMessage>(message.Data)), [NdmMessageCode.DataAvailability] = message => Handle(Deserialize<DataAvailabilityMessage>(message.Data)), [NdmMessageCode.RequestDataDeliveryReceipt] = message => Handle(Deserialize<RequestDataDeliveryReceiptMessage>(message.Data)), [NdmMessageCode.EarlyRefundTicket] = message => Handle(Deserialize<EarlyRefundTicketMessage>(message.Data)), [NdmMessageCode.DepositApprovalConfirmed] = message => Handle(Deserialize<DepositApprovalConfirmedMessage>(message.Data)), [NdmMessageCode.DepositApprovalRejected] = message => Handle(Deserialize<DepositApprovalRejectedMessage>(message.Data)), [NdmMessageCode.DepositApprovals] = message => Handle(Deserialize<DepositApprovalsMessage>(message.Data)), [NdmMessageCode.ProviderAddressChanged] = message => Handle(Deserialize<ProviderAddressChangedMessage>(message.Data)), [NdmMessageCode.EthRequested] = message => Handle(Deserialize<EthRequestedMessage>(message.Data)), [NdmMessageCode.GraceUnitsExceeded] = message => Handle(Deserialize<GraceUnitsExceededMessage>(message.Data)) }; public override void Init() { try { Signature signature; if (VerifySignature) { if 
(Logger.IsInfo) Logger.Info("Signing Hi message for NDM P2P session..."); var hash = Keccak.Compute(ConfiguredNodeId.Address.Bytes); signature = Wallet.Sign(hash, ConfiguredNodeId.Address); if (Logger.IsInfo) Logger.Info("Signed Hi message for NDM P2P session."); } else { signature = new Signature(1, 1, 27); if (Logger.IsInfo) Logger.Info("Signing Hi message for NDM P2P was skipped."); } if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: hi"); Send(new HiMessage(ProtocolVersion, ConfiguredProviderAddress, ConfiguredConsumerAddress, ConfiguredNodeId, signature)); CheckProtocolInitTimeout().ContinueWith(x => { if (x.IsFaulted && Logger.IsError) { Logger.Error("Error during NDM protocol handler timeout logic", x.Exception); } }); } catch (Exception ex) { if (Logger.IsError) Logger.Error(ex.ToString(), ex); Session.InitiateDisconnect(DisconnectReason.NdmInvalidHiSignature, "Invalid NDM signature for Hi message."); throw; } } public override void HandleMessage(Packet message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} {nameof(NdmSubprotocol)} handling a message with code {message.PacketType}."); if (message.PacketType == NdmMessageCode.Hi) { if (Logger.IsInfo) Logger.Info("NDM Received Hi"); } if (message.PacketType != NdmMessageCode.Hi && !HiReceived) { throw new SubprotocolException($"{Session.RemoteNodeId}" + $"No {nameof(HiReceived)} received prior to communication."); } Logger.Warn($"GETTING MESSAGE: ndm.{NdmMessageCode.GetDescription(message.PacketType)}"); MessageHandlers[message.PacketType](message); } protected virtual void Handle(HiMessage message) { if (HiReceived) { throw new SubprotocolException($"{nameof(HiMessage)} has already been received in the past"); } HiReceived = true; if (Logger.IsTrace) { if (Logger.IsInfo) { Logger.Info($"{Session.RemoteNodeId} NDM received hi with" + Environment.NewLine + $" prot version\t{message.ProtocolVersion}" + Environment.NewLine + $" provider address\t{message.ProviderAddress}" + Environment.NewLine + $" consumer address\t{message.ConsumerAddress}" + Environment.NewLine + $" node id\t{message.NodeId}"); } } ProviderAddress = message.ProviderAddress; ConsumerAddress = message.ConsumerAddress; if (!(IsConsumer || IsProvider)) { if (Logger.IsWarn) Logger.Warn("NDM peer is neither provider nor consumer (no addresses configured), skipping subprotocol connection."); Session.InitiateDisconnect(DisconnectReason.NdmPeerAddressesNotConfigured, "Addresses not configured for NDM peer."); return; } if (VerifySignature) { if (Logger.IsInfo) Logger.Info("Verifying signature for NDM P2P session..."); var hash = Keccak.Compute(message.NodeId.Bytes); var address = Ecdsa.RecoverPublicKey(message.Signature, hash).Address; if (!message.NodeId.Address.Equals(address)) { if (Logger.IsError) Logger.Error($"Invalid signature: '{message.NodeId.Address}' <> '{address}'."); Session.InitiateDisconnect(DisconnectReason.NdmInvalidHiSignature, "Invalid NDM signature for Hi message."); return; } if (Logger.IsInfo) Logger.Info("NDM P2P session was verified successfully."); } else { if (Logger.IsInfo) Logger.Info("NDM P2P signature verification was skipped."); } ReceivedProtocolInitMsg(message); var eventArgs = new NdmProtocolInitializedEventArgs(this) { Protocol = message.Protocol, ProtocolVersion = message.ProtocolVersion, ProviderAddress = message.ProviderAddress, ConsumerAddress = message.ConsumerAddress }; ProtocolInitialized?.Invoke(this, eventArgs); if (!IsProvider) { return; } ConsumerService.AddProviderPeer(this); 
SendGetDataAssets(); SendGetDepositApprovals().ContinueWith(async t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); return; } await ConsumerService.UpdateDepositApprovalsAsync(t.Result, message.ProviderAddress); }); } public override void DisconnectProtocol(DisconnectReason disconnectReason, string details) { if (Interlocked.Exchange(ref DisconnectedValue, 1) == 1) { return; } ConsumerService.FinishSessionsAsync(this).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void SendGetDataAssets() { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: getdataassets"); Send(new GetDataAssetsMessage()); } private void Handle(DataAssetStateChangedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: dataassetstatechanged"); ConsumerService.ChangeDataAssetState(message.DataAssetId, message.State); } private void Handle(DataAssetMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: dataasset"); ConsumerService.AddDiscoveredDataAsset(message.DataAsset, this); } private void Handle(DataAssetRemovedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: dataassetremoved"); ConsumerService.RemoveDiscoveredDataAsset(message.DataAssetId); } public virtual void ChangeConsumerAddress(Address address) { if (Logger.IsInfo) Logger.Info($"Changed address for consumer: '{ConsumerAddress}' -> '{address}'."); var wasConsumer = IsConsumer; ConsumerAddress = address; if (wasConsumer || !IsConsumer) { return; } } public virtual void ChangeProviderAddress(Address address) { if (Logger.IsInfo) Logger.Info($"Changed address for provider: '{ProviderAddress}' -> '{address}'."); var wasProvider = IsProvider; ProviderAddress = address; if (wasProvider || !IsProvider) { return; } ConsumerService.AddProviderPeer(this); SendGetDataAssets(); SendGetDepositApprovals().ContinueWith(async t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); return; } await ConsumerService.UpdateDepositApprovalsAsync(t.Result, ProviderAddress); }); } public void ChangeHostConsumerAddress(Address address) { ConfiguredConsumerAddress = address; } public void ChangeHostProviderAddress(Address address) { ConfiguredProviderAddress = address; } public void SendConsumerAddressChanged(Address consumer) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: consumeraddresschanged"); Send(new ConsumerAddressChangedMessage(consumer)); } private void Handle(ProviderAddressChangedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: provideraddresschanged"); ConsumerService.ChangeProviderAddressAsync(this, message.Address) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } public async Task<DataRequestResult> SendDataRequestAsync(DataRequest dataRequest, uint consumedUnits, CancellationToken? token = null) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: senddatarequest"); var cancellationToken = token ?? 
CancellationToken.None; var message = new DataRequestMessage(dataRequest, consumedUnits); var request = new Request<DataRequestMessage, DataRequestResult>(message); DataRequestResultRequests.Add(request, cancellationToken); Send(request.Message); var task = request.CompletionSource.Task; var firstTask = await Task.WhenAny(task, Task.Delay(Timeouts.NdmDataRequestResult, cancellationToken)); if (firstTask.IsCanceled) { cancellationToken.ThrowIfCancellationRequested(); } if (firstTask != task) { throw new TimeoutException($"{Session.RemoteNodeId} Request timeout in {nameof(DataRequestMessage)}"); } return task.Result; } private void Handle(DataRequestResultMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: datarequestresult"); bool success = DataRequestResultRequests.TryTake(out var request); if (!success) { throw new SubprotocolException("Received a reponse for which no request has been made."); } request?.CompletionSource.SetResult(message.Result); } private void Handle(EarlyRefundTicketMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: earlyrefundticket"); ConsumerService.SetEarlyRefundTicketAsync(message.Ticket, message.Reason).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(SessionStartedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: sessionstarted"); ConsumerService.StartSessionAsync(message.Session, this); } public void SendFinishSession(Keccak depositId) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: finishsession"); Send(new FinishSessionMessage(depositId)); } public void SendEnableDataStream(Keccak depositId, string client, string?[] args) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: enabledatastream"); Send(new EnableDataStreamMessage(depositId, client, args)); } public void SendDisableDataStream(Keccak depositId, string client) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: disabledatastream"); Send(new DisableDataStreamMessage(depositId, client)); } public void SendRequestDepositApproval(Keccak assetId, Address consumer, string kyc) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: requestdepositapproval"); Send(new RequestDepositApprovalMessage(assetId, consumer, kyc)); } private void Handle(SessionFinishedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: sessionfinished"); ConsumerService.FinishSessionAsync(message.Session, this).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(DepositApprovalConfirmedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: depositapprovalconfirmed"); ConsumerService.ConfirmDepositApprovalAsync(message.DataAssetId, message.Consumer) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(DepositApprovalRejectedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: depositapprovalrejected"); ConsumerService.RejectDepositApprovalAsync(message.DataAssetId, message.Consumer) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", 
t.Exception); } }); } public async Task<IReadOnlyList<DepositApproval>> SendGetDepositApprovals(Keccak? dataAssetId = null, bool onlyPending = false, CancellationToken? token = null) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: getdepositapprovals"); var cancellationToken = token ?? CancellationToken.None; var message = new GetDepositApprovalsMessage(dataAssetId, onlyPending); var request = new Request<GetDepositApprovalsMessage, DepositApproval[]>(message); DepositApprovalsRequests.Add(request, cancellationToken); Send(request.Message); var task = request.CompletionSource.Task; var firstTask = await Task.WhenAny(task, Task.Delay(Timeouts.NdmDepositApproval, cancellationToken)); if (firstTask.IsCanceled) { cancellationToken.ThrowIfCancellationRequested(); } if (firstTask != task) { throw new TimeoutException($"{Session.RemoteNodeId} Request timeout in " + $"{nameof(GetDepositApprovalsMessage)}"); } return task.Result; } private void Handle(DepositApprovalsMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: depositapprovals"); var success = DepositApprovalsRequests.TryTake(out var request); if (!success) { throw new SubprotocolException("Received a reponse for which no request has been made."); } request?.CompletionSource.SetResult(message.DepositApprovals); } private void Handle(DataStreamEnabledMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: datastreamenabled"); ConsumerService.SetEnabledDataStreamAsync(message.DepositId, message.Client, message.Args) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(DataStreamDisabledMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: datastreamdisabled"); ConsumerService.SetDisabledDataStreamAsync(message.DepositId, message.Client) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(EthRequestedMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: ethrequested"); bool success = RequestEthRequests.TryTake(out var request); if (!success) { throw new SubprotocolException("Received a reponse for which no request has been made."); } request?.CompletionSource.SetResult(message.Response); } private void Handle(GraceUnitsExceededMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: graceunitsexceeded"); ConsumerService.HandleGraceUnitsExceededAsync(message.DepositId, message.ConsumedUnits, message.GraceUnits) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(DataAssetDataMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: dataassetdata"); Metrics.ReceivedData++; ConsumerService.SetUnitsAsync(message.DepositId, message.ConsumedUnits).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); NdmConsumerChannelManager.PublishAsync(message.DepositId, message.Client, message.Data) .ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } private void Handle(InvalidDataMessage message) { if (Logger.IsTrace) 
Logger.Trace($"{Session.RemoteNodeId} NDM received: invaliddata"); ConsumerService.HandleInvalidDataAsync(message.DepositId, message.Reason).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); return; } if (Logger.IsTrace) Logger.Trace($"Received invalid data for deposit: '{message.DepositId}', reason: {message.Reason}"); }); } private void Handle(DataAssetsMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: dataassets"); ConsumerService.AddDiscoveredDataAssets(message.DataAssets, this); } private void Handle(DataAvailabilityMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: dataavailability"); ConsumerService.SetDataAvailabilityAsync(message.DepositId, message.DataAvailability).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } public async Task<FaucetResponse> SendRequestEthAsync(Address address, UInt256 value, CancellationToken? token = null) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: requesteth"); var cancellationToken = token ?? CancellationToken.None; var message = new RequestEthMessage(address, value); var request = new Request<RequestEthMessage, FaucetResponse>(message); RequestEthRequests.Add(request, cancellationToken); Send(request.Message); var task = request.CompletionSource.Task; var firstTask = await Task.WhenAny(task, Task.Delay(Timeouts.NdmEthRequest, cancellationToken)); if (firstTask.IsCanceled) { cancellationToken.ThrowIfCancellationRequested(); } if (firstTask != task) { throw new TimeoutException($"{Session.RemoteNodeId} Request timeout in {nameof(RequestEthMessage)}"); } return task.Result; } private void Handle(RequestDataDeliveryReceiptMessage message) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM received: requestdatadeliveryreceipt"); ConsumerService.SendDataDeliveryReceiptAsync(message.Request).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } public void SendDataDeliveryReceipt(Keccak depositId, DataDeliveryReceipt receipt) { if (Logger.IsTrace) Logger.Trace($"{Session.RemoteNodeId} NDM sending: datadeliveryreceipt"); Send(new DataDeliveryReceiptMessage(depositId, receipt)); } public override void Dispose() { if (Interlocked.Exchange(ref DisposedValue, 1) == 1) { return; } try { DepositApprovalsRequests?.CompleteAdding(); DepositApprovalsRequests?.Dispose(); } catch (ObjectDisposedException) { } ConsumerService.FinishSessionsAsync(this).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); } protected class Request<TMsg, TResult> { public Request(TMsg message) { CompletionSource = new TaskCompletionSource<TResult>(); Message = message; } public TMsg Message { get; } public TaskCompletionSource<TResult> CompletionSource { get; } } } }
1
24,560
Use TryGetValue instead of catching the exception (see the sketch after this entry).
NethermindEth-nethermind
.cs
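The review comment on the row above asks for a TryGetValue lookup rather than catching KeyNotFoundException around the handler dispatch. Below is a minimal, self-contained C# sketch of that pattern; the Packet type, method name, and logging call here are illustrative stand-ins and not the project's actual code.

using System;
using System.Collections.Generic;

public class Packet
{
    // Only the field needed for dispatch is modelled in this sketch.
    public int PacketType { get; set; }
}

public static class HandlerDispatchSketch
{
    public static void Dispatch(IDictionary<int, Action<Packet>> handlers, Packet message)
    {
        // TryGetValue returns false for an unknown packet type instead of throwing,
        // so the missing-handler case is handled without exception overhead.
        if (handlers.TryGetValue(message.PacketType, out Action<Packet> handler))
        {
            handler(message);
        }
        else
        {
            Console.Error.WriteLine($"No handler registered for packet type {message.PacketType}");
        }
    }
}

Usage would be a dictionary keyed by message code, e.g. handlers[5] = msg => Console.WriteLine("hi"), followed by HandlerDispatchSketch.Dispatch(handlers, packet); unknown codes are reported rather than crashing the dispatch loop.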
@@ -4,12 +4,12 @@ feature 'Admin manages mentors' do scenario 'creating a new mentor' do user = create(:admin) - visit admin_path(as: user) + visit admin_root_path(as: user) click_link 'Mentors' - click_link 'Add new' + click_link 'New mentor' select(user.name, from: 'User') - click_button 'Save' + click_button 'Create Mentor' - expect(page).to have_content('Mentor successfully created') + expect(page).to have_content('Mentor was successfully created') end end
1
require "rails_helper" feature 'Admin manages mentors' do scenario 'creating a new mentor' do user = create(:admin) visit admin_path(as: user) click_link 'Mentors' click_link 'Add new' select(user.name, from: 'User') click_button 'Save' expect(page).to have_content('Mentor successfully created') end end
1
15,540
Prefer double-quoted strings unless you need single quotes to avoid extra backslashes for escaping.
thoughtbot-upcase
rb
@@ -61,6 +61,17 @@ namespace fastrtps { namespace rtps { +static void add_statistics_sent_submessage( + CacheChange_t* change, + size_t num_locators) +{ + static_cast<void>(change); + static_cast<void>(num_locators); + +#ifdef FASTDDS_STATISTICS + change->num_sent_submessages += num_locators; +#endif // ifdef FASTDDS_STATISTICS +} /** * Loops over all the readers in the vector, applying the given routine.
1
// Copyright 2016 Proyectos y Sistemas de Mantenimiento SL (eProsima). // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @file StatefulWriter.cpp * */ #include <fastdds/rtps/writer/StatefulWriter.h> #include <fastdds/rtps/writer/WriterListener.h> #include <fastdds/rtps/writer/IReaderDataFilter.hpp> #include <fastdds/rtps/writer/ReaderProxy.h> #include <fastdds/rtps/resources/AsyncWriterThread.h> #include <rtps/participant/RTPSParticipantImpl.h> #include <rtps/flowcontrol/FlowController.h> #include <rtps/history/BasicPayloadPool.hpp> #include <rtps/DataSharing/DataSharingPayloadPool.hpp> #include <rtps/DataSharing/DataSharingNotifier.hpp> #include <rtps/DataSharing/WriterPool.hpp> #include <fastdds/rtps/messages/RTPSMessageCreator.h> #include <fastdds/rtps/messages/RTPSMessageGroup.h> #include <fastdds/rtps/participant/RTPSParticipant.h> #include <fastdds/rtps/resources/ResourceEvent.h> #include <fastdds/rtps/resources/TimedEvent.h> #include <fastdds/rtps/history/WriterHistory.h> #include <fastdds/dds/log/Log.hpp> #include <fastrtps/utils/TimeConversion.h> #include <fastdds/rtps/builtin/BuiltinProtocols.h> #include <fastdds/rtps/builtin/liveliness/WLP.h> #include <rtps/RTPSDomainImpl.hpp> #include <rtps/history/CacheChangePool.h> #include <rtps/messages/RTPSGapBuilder.hpp> #include <rtps/writer/RTPSWriterCollector.h> #include "../builtin/discovery/database/DiscoveryDataBase.hpp" #include <mutex> #include <vector> #include <stdexcept> namespace eprosima { namespace fastrtps { namespace rtps { /** * Loops over all the readers in the vector, applying the given routine. * The loop continues until the result of the routine is true for any reader * or all readers have been processes. * The returned value is true if the routine returned true at any point, * or false otherwise. */ bool for_matched_readers( ResourceLimitedVector<ReaderProxy*>& reader_vector_1, std::function<bool(ReaderProxy*)> fun) { for (ReaderProxy* remote_reader : reader_vector_1) { if (fun(remote_reader)) { return true; } } return false; } bool for_matched_readers( ResourceLimitedVector<ReaderProxy*>& reader_vector_1, ResourceLimitedVector<ReaderProxy*>& reader_vector_2, std::function<bool(ReaderProxy*)> fun) { if (for_matched_readers(reader_vector_1, fun)) { return true; } return for_matched_readers(reader_vector_2, fun); } bool for_matched_readers( ResourceLimitedVector<ReaderProxy*>& reader_vector_1, ResourceLimitedVector<ReaderProxy*>& reader_vector_2, ResourceLimitedVector<ReaderProxy*>& reader_vector_3, std::function<bool(ReaderProxy*)> fun) { if (for_matched_readers(reader_vector_1, reader_vector_2, fun)) { return true; } return for_matched_readers(reader_vector_3, fun); } /** * Loops over all the readers in the vector, applying the given routine. * The loop continues until the result of the routine is true for any reader * or all readers have been processes. * The returned value is true if the routine returned true at any point, * or false otherwise. 
* * const version */ bool for_matched_readers( const ResourceLimitedVector<ReaderProxy*>& reader_vector_1, std::function<bool(const ReaderProxy*)> fun) { for (const ReaderProxy* remote_reader : reader_vector_1) { if (fun(remote_reader)) { return true; } } return false; } bool for_matched_readers( const ResourceLimitedVector<ReaderProxy*>& reader_vector_1, const ResourceLimitedVector<ReaderProxy*>& reader_vector_2, std::function<bool(const ReaderProxy*)> fun) { if (for_matched_readers(reader_vector_1, fun)) { return true; } return for_matched_readers(reader_vector_2, fun); } bool for_matched_readers( const ResourceLimitedVector<ReaderProxy*>& reader_vector_1, const ResourceLimitedVector<ReaderProxy*>& reader_vector_2, const ResourceLimitedVector<ReaderProxy*>& reader_vector_3, std::function<bool(const ReaderProxy*)> fun) { if (for_matched_readers(reader_vector_1, reader_vector_2, fun)) { return true; } return for_matched_readers(reader_vector_3, fun); } template<typename UnaryFun> bool send_data_or_fragments( RTPSMessageGroup& group, CacheChange_t* change, bool inline_qos, UnaryFun sent_fun) { bool sent_ok = true; if (change->getFragmentSize() > 0) { for (FragmentNumber_t frag = 1; frag <= change->getFragmentCount(); frag++) { sent_ok &= group.add_data_frag(*change, frag, inline_qos); if (sent_ok) { sent_fun(frag); } else { logError(RTPS_WRITER, "Error sending fragment (" << change->sequenceNumber << ", " << frag << ")"); break; } } } else { sent_ok = group.add_data(*change, inline_qos); if (sent_ok) { sent_fun(0); } else { logError(RTPS_WRITER, "Error sending change " << change->sequenceNumber); } } return sent_ok; } static void null_sent_fun( FragmentNumber_t /*frag*/) { } using namespace std::chrono; StatefulWriter::StatefulWriter( RTPSParticipantImpl* pimpl, const GUID_t& guid, const WriterAttributes& att, WriterHistory* history, WriterListener* listener) : RTPSWriter(pimpl, guid, att, history, listener) , periodic_hb_event_(nullptr) , nack_response_event_(nullptr) , ack_event_(nullptr) , m_heartbeatCount(0) , m_times(att.times) , matched_remote_readers_(att.matched_readers_allocation) , matched_readers_pool_(att.matched_readers_allocation) , next_all_acked_notify_sequence_(0, 1) , all_acked_(false) , may_remove_change_cond_() , may_remove_change_(0) , disable_heartbeat_piggyback_(att.disable_heartbeat_piggyback) , disable_positive_acks_(att.disable_positive_acks) , keep_duration_us_(att.keep_duration.to_ns() * 1e-3) , last_sequence_number_() , biggest_removed_sequence_number_() , sendBufferSize_(pimpl->get_min_network_send_buffer_size()) , currentUsageSendBufferSize_(static_cast<int32_t>(pimpl->get_min_network_send_buffer_size())) , m_controllers() , matched_local_readers_(att.matched_readers_allocation) , matched_datasharing_readers_(att.matched_readers_allocation) { init(pimpl, att); } StatefulWriter::StatefulWriter( RTPSParticipantImpl* pimpl, const GUID_t& guid, const WriterAttributes& att, const std::shared_ptr<IPayloadPool>& payload_pool, WriterHistory* history, WriterListener* listener) : RTPSWriter(pimpl, guid, att, payload_pool, history, listener) , periodic_hb_event_(nullptr) , nack_response_event_(nullptr) , ack_event_(nullptr) , m_heartbeatCount(0) , m_times(att.times) , matched_remote_readers_(att.matched_readers_allocation) , matched_readers_pool_(att.matched_readers_allocation) , next_all_acked_notify_sequence_(0, 1) , all_acked_(false) , may_remove_change_cond_() , may_remove_change_(0) , disable_heartbeat_piggyback_(att.disable_heartbeat_piggyback) , 
disable_positive_acks_(att.disable_positive_acks) , keep_duration_us_(att.keep_duration.to_ns() * 1e-3) , last_sequence_number_() , biggest_removed_sequence_number_() , sendBufferSize_(pimpl->get_min_network_send_buffer_size()) , currentUsageSendBufferSize_(static_cast<int32_t>(pimpl->get_min_network_send_buffer_size())) , m_controllers() , matched_local_readers_(att.matched_readers_allocation) , matched_datasharing_readers_(att.matched_readers_allocation) { init(pimpl, att); } StatefulWriter::StatefulWriter( RTPSParticipantImpl* pimpl, const GUID_t& guid, const WriterAttributes& att, const std::shared_ptr<IPayloadPool>& payload_pool, const std::shared_ptr<IChangePool>& change_pool, WriterHistory* hist, WriterListener* listen) : RTPSWriter(pimpl, guid, att, payload_pool, change_pool, hist, listen) , periodic_hb_event_(nullptr) , nack_response_event_(nullptr) , ack_event_(nullptr) , m_heartbeatCount(0) , m_times(att.times) , matched_remote_readers_(att.matched_readers_allocation) , matched_readers_pool_(att.matched_readers_allocation) , next_all_acked_notify_sequence_(0, 1) , all_acked_(false) , may_remove_change_cond_() , may_remove_change_(0) , disable_heartbeat_piggyback_(att.disable_heartbeat_piggyback) , disable_positive_acks_(att.disable_positive_acks) , keep_duration_us_(att.keep_duration.to_ns() * 1e-3) , last_sequence_number_() , biggest_removed_sequence_number_() , sendBufferSize_(pimpl->get_min_network_send_buffer_size()) , currentUsageSendBufferSize_(static_cast<int32_t>(pimpl->get_min_network_send_buffer_size())) , m_controllers() , matched_local_readers_(att.matched_readers_allocation) , matched_datasharing_readers_(att.matched_readers_allocation) { init(pimpl, att); } void StatefulWriter::init( RTPSParticipantImpl* pimpl, const WriterAttributes& att) { const RTPSParticipantAttributes& part_att = pimpl->getRTPSParticipantAttributes(); auto push_mode = PropertyPolicyHelper::find_property(att.endpoint.properties, "fastdds.push_mode"); m_pushMode = !((nullptr != push_mode) && ("false" == *push_mode)); periodic_hb_event_ = new TimedEvent( pimpl->getEventResource(), [&]() -> bool { return send_periodic_heartbeat(); }, TimeConv::Time_t2MilliSecondsDouble(m_times.heartbeatPeriod)); nack_response_event_ = new TimedEvent( pimpl->getEventResource(), [&]() -> bool { perform_nack_response(); return false; }, TimeConv::Time_t2MilliSecondsDouble(m_times.nackResponseDelay)); if (disable_positive_acks_) { ack_event_ = new TimedEvent( pimpl->getEventResource(), [&]() -> bool { return ack_timer_expired(); }, att.keep_duration.to_ns() * 1e-6); // in milliseconds } for (size_t n = 0; n < att.matched_readers_allocation.initial; ++n) { matched_readers_pool_.push_back(new ReaderProxy(m_times, part_att.allocation.locators, this)); } } StatefulWriter::~StatefulWriter() { logInfo(RTPS_WRITER, "StatefulWriter destructor"); for (std::unique_ptr<FlowController>& controller : m_controllers) { controller->disable(); } if (disable_positive_acks_) { delete(ack_event_); ack_event_ = nullptr; } if (nack_response_event_ != nullptr) { delete(nack_response_event_); nack_response_event_ = nullptr; } mp_RTPSParticipant->async_thread().unregister_writer(this); // After unregistering writer from AsyncWriterThread, delete all flow_controllers because they register the writer in // the AsyncWriterThread. 
m_controllers.clear(); // Stop all active proxies and pass them to the pool { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); while (!matched_remote_readers_.empty()) { ReaderProxy* remote_reader = matched_remote_readers_.back(); matched_remote_readers_.pop_back(); remote_reader->stop(); matched_readers_pool_.push_back(remote_reader); } while (!matched_local_readers_.empty()) { ReaderProxy* remote_reader = matched_local_readers_.back(); matched_local_readers_.pop_back(); remote_reader->stop(); matched_readers_pool_.push_back(remote_reader); } while (!matched_datasharing_readers_.empty()) { ReaderProxy* remote_reader = matched_datasharing_readers_.back(); matched_datasharing_readers_.pop_back(); remote_reader->stop(); matched_readers_pool_.push_back(remote_reader); } } // Destroy heartbeat event if (periodic_hb_event_ != nullptr) { delete(periodic_hb_event_); periodic_hb_event_ = nullptr; } // Delete all proxies in the pool for (ReaderProxy* remote_reader : matched_readers_pool_) { delete(remote_reader); } // TODO [ILG] Should we force this on all cases? if (is_datasharing_compatible()) { //Release payloads orderly for (std::vector<CacheChange_t*>::iterator chit = mp_history->changesBegin(); chit != mp_history->changesEnd(); ++chit) { IPayloadPool* pool = (*chit)->payload_owner(); if (pool) { pool->release_payload(**chit); } } } } /* * CHANGE-RELATED METHODS */ void StatefulWriter::prepare_datasharing_delivery( CacheChange_t* change) { auto pool = std::dynamic_pointer_cast<WriterPool>(payload_pool_); assert (pool != nullptr); pool->add_to_shared_history(change); logInfo(RTPS_WRITER, "Notifying readers of cache change with SN " << change->sequenceNumber); } void StatefulWriter::async_delivery( CacheChange_t* change, const std::chrono::time_point<std::chrono::steady_clock>& max_blocking_time) { bool should_wake_up = false; for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this, &should_wake_up, &change, &max_blocking_time](ReaderProxy* reader) { ChangeForReader_t changeForReader(change); if (m_pushMode || !reader->is_reliable() || reader->is_local_reader()) { changeForReader.setStatus(UNSENT); should_wake_up = true; } else { changeForReader.setStatus(UNACKNOWLEDGED); } changeForReader.setRelevance(reader->rtps_is_relevant(change)); reader->add_change(changeForReader, false, max_blocking_time); return false; } ); if (should_wake_up) { mp_RTPSParticipant->async_thread().wake_up(this, max_blocking_time); } else { periodic_hb_event_->restart_timer(max_blocking_time); } } void StatefulWriter::sync_delivery( CacheChange_t* change, const std::chrono::time_point<std::chrono::steady_clock>& max_blocking_time) { //TODO(Ricardo) Temporal. bool expectsInlineQos = false; bool should_be_sent = false; locator_selector_.reset(false); // First step is to add the new CacheChange_t to all reader proxies. // It has to be done before sending, because if a timeout is caught, we will not include the // CacheChange_t in some reader proxies. 
for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this, &change, &max_blocking_time, &expectsInlineQos, &should_be_sent](ReaderProxy* reader) { ChangeForReader_t changeForReader(change); bool send_to_this_reader = false; if (!reader->is_reliable()) { changeForReader.setStatus(ACKNOWLEDGED); send_to_this_reader = true; } else { if (m_pushMode || reader->is_local_reader()) { changeForReader.setStatus(UNDERWAY); send_to_this_reader = true; } else { changeForReader.setStatus(UNACKNOWLEDGED); } } changeForReader.setRelevance(reader->rtps_is_relevant(change)); reader->add_change(changeForReader, true, max_blocking_time); expectsInlineQos |= reader->expects_inline_qos(); if (send_to_this_reader) { if (reader->is_local_reader()) { intraprocess_heartbeat(reader, false); bool delivered = !changeForReader.isRelevant() || intraprocess_delivery(change, reader); reader->set_change_to_status( change->sequenceNumber, delivered ? ACKNOWLEDGED : UNDERWAY, false); } else if (reader->is_datasharing_reader()) { reader->datasharing_notify(); reader->set_change_to_status(change->sequenceNumber, UNDERWAY, false); } else { should_be_sent = true; locator_selector_.enable(reader->guid()); } } return false; } ); //At this point we are sure all information was stored. We now can send data. if (!should_be_sent) { if (getMatchedReadersSize() > 0) { periodic_hb_event_->restart_timer(max_blocking_time); } } else { try { if (!m_separateSendingEnabled) { if (locator_selector_.selected_size() > 0) { NetworkFactory& network = mp_RTPSParticipant->network_factory(); network.select_locators(locator_selector_); compute_selected_guids(); RTPSMessageGroup group(mp_RTPSParticipant, this, *this, max_blocking_time); auto sent_fun = [this, change]( FragmentNumber_t frag) { if (frag > 0) { for (ReaderProxy* it : matched_remote_readers_) { bool allFragmentsSent = false; it->mark_fragment_as_sent_for_change( change->sequenceNumber, frag, allFragmentsSent); } } }; send_data_or_fragments(group, change, expectsInlineQos, sent_fun); send_heartbeat_nts_(all_remote_readers_.size(), group, disable_positive_acks_); } } else { for (ReaderProxy* it : matched_remote_readers_) { RTPSMessageGroup group(mp_RTPSParticipant, this, it->message_sender(), max_blocking_time); if (change->getFragmentCount() > 0) { logError(RTPS_WRITER, "Cannot send large messages on separate sending mode"); } else { if (!group.add_data(*change, it->expects_inline_qos())) { logError(RTPS_WRITER, "Error sending change " << change->sequenceNumber); } } uint32_t last_processed = 0; send_heartbeat_piggyback_nts_(it, group, last_processed); } } periodic_hb_event_->restart_timer(max_blocking_time); if (disable_positive_acks_ && last_sequence_number_ == SequenceNumber_t()) { last_sequence_number_ = change->sequenceNumber; } } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } if (!m_pushMode) { NetworkFactory& network = mp_RTPSParticipant->network_factory(); locator_selector_.reset(true); network.select_locators(locator_selector_); compute_selected_guids(); } check_acked_status(); } void StatefulWriter::unsent_change_added_to_history( CacheChange_t* change, const std::chrono::time_point<std::chrono::steady_clock>& max_blocking_time) { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); if (liveliness_lease_duration_ < c_TimeInfinite) { mp_RTPSParticipant->wlp()->assert_liveliness( getGuid(), liveliness_kind_, liveliness_lease_duration_); } // Prepare the metadata for datasharing if 
(is_datasharing_compatible()) { prepare_datasharing_delivery(change); } // Now for the rest of readers if (!matched_remote_readers_.empty() || !matched_datasharing_readers_.empty() || !matched_local_readers_.empty()) { if (!isAsync()) { sync_delivery(change, max_blocking_time); } else { async_delivery(change, max_blocking_time); } if (disable_positive_acks_) { auto source_timestamp = system_clock::time_point() + nanoseconds(change->sourceTimestamp.to_ns()); auto now = system_clock::now(); auto interval = source_timestamp - now + keep_duration_us_; assert(interval.count() >= 0); ack_event_->update_interval_millisec((double)duration_cast<milliseconds>(interval).count()); ack_event_->restart_timer(max_blocking_time); } } else { logInfo(RTPS_WRITER, "No reader proxy to add change."); check_acked_status(); } } bool StatefulWriter::intraprocess_delivery( CacheChange_t* change, ReaderProxy* reader_proxy) { RTPSReader* reader = reader_proxy->local_reader(); if (reader) { if (change->write_params.related_sample_identity() != SampleIdentity::unknown()) { change->write_params.sample_identity(change->write_params.related_sample_identity()); } return reader->processDataMsg(change); } return false; } bool StatefulWriter::intraprocess_gap( ReaderProxy* reader_proxy, const SequenceNumber_t& seq_num) { RTPSReader* reader = reader_proxy->local_reader(); if (reader) { return reader->processGapMsg(m_guid, seq_num, SequenceNumberSet_t(seq_num + 1)); } return false; } bool StatefulWriter::intraprocess_heartbeat( ReaderProxy* reader_proxy, bool liveliness) { bool returned_value = false; std::lock_guard<RecursiveTimedMutex> guardW(mp_mutex); RTPSReader* reader = RTPSDomainImpl::find_local_reader(reader_proxy->guid()); if (reader) { SequenceNumber_t first_seq = get_seq_num_min(); SequenceNumber_t last_seq = get_seq_num_max(); if (first_seq == c_SequenceNumber_Unknown || last_seq == c_SequenceNumber_Unknown) { if (liveliness) { first_seq = next_sequence_number(); last_seq = first_seq - 1; } } if ((first_seq != c_SequenceNumber_Unknown && last_seq != c_SequenceNumber_Unknown) && (liveliness || reader_proxy->has_changes())) { incrementHBCount(); if (true == (returned_value = reader->processHeartbeatMsg(m_guid, m_heartbeatCount, first_seq, last_seq, true, liveliness))) { if (reader_proxy->durability_kind() < TRANSIENT_LOCAL || this->getAttributes().durabilityKind < TRANSIENT_LOCAL) { SequenceNumber_t first_relevant = reader_proxy->first_relevant_sequence_number(); if (first_seq < first_relevant) { reader->processGapMsg(m_guid, first_seq, SequenceNumberSet_t(first_relevant)); } } } } } return returned_value; } bool StatefulWriter::change_removed_by_history( CacheChange_t* a_change) { SequenceNumber_t sequence_number = a_change->sequenceNumber; std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); logInfo(RTPS_WRITER, "Change " << sequence_number << " to be removed."); // Take note of biggest removed sequence number to improve sending of gaps if (sequence_number > biggest_removed_sequence_number_) { biggest_removed_sequence_number_ = sequence_number; } // Invalidate CacheChange pointer in ReaderProxies. 
for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [sequence_number](ReaderProxy* reader) { reader->change_has_been_removed(sequence_number); return false; } ); // remove from datasharing pool history if (is_datasharing_compatible()) { auto pool = std::dynamic_pointer_cast<WriterPool>(payload_pool_); assert (pool != nullptr); pool->remove_from_shared_history(a_change); logInfo(RTPS_WRITER, "Removing shared cache change with SN " << a_change->sequenceNumber); } may_remove_change_ = 2; may_remove_change_cond_.notify_one(); return true; } void StatefulWriter::send_any_unsent_changes() { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); bool activateHeartbeatPeriod = false; SequenceNumber_t max_sequence = mp_history->next_sequence_number(); if (mp_history->getHistorySize() == 0 || getMatchedReadersSize() == 0) { send_heartbeat_to_all_readers(); } else if (m_separateSendingEnabled) { send_changes_separatedly(max_sequence, activateHeartbeatPeriod); } else { bool no_flow_controllers = m_controllers.empty() && mp_RTPSParticipant->getFlowControllers().empty(); if (no_flow_controllers || !there_are_remote_readers_) { send_all_unsent_changes(max_sequence, activateHeartbeatPeriod); } else { send_unsent_changes_with_flow_control(max_sequence, activateHeartbeatPeriod); } } if (activateHeartbeatPeriod) { periodic_hb_event_->restart_timer(); } // On VOLATILE writers, remove auto-acked (best effort readers) changes check_acked_status(); logInfo(RTPS_WRITER, "Finish sending unsent changes"); } void StatefulWriter::send_heartbeat_to_all_readers() { // This version is called from send_any_unsent_changes when any of the following conditions is satisfied: // a) history is empty // b) there are no matched readers // It may also be called from send_periodic_heartbeat if (m_separateSendingEnabled) { for (ReaderProxy* reader : matched_remote_readers_) { send_heartbeat_to_nts(*reader); } } else { for (ReaderProxy* reader : matched_local_readers_) { intraprocess_heartbeat(reader); } for (ReaderProxy* reader : matched_datasharing_readers_) { reader->datasharing_notify(); } if (there_are_remote_readers_) { RTPSMessageGroup group(mp_RTPSParticipant, this, *this); send_heartbeat_nts_(all_remote_readers_.size(), group, disable_positive_acks_); } } } void StatefulWriter::send_changes_separatedly( SequenceNumber_t max_sequence, bool& activateHeartbeatPeriod) { // This version is called when all of the following conditions are satisfied: // a) history is not empty // b) there is at least one matched reader // c) separate sending is enabled // Process datasharing then if (there_are_datasharing_readers_) { send_all_datasharing_changes(max_sequence); } for (ReaderProxy* remoteReader : matched_local_readers_) { // If there are no changes for this reader, simply jump to the next one if (!remoteReader->has_changes()) { continue; } SequenceNumber_t max_ack_seq = SequenceNumber_t::unknown(); auto unsent_change_process = [&](const SequenceNumber_t& seqNum, const ChangeForReader_t* unsentChange) { if (unsentChange != nullptr && unsentChange->isRelevant() && unsentChange->isValid()) { if (intraprocess_delivery(unsentChange->getChange(), remoteReader)) { max_ack_seq = seqNum; } else { remoteReader->set_change_to_status(seqNum, UNDERWAY, false); } } else { if (intraprocess_gap(remoteReader, seqNum)) { max_ack_seq = seqNum; } else { remoteReader->set_change_to_status(seqNum, UNDERWAY, true); } } }; remoteReader->for_each_unsent_change(max_sequence, unsent_change_process); if (max_ack_seq 
!= SequenceNumber_t::unknown()) { remoteReader->acked_changes_set(max_ack_seq + 1); } } for (ReaderProxy* remoteReader : matched_remote_readers_) { // If there are no changes for this reader, simply jump to the next one if (!remoteReader->has_changes()) { continue; } // Specific destination message group RTPSMessageGroup group(mp_RTPSParticipant, this, remoteReader->message_sender()); SequenceNumber_t min_history_seq = get_seq_num_min(); if (remoteReader->is_reliable()) { // Add a HEARTBEAT to the datagram with final flag set to false. This way, the reader must send an // ACKNACK message for each DATA that it receives. send_heartbeat_nts_(1u, group, false); RTPSGapBuilder gaps(group, remoteReader->guid()); uint32_t lastBytesProcessed = 0; auto sent_fun = [this, remoteReader, &lastBytesProcessed, &group]( FragmentNumber_t /*frag*/) { // Heartbeat piggyback. send_heartbeat_piggyback_nts_(remoteReader, group, lastBytesProcessed); }; auto unsent_change_process = [&](const SequenceNumber_t& seqNum, const ChangeForReader_t* unsentChange) { if (unsentChange != nullptr && unsentChange->isRelevant() && unsentChange->isValid()) { bool sent_ok = send_data_or_fragments( group, unsentChange->getChange(), remoteReader->expects_inline_qos(), sent_fun); if (sent_ok) { remoteReader->set_change_to_status(seqNum, UNDERWAY, true); activateHeartbeatPeriod = true; } } else { if (seqNum >= min_history_seq) { gaps.add(seqNum); } remoteReader->set_change_to_status(seqNum, UNDERWAY, true); } }; remoteReader->for_each_unsent_change(max_sequence, unsent_change_process); } else { SequenceNumber_t max_ack_seq = SequenceNumber_t::unknown(); auto unsent_change_process = [&](const SequenceNumber_t& seqNum, const ChangeForReader_t* unsentChange) { if (unsentChange != nullptr && unsentChange->isRelevant() && unsentChange->isValid()) { bool sent_ok = send_data_or_fragments( group, unsentChange->getChange(), remoteReader->expects_inline_qos(), null_sent_fun); if (sent_ok) { max_ack_seq = seqNum; } } else { max_ack_seq = seqNum; } }; remoteReader->for_each_unsent_change(max_sequence, unsent_change_process); if (max_ack_seq != SequenceNumber_t::unknown()) { remoteReader->acked_changes_set(max_ack_seq + 1); } } } // Readers loop } void StatefulWriter::send_all_intraprocess_changes( SequenceNumber_t max_sequence) { for (ReaderProxy* remoteReader : matched_local_readers_) { intraprocess_heartbeat(remoteReader, false); SequenceNumber_t max_ack_seq = SequenceNumber_t::unknown(); auto unsent_change_process = [&](const SequenceNumber_t& seq_num, const ChangeForReader_t* unsentChange) { if (unsentChange != nullptr && unsentChange->isValid()) { if (intraprocess_delivery(unsentChange->getChange(), remoteReader)) { max_ack_seq = seq_num; } else { remoteReader->set_change_to_status(seq_num, UNDERWAY, false); } } else { if (intraprocess_gap(remoteReader, seq_num)) { max_ack_seq = seq_num; } else { remoteReader->set_change_to_status(seq_num, UNDERWAY, true); } } }; remoteReader->for_each_unsent_change(max_sequence, unsent_change_process); if (max_ack_seq != SequenceNumber_t::unknown()) { remoteReader->acked_changes_set(max_ack_seq + 1); } } } void StatefulWriter::send_all_datasharing_changes( SequenceNumber_t max_sequence) { for (ReaderProxy* remoteReader : matched_datasharing_readers_) { SequenceNumber_t max_ack_seq = SequenceNumber_t::unknown(); auto unsent_change_process = [&](const SequenceNumber_t& seq_num, const ChangeForReader_t* /*unsentChange*/) { if (!remoteReader->is_reliable()) { max_ack_seq = seq_num; } else { 
remoteReader->set_change_to_status(seq_num, UNACKNOWLEDGED, false); } }; remoteReader->for_each_unsent_change(max_sequence, unsent_change_process); if (max_ack_seq != SequenceNumber_t::unknown()) { remoteReader->acked_changes_set(max_ack_seq + 1); } // Finally notify the reader it has some data to read remoteReader->datasharing_notify(); } } void StatefulWriter::send_all_unsent_changes( SequenceNumber_t max_sequence, bool& activateHeartbeatPeriod) { // This version is called when all of the following conditions are satisfied: // a) history is not empty // b) there is at least one matched reader // c) separate sending is disabled // d) either all matched readers are local or no flow controllers are configured // Process intraprocess first if (there_are_local_readers_) { send_all_intraprocess_changes(max_sequence); } // Process datasharing then if (there_are_datasharing_readers_) { send_all_datasharing_changes(max_sequence); } if (there_are_remote_readers_) { static constexpr uint32_t implicit_flow_controller_size = RTPSMessageGroup::get_max_fragment_payload_size(); NetworkFactory& network = mp_RTPSParticipant->network_factory(); locator_selector_.reset(true); network.select_locators(locator_selector_); compute_selected_guids(); bool acknack_required = next_all_acked_notify_sequence_ < get_seq_num_min(); RTPSMessageGroup group(mp_RTPSParticipant, this, *this); acknack_required |= send_hole_gaps_to_group(group); uint32_t lastBytesProcessed = 0; auto sent_fun = [this, &lastBytesProcessed, &group]( FragmentNumber_t /*frag*/) { // Heartbeat piggyback. send_heartbeat_piggyback_nts_(nullptr, group, lastBytesProcessed); }; RTPSGapBuilder gap_builder(group); uint32_t total_sent_size = 0; History::iterator cit; for (cit = mp_history->changesBegin(); cit != mp_history->changesEnd() && (total_sent_size < implicit_flow_controller_size); cit++) { SequenceNumber_t seq = (*cit)->sequenceNumber; // Deselect all entries on the locator selector (we will only activate the // readers for which this sequence number is pending) locator_selector_.reset(false); bool is_irrelevant = true; // Will turn to false if change is relevant for at least one reader bool should_be_sent = false; bool inline_qos = false; for (ReaderProxy* remoteReader : matched_remote_readers_) { if (remoteReader->change_is_unsent(seq, is_irrelevant)) { should_be_sent = true; locator_selector_.enable(remoteReader->guid()); inline_qos |= remoteReader->expects_inline_qos(); if (is_irrelevant) { remoteReader->set_change_to_status(seq, UNDERWAY, true); } } } if (locator_selector_.state_has_changed()) { gap_builder.flush(); group.flush_and_reset(); network.select_locators(locator_selector_); compute_selected_guids(); } if (should_be_sent) { if (is_irrelevant) { gap_builder.add(seq); } else { bool sent_ok = send_data_or_fragments(group, *cit, inline_qos, sent_fun); if (sent_ok) { total_sent_size += (*cit)->serializedPayload.length; bool tmp_bool = false; for (ReaderProxy* remoteReader : matched_remote_readers_) { if (remoteReader->change_is_unsent(seq, tmp_bool)) { remoteReader->set_change_to_status(seq, UNDERWAY, true); if (remoteReader->is_reliable()) { activateHeartbeatPeriod = true; } } } } } } } // Heartbeat piggyback. 
if (acknack_required) { send_heartbeat_nts_(all_remote_readers_.size(), group, disable_positive_acks_); } group.flush_and_reset(); locator_selector_.reset(true); network.select_locators(locator_selector_); compute_selected_guids(); if (cit != mp_history->changesEnd()) { mp_RTPSParticipant->async_thread().wake_up(this); } } } void StatefulWriter::send_unsent_changes_with_flow_control( SequenceNumber_t max_sequence, bool& activateHeartbeatPeriod) { // This version is called when all of the following conditions are satisfied: // a) history is not empty // b) there is at least one matched reader // c) separate sending is disabled // d) there is at least one remote matched reader and flow controllers are configured // Process intraprocess first if (there_are_local_readers_) { send_all_intraprocess_changes(max_sequence); } // Process datasharing then if (there_are_datasharing_readers_) { send_all_datasharing_changes(max_sequence); } // From here onwards, only remote readers should be accessed RTPSWriterCollector<ReaderProxy*> relevantChanges; bool heartbeat_has_been_sent = false; NetworkFactory& network = mp_RTPSParticipant->network_factory(); locator_selector_.reset(true); network.select_locators(locator_selector_); compute_selected_guids(); RTPSMessageGroup group(mp_RTPSParticipant, this, *this); // GAP for holes in history sent to the readers that need it send_hole_gaps_to_group(group); // Reset the state of locator_selector to select all readers group.flush_and_reset(); locator_selector_.reset(true); network.select_locators(locator_selector_); compute_selected_guids(); for (ReaderProxy* remoteReader : matched_remote_readers_) { if (!heartbeat_has_been_sent && remoteReader->are_there_gaps()) { send_heartbeat_nts_(all_remote_readers_.size(), group, true); heartbeat_has_been_sent = true; } RTPSGapBuilder gaps(group, remoteReader->guid()); auto unsent_change_process = [&](const SequenceNumber_t& seq_num, const ChangeForReader_t* unsentChange) { if (unsentChange != nullptr && unsentChange->isValid()) { relevantChanges.add_change( unsentChange->getChange(), remoteReader, unsentChange->getUnsentFragments()); } else { // Skip holes in history, as they were added before if (unsentChange != nullptr && remoteReader->is_reliable()) { gaps.add(seq_num); } remoteReader->set_change_to_status(seq_num, UNDERWAY, true); } }; remoteReader->for_each_unsent_change(max_sequence, unsent_change_process); } // Clear all relevant changes through the local controllers first for (std::unique_ptr<FlowController>& controller : m_controllers) { (*controller)(relevantChanges); } // Clear all relevant changes through the parent controllers for (std::unique_ptr<FlowController>& controller : mp_RTPSParticipant->getFlowControllers()) { (*controller)(relevantChanges); } try { uint32_t lastBytesProcessed = 0; while (!relevantChanges.empty()) { RTPSWriterCollector<ReaderProxy*>::Item changeToSend = relevantChanges.pop(); bool expectsInlineQos = false; locator_selector_.reset(false); for (const ReaderProxy* remoteReader : changeToSend.remoteReaders) { locator_selector_.enable(remoteReader->guid()); expectsInlineQos |= remoteReader->expects_inline_qos(); } if (locator_selector_.state_has_changed()) { group.flush_and_reset(); network.select_locators(locator_selector_); compute_selected_guids(); } // TODO(Ricardo) Flowcontroller has to be used in RTPSMessageGroup. Study. 
// And controllers are notified about the changes being sent FlowController::NotifyControllersChangeSent(changeToSend.cacheChange); if (changeToSend.fragmentNumber != 0) { if (group.add_data_frag(*changeToSend.cacheChange, changeToSend.fragmentNumber, expectsInlineQos)) { bool must_wake_up_async_thread = false; for (ReaderProxy* remoteReader : changeToSend.remoteReaders) { bool allFragmentsSent = false; if (remoteReader->mark_fragment_as_sent_for_change( changeToSend.sequenceNumber, changeToSend.fragmentNumber, allFragmentsSent)) { must_wake_up_async_thread |= !allFragmentsSent; if (remoteReader->is_remote_and_reliable()) { activateHeartbeatPeriod = true; if (allFragmentsSent) { remoteReader->set_change_to_status(changeToSend.sequenceNumber, UNDERWAY, true); } } else { if (allFragmentsSent) { remoteReader->set_change_to_status(changeToSend.sequenceNumber, ACKNOWLEDGED, false); } } } } if (must_wake_up_async_thread) { mp_RTPSParticipant->async_thread().wake_up(this); } } else { logError(RTPS_WRITER, "Error sending fragment (" << changeToSend.sequenceNumber << ", " << changeToSend.fragmentNumber << ")"); } } else { if (group.add_data(*changeToSend.cacheChange, expectsInlineQos)) { for (ReaderProxy* remoteReader : changeToSend.remoteReaders) { remoteReader->set_change_to_status(changeToSend.sequenceNumber, UNDERWAY, true); if (remoteReader->is_remote_and_reliable()) { activateHeartbeatPeriod = true; } } } else { logError(RTPS_WRITER, "Error sending change " << changeToSend.sequenceNumber); } } // Heartbeat piggyback. send_heartbeat_piggyback_nts_(nullptr, group, lastBytesProcessed); } group.flush_and_reset(); } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } locator_selector_.reset(true); network.select_locators(locator_selector_); compute_selected_guids(); } bool StatefulWriter::send_hole_gaps_to_group( RTPSMessageGroup& group) { bool ret_val = false; // Add holes in history and send them to all readers in group SequenceNumber_t max_removed = biggest_removed_sequence_number_; SequenceNumber_t last_sequence = mp_history->next_sequence_number(); SequenceNumber_t min_history_seq = get_seq_num_min(); uint32_t history_size = static_cast<uint32_t>(mp_history->getHistorySize()); if ((min_readers_low_mark_ < max_removed) && // some holes pending acknowledgement (min_history_seq + history_size != last_sequence)) // There is a hole in the history { try { // Only send gaps to readers requiring it select_all_readers_with_lowmark_below(max_removed, group); send_heartbeat_nts_(all_remote_readers_.size(), group, true); ret_val = true; // Find holes in history from min_history_seq to last_sequence - 1 RTPSGapBuilder gap_builder(group); // Algorithm starts in min_history_seq SequenceNumber_t seq = min_history_seq; // Loop all history for (auto cit = mp_history->changesBegin(); cit != mp_history->changesEnd(); cit++) { // Add all sequence numbers until the change's sequence number while (seq < (*cit)->sequenceNumber) { gap_builder.add(seq); seq++; } // Skip change's sequence number seq++; } // Add all sequence numbers above last change while (seq < last_sequence) { gap_builder.add(seq); seq++; } } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } return ret_val; } void StatefulWriter::select_all_readers_with_lowmark_below( SequenceNumber_t seq, RTPSMessageGroup& group) { // Deselect all entries on the locator selector (we will only activate the // readers for which this sequence number is pending) 
locator_selector_.reset(false); for_matched_readers(matched_local_readers_, matched_remote_readers_, [this, &seq](ReaderProxy* reader) { if (reader->changes_low_mark() < seq) { locator_selector_.enable(reader->guid()); } return false; } ); if (locator_selector_.state_has_changed()) { group.flush_and_reset(); getRTPSParticipant()->network_factory().select_locators(locator_selector_); compute_selected_guids(); } } /* * MATCHED_READER-RELATED METHODS */ void StatefulWriter::update_reader_info( bool create_sender_resources) { update_cached_info_nts(); compute_selected_guids(); if (create_sender_resources) { RTPSParticipantImpl* part = getRTPSParticipant(); locator_selector_.for_each([part](const Locator_t& loc) { part->createSenderResources(loc); }); } // Check if we have local or remote readers there_are_remote_readers_ = !matched_remote_readers_.empty(); there_are_local_readers_ = !matched_local_readers_.empty(); there_are_datasharing_readers_ = !matched_datasharing_readers_.empty(); } bool StatefulWriter::matched_reader_add( const ReaderProxyData& rdata) { if (rdata.guid() == c_Guid_Unknown) { logError(RTPS_WRITER, "Reliable Writer need GUID_t of matched readers"); return false; } std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); // Check if it is already matched. if (for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this, &rdata](ReaderProxy* reader) { if (reader->guid() == rdata.guid()) { logInfo(RTPS_WRITER, "Attempting to add existing reader, updating information."); if (reader->update(rdata)) { update_reader_info(true); } return true; } return false; })) { return false; } // Get a reader proxy from the inactive pool (or create a new one if necessary and allowed) ReaderProxy* rp = nullptr; if (matched_readers_pool_.empty()) { size_t max_readers = matched_readers_pool_.max_size(); if (getMatchedReadersSize() + matched_readers_pool_.size() < max_readers) { const RTPSParticipantAttributes& part_att = mp_RTPSParticipant->getRTPSParticipantAttributes(); rp = new ReaderProxy(m_times, part_att.allocation.locators, this); } else { logWarning(RTPS_WRITER, "Maximum number of reader proxies (" << max_readers << ") reached for writer " << m_guid); return false; } } else { rp = matched_readers_pool_.back(); matched_readers_pool_.pop_back(); } // Add info of new datareader. 
rp->start(rdata, is_datasharing_compatible_with(rdata)); locator_selector_.add_entry(rp->locator_selector_entry()); if (rp->is_local_reader()) { matched_local_readers_.push_back(rp); logInfo(RTPS_WRITER, "Adding reader " << rdata.guid() << " to " << this->m_guid.entityId << " as local reader"); } else { if (rp->is_datasharing_reader()) { matched_datasharing_readers_.push_back(rp); logInfo(RTPS_WRITER, "Adding reader " << rdata.guid() << " to " << this->m_guid.entityId << " as data sharing"); } else { matched_remote_readers_.push_back(rp); logInfo(RTPS_WRITER, "Adding reader " << rdata.guid() << " to " << this->m_guid.entityId << " as remote reader"); } } update_reader_info(true); if (rp->is_datasharing_reader()) { return true; } RTPSMessageGroup group(mp_RTPSParticipant, this, rp->message_sender()); // Add initial heartbeat to message group if (rp->is_local_reader()) { intraprocess_heartbeat(rp); } else { send_heartbeat_nts_(1u, group, disable_positive_acks_); } SequenceNumber_t current_seq = get_seq_num_min(); SequenceNumber_t last_seq = get_seq_num_max(); if (current_seq != SequenceNumber_t::unknown()) { (void)last_seq; assert(last_seq != SequenceNumber_t::unknown()); assert(current_seq <= last_seq); RTPSGapBuilder gap_builder(group); bool is_reliable = rp->is_reliable(); for (History::iterator cit = mp_history->changesBegin(); cit != mp_history->changesEnd(); ++cit) { // This is to cover the case when there are holes in the history if (is_reliable) { while (current_seq != (*cit)->sequenceNumber) { if (rp->is_local_reader()) { intraprocess_gap(rp, current_seq); } else { try { gap_builder.add(current_seq); } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } ++current_seq; } } else { current_seq = (*cit)->sequenceNumber; } ChangeForReader_t changeForReader(*cit); bool relevance = rp->durability_kind() >= TRANSIENT_LOCAL && m_att.durabilityKind >= TRANSIENT_LOCAL && rp->rtps_is_relevant(*cit); changeForReader.setRelevance(relevance); if (!relevance && is_reliable) { if (rp->is_local_reader()) { intraprocess_gap(rp, current_seq); } else { try { gap_builder.add(current_seq); } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } } // The ChangeForReader_t status has to be UNACKNOWLEDGED if (!rp->is_local_reader() || !changeForReader.isRelevant()) { changeForReader.setStatus(UNACKNOWLEDGED); } rp->add_change(changeForReader, false); ++current_seq; } // This is to cover the case where the last changes have been removed from the history if (is_reliable) { while (current_seq < next_sequence_number()) { if (rp->is_local_reader()) { intraprocess_gap(rp, current_seq); } else { try { gap_builder.add(current_seq); } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } ++current_seq; } } try { if (rp->is_local_reader()) { mp_RTPSParticipant->async_thread().wake_up(this); } else if (is_reliable) { // Send Gap gap_builder.flush(); } } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } // Always activate heartbeat period. We need a confirmation of the reader. // The state has to be updated. 
periodic_hb_event_->restart_timer(); } if (!rp->is_local_reader()) { try { // Send all messages group.flush_and_reset(); } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } logInfo(RTPS_WRITER, "Reader Proxy " << rp->guid() << " added to " << this->m_guid.entityId << " with " << rdata.remote_locators().unicast.size() << "(u)-" << rdata.remote_locators().multicast.size() << "(m) locators"); return true; } bool StatefulWriter::matched_reader_remove( const GUID_t& reader_guid) { ReaderProxy* rproxy = nullptr; std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); for (ReaderProxyIterator it = matched_local_readers_.begin(); it != matched_local_readers_.end(); ++it) { if ((*it)->guid() == reader_guid) { logInfo(RTPS_WRITER, "Reader Proxy removed: " << reader_guid); rproxy = std::move(*it); it = matched_local_readers_.erase(it); break; } } if (rproxy == nullptr) { for (ReaderProxyIterator it = matched_datasharing_readers_.begin(); it != matched_datasharing_readers_.end(); ++it) { if ((*it)->guid() == reader_guid) { logInfo(RTPS_WRITER, "Reader Proxy removed: " << reader_guid); rproxy = std::move(*it); it = matched_datasharing_readers_.erase(it); break; } } } if (rproxy == nullptr) { for (ReaderProxyIterator it = matched_remote_readers_.begin(); it != matched_remote_readers_.end(); ++it) { if ((*it)->guid() == reader_guid) { logInfo(RTPS_WRITER, "Reader Proxy removed: " << reader_guid); rproxy = std::move(*it); it = matched_remote_readers_.erase(it); break; } } } locator_selector_.remove_entry(reader_guid); update_reader_info(false); if (getMatchedReadersSize() == 0) { periodic_hb_event_->cancel_timer(); } if (rproxy != nullptr) { rproxy->stop(); matched_readers_pool_.push_back(rproxy); lock.unlock(); check_acked_status(); return true; } logInfo(RTPS_HISTORY, "Reader Proxy doesn't exist in this writer"); return false; } bool StatefulWriter::matched_reader_is_matched( const GUID_t& reader_guid) { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); return for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [&reader_guid](ReaderProxy* reader) { return (reader->guid() == reader_guid); } ); } bool StatefulWriter::matched_reader_lookup( GUID_t& readerGuid, ReaderProxy** RP) { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); return for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [&readerGuid, RP](ReaderProxy* reader) { if (reader->guid() == readerGuid) { *RP = reader; return true; } return false; } ); } bool StatefulWriter::is_acked_by_all( const CacheChange_t* change) const { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); if (change->writerGUID != this->getGuid()) { logWarning(RTPS_WRITER, "The given change is not from this Writer"); return false; } return is_acked_by_all(change->sequenceNumber); } bool StatefulWriter::is_acked_by_all( const SequenceNumber_t seq) const { assert(mp_history->next_sequence_number() > seq); return (seq < next_all_acked_notify_sequence_) || !for_matched_readers(matched_local_readers_, matched_remote_readers_, [seq](const ReaderProxy* reader) { return !(reader->change_is_acked(seq)); }); } bool StatefulWriter::all_readers_updated() { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); return !for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [](const ReaderProxy* reader) { return (reader->has_changes()); } ); } bool StatefulWriter::wait_for_all_acked( const 
Duration_t& max_wait) { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); std::unique_lock<std::mutex> all_acked_lock(all_acked_mutex_); all_acked_ = !for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [](const ReaderProxy* reader) { return reader->has_changes(); } ); lock.unlock(); if (!all_acked_) { std::chrono::microseconds max_w(TimeConv::Duration_t2MicroSecondsInt64(max_wait)); all_acked_cond_.wait_for(all_acked_lock, max_w, [&]() { return all_acked_; }); } return all_acked_; } void StatefulWriter::rebuild_status_after_load() { SequenceNumber_t min_seq = get_seq_num_min(); if (min_seq != SequenceNumber_t::unknown()) { biggest_removed_sequence_number_ = min_seq - 1; may_remove_change_ = 1; } SequenceNumber_t next_seq = mp_history->next_sequence_number(); next_all_acked_notify_sequence_ = next_seq; min_readers_low_mark_ = next_seq - 1; all_acked_ = true; } void StatefulWriter::check_acked_status() { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); bool all_acked = true; bool has_min_low_mark = false; // #8945 If no readers matched, notify all old changes. SequenceNumber_t min_low_mark = mp_history->next_sequence_number() - 1; for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [&all_acked, &has_min_low_mark, &min_low_mark](ReaderProxy* reader) { SequenceNumber_t reader_low_mark = reader->changes_low_mark(); if (reader_low_mark < min_low_mark || !has_min_low_mark) { has_min_low_mark = true; min_low_mark = reader_low_mark; } if (reader->has_changes()) { all_acked = false; } return false; } ); bool something_changed = all_acked; SequenceNumber_t min_seq = get_seq_num_min(); if (min_seq != SequenceNumber_t::unknown()) { // In the case where we haven't received an acknack from a recently matched reader, // min_low_mark will be zero, and no change will be notified as received by all if (next_all_acked_notify_sequence_ <= min_low_mark) { if ((mp_listener != nullptr) && (min_low_mark >= get_seq_num_min())) { // We will inform backwards about the changes received by all readers, starting // on min_low_mark down until next_all_acked_notify_sequence_. This way we can // safely proceed with the traversal, in case a change is removed from the history // inside the callback History::iterator history_end = mp_history->changesEnd(); History::iterator cit = std::lower_bound(mp_history->changesBegin(), history_end, min_low_mark, []( const CacheChange_t* change, const SequenceNumber_t& seq) { return change->sequenceNumber < seq; }); if (cit != history_end && (*cit)->sequenceNumber == min_low_mark) { ++cit; } SequenceNumber_t seq{}; SequenceNumber_t end_seq = min_seq > next_all_acked_notify_sequence_ ? 
min_seq : next_all_acked_notify_sequence_; // The iterator starts pointing to the change inmediately after min_low_mark --cit; do { // Avoid notifying changes before next_all_acked_notify_sequence_ CacheChange_t* change = *cit; seq = change->sequenceNumber; if (seq < next_all_acked_notify_sequence_) { break; } // Change iterator before it possibly becomes invalidated if (cit != mp_history->changesBegin()) { --cit; } // Notify reception of change (may remove that change on VOLATILE writers) mp_listener->onWriterChangeReceivedByAll(this, change); // Stop if we got to either next_all_acked_notify_sequence_ or the first change } while (seq > end_seq); } next_all_acked_notify_sequence_ = min_low_mark + 1; } if (min_low_mark >= get_seq_num_min()) { may_remove_change_ = 1; } min_readers_low_mark_ = min_low_mark; something_changed = true; } if (all_acked) { std::unique_lock<std::mutex> all_acked_lock(all_acked_mutex_); SequenceNumber_t next_seq = mp_history->next_sequence_number(); next_all_acked_notify_sequence_ = next_seq; min_readers_low_mark_ = next_seq - 1; all_acked_ = true; all_acked_cond_.notify_all(); } if (something_changed) { may_remove_change_cond_.notify_one(); } } bool StatefulWriter::try_remove_change( const std::chrono::steady_clock::time_point& max_blocking_time_point, std::unique_lock<RecursiveTimedMutex>& lock) { logInfo(RTPS_WRITER, "Starting process try remove change for writer " << getGuid()); SequenceNumber_t min_low_mark; { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); min_low_mark = next_all_acked_notify_sequence_ - 1; } SequenceNumber_t calc = min_low_mark < get_seq_num_min() ? SequenceNumber_t() : (min_low_mark - get_seq_num_min()) + 1; unsigned int may_remove_change = 1; if (calc <= SequenceNumber_t()) { may_remove_change_ = 0; may_remove_change_cond_.wait_until(lock, max_blocking_time_point, [&]() { return may_remove_change_ > 0; }); may_remove_change = may_remove_change_; } // Some changes acked if (may_remove_change == 1) { return mp_history->remove_min_change(); } // Waiting a change was removed. 
else if (may_remove_change == 2) { return true; } return false; } bool StatefulWriter::wait_for_acknowledgement( const SequenceNumber_t& seq, const std::chrono::steady_clock::time_point& max_blocking_time_point, std::unique_lock<RecursiveTimedMutex>& lock) { return may_remove_change_cond_.wait_until(lock, max_blocking_time_point, [this, &seq]() { return is_acked_by_all(seq); }); } /* * PARAMETER_RELATED METHODS */ void StatefulWriter::updateAttributes( const WriterAttributes& att) { this->updateTimes(att.times); } void StatefulWriter::updateTimes( const WriterTimes& times) { std::lock_guard<RecursiveTimedMutex> guard(mp_mutex); if (m_times.heartbeatPeriod != times.heartbeatPeriod) { periodic_hb_event_->update_interval(times.heartbeatPeriod); } if (m_times.nackResponseDelay != times.nackResponseDelay) { if (nack_response_event_ != nullptr) { nack_response_event_->update_interval(times.nackResponseDelay); } } if (m_times.nackSupressionDuration != times.nackSupressionDuration) { for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [&times](ReaderProxy* reader) { reader->update_nack_supression_interval(times.nackSupressionDuration); return false; } ); for (ReaderProxy* it : matched_readers_pool_) { it->update_nack_supression_interval(times.nackSupressionDuration); } } m_times = times; } void StatefulWriter::add_flow_controller( std::unique_ptr<FlowController> controller) { m_controllers.push_back(std::move(controller)); } SequenceNumber_t StatefulWriter::next_sequence_number() const { return mp_history->next_sequence_number(); } bool StatefulWriter::send_periodic_heartbeat( bool final, bool liveliness) { std::lock_guard<RecursiveTimedMutex> guardW(mp_mutex); bool unacked_changes = false; if (!liveliness) { SequenceNumber_t firstSeq, lastSeq; firstSeq = get_seq_num_min(); lastSeq = get_seq_num_max(); if (firstSeq == c_SequenceNumber_Unknown || lastSeq == c_SequenceNumber_Unknown) { return false; } else { assert(firstSeq <= lastSeq); unacked_changes = for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [](ReaderProxy* reader) { return reader->has_unacknowledged(); } ); if (unacked_changes) { try { send_heartbeat_to_all_readers(); } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } } } else if (m_separateSendingEnabled) { for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this, &liveliness, &unacked_changes](ReaderProxy* reader) { send_heartbeat_to_nts(*reader, liveliness); unacked_changes = true; return false; } ); } else { // This is a liveliness heartbeat, we don't care about checking sequence numbers try { for (ReaderProxy* reader : matched_local_readers_) { intraprocess_heartbeat(reader, true); unacked_changes = true; } for (ReaderProxy* reader : matched_datasharing_readers_) { std::shared_ptr<WriterPool> p = std::dynamic_pointer_cast<WriterPool>(payload_pool_); assert(p); p->assert_liveliness(); reader->datasharing_notify(); unacked_changes = true; } if (there_are_remote_readers_) { unacked_changes = true; RTPSMessageGroup group(mp_RTPSParticipant, this, *this); send_heartbeat_nts_(all_remote_readers_.size(), group, final, liveliness); } } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } return unacked_changes; } void StatefulWriter::send_heartbeat_to_nts( ReaderProxy& remoteReaderProxy, bool liveliness, bool force /* = false */) { if 
(remoteReaderProxy.is_reliable() && (force || liveliness || remoteReaderProxy.has_unacknowledged())) { if (remoteReaderProxy.is_local_reader()) { intraprocess_heartbeat(&remoteReaderProxy, liveliness); } else if (remoteReaderProxy.is_datasharing_reader()) { remoteReaderProxy.datasharing_notify(); } else { try { RTPSMessageGroup group(mp_RTPSParticipant, this, remoteReaderProxy.message_sender()); send_heartbeat_nts_(1u, group, disable_positive_acks_, liveliness); SequenceNumber_t first_seq = get_seq_num_min(); if (first_seq != c_SequenceNumber_Unknown) { SequenceNumber_t first_relevant = remoteReaderProxy.first_relevant_sequence_number(); if (remoteReaderProxy.durability_kind() == VOLATILE && first_seq < first_relevant) { group.add_gap(first_seq, SequenceNumberSet_t(first_relevant)); } remoteReaderProxy.send_gaps(group, mp_history->next_sequence_number()); } } catch (const RTPSMessageGroup::timeout&) { logError(RTPS_WRITER, "Max blocking time reached"); } } } } void StatefulWriter::send_heartbeat_nts_( size_t number_of_readers, RTPSMessageGroup& message_group, bool final, bool liveliness) { if (!number_of_readers) { return; } SequenceNumber_t firstSeq = get_seq_num_min(); SequenceNumber_t lastSeq = get_seq_num_max(); if (firstSeq == c_SequenceNumber_Unknown || lastSeq == c_SequenceNumber_Unknown) { assert(firstSeq == c_SequenceNumber_Unknown && lastSeq == c_SequenceNumber_Unknown); if (number_of_readers == 1 || liveliness) { firstSeq = next_sequence_number(); lastSeq = firstSeq - 1; } else { return; } } else { assert(firstSeq <= lastSeq); } incrementHBCount(); message_group.add_heartbeat(firstSeq, lastSeq, m_heartbeatCount, final, liveliness); // Update calculate of heartbeat piggyback. currentUsageSendBufferSize_ = static_cast<int32_t>(sendBufferSize_); logInfo(RTPS_WRITER, getGuid().entityId << " Sending Heartbeat (" << firstSeq << " - " << lastSeq << ")" ); } void StatefulWriter::send_heartbeat_piggyback_nts_( ReaderProxy* reader, RTPSMessageGroup& message_group, uint32_t& last_bytes_processed) { if (!disable_heartbeat_piggyback_) { size_t number_of_readers = reader == nullptr ? 
all_remote_readers_.size() : 1u; if (mp_history->isFull()) { if (reader == nullptr) { locator_selector_.reset(true); if (locator_selector_.state_has_changed()) { message_group.flush_and_reset(); getRTPSParticipant()->network_factory().select_locators(locator_selector_); compute_selected_guids(); } } send_heartbeat_nts_(number_of_readers, message_group, disable_positive_acks_); } else { uint32_t current_bytes = message_group.get_current_bytes_processed(); currentUsageSendBufferSize_ -= current_bytes - last_bytes_processed; last_bytes_processed = current_bytes; if (currentUsageSendBufferSize_ < 0) { send_heartbeat_nts_(number_of_readers, message_group, disable_positive_acks_); } } } } void StatefulWriter::perform_nack_response() { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); bool must_wake_up_async_thread = false; uint32_t changes_to_resend = 0; for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [&must_wake_up_async_thread, &changes_to_resend](ReaderProxy* reader) { uint32_t pending = reader->perform_acknack_response(); changes_to_resend += pending; if ( pending > 0 || reader->are_there_gaps()) { must_wake_up_async_thread = true; // Do not exit the loop, perform_acknack_response must be executed for all readers } return false; } ); if (must_wake_up_async_thread) { mp_RTPSParticipant->async_thread().wake_up(this); } lock.unlock(); // Notify the statistics module on_resent_data(changes_to_resend); } void StatefulWriter::perform_nack_supression( const GUID_t& reader_guid) { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this, &reader_guid](ReaderProxy* reader) { if (reader->guid() == reader_guid) { reader->perform_nack_supression(); periodic_hb_event_->restart_timer(); return true; } return false; } ); } bool StatefulWriter::process_acknack( const GUID_t& writer_guid, const GUID_t& reader_guid, uint32_t ack_count, const SequenceNumberSet_t& sn_set, bool final_flag, bool& result) { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); result = (m_guid == writer_guid); if (result) { SequenceNumber_t received_sequence_number = sn_set.empty() ? sn_set.base() : sn_set.max(); if (received_sequence_number <= next_sequence_number()) { for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [&](ReaderProxy* remote_reader) { if (remote_reader->guid() == reader_guid) { if (remote_reader->check_and_set_acknack_count(ack_count)) { // Sequence numbers before Base are set as Acknowledged. remote_reader->acked_changes_set(sn_set.base()); if (sn_set.base() > SequenceNumber_t(0, 0)) { if (remote_reader->requested_changes_set(sn_set) || remote_reader->are_there_gaps()) { nack_response_event_->restart_timer(); } else if (!final_flag) { periodic_hb_event_->restart_timer(); } } else if (sn_set.empty() && !final_flag) { // This is the preemptive acknack. 
if (remote_reader->process_initial_acknack()) { if (!remote_reader->is_datasharing_reader()) { if (remote_reader->is_local_reader()) { mp_RTPSParticipant->async_thread().wake_up(this); } else { // Send heartbeat if requested send_heartbeat_to_nts(*remote_reader, false, true); } } } if (remote_reader->is_local_reader() && !remote_reader->is_datasharing_reader()) { intraprocess_heartbeat(remote_reader); } } // Check if all CacheChange are acknowledge, because a user could be waiting // for this, or some CacheChanges could be removed if we are VOLATILE check_acked_status(); } return true; } return false; } ); } else { print_inconsistent_acknack(writer_guid, reader_guid, sn_set.base(), received_sequence_number, next_sequence_number()); } } return result; } bool StatefulWriter::process_nack_frag( const GUID_t& writer_guid, const GUID_t& reader_guid, uint32_t ack_count, const SequenceNumber_t& seq_num, const FragmentNumberSet_t fragments_state, bool& result) { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); result = false; if (m_guid == writer_guid) { result = true; for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this, &reader_guid, &ack_count, &seq_num, &fragments_state](ReaderProxy* reader) { if (reader->guid() == reader_guid) { if (reader->process_nack_frag(reader_guid, ack_count, seq_num, fragments_state)) { nack_response_event_->restart_timer(); } return true; } return false; } ); } return result; } bool StatefulWriter::ack_timer_expired() { std::unique_lock<RecursiveTimedMutex> lock(mp_mutex); // The timer has expired so the earliest non-acked change must be marked as acknowledged // This will be done in the first while iteration, as we start with a negative interval auto interval = -keep_duration_us_; // On the other hand, we've seen in the tests that if samples are sent very quickly with little // time between consecutive samples, the timer interval could end up being negative // In this case, we keep marking changes as acknowledged until the timer is able to keep up, hence the while // loop while (interval.count() < 0) { for_matched_readers(matched_local_readers_, matched_datasharing_readers_, matched_remote_readers_, [this](ReaderProxy* reader) { if (reader->disable_positive_acks()) { reader->acked_changes_set(last_sequence_number_ + 1); } return false; } ); last_sequence_number_++; // Get the next cache change from the history CacheChange_t* change; if (!mp_history->get_change( last_sequence_number_, getGuid(), &change)) { return false; } auto source_timestamp = system_clock::time_point() + nanoseconds(change->sourceTimestamp.to_ns()); auto now = system_clock::now(); interval = source_timestamp - now + keep_duration_us_; } assert(interval.count() >= 0); ack_event_->update_interval_millisec((double)duration_cast<milliseconds>(interval).count()); return true; } void StatefulWriter::print_inconsistent_acknack( const GUID_t& writer_guid, const GUID_t& reader_guid, const SequenceNumber_t& min_requested_sequence_number, const SequenceNumber_t& max_requested_sequence_number, const SequenceNumber_t& next_sequence_number) { logWarning(RTPS_WRITER, "Inconsistent acknack received. Local Writer " << writer_guid << " next SequenceNumber " << next_sequence_number << ". 
Remote Reader " << reader_guid << " requested range is [" << min_requested_sequence_number << ", " << max_requested_sequence_number << "]."); // This is necessary to avoid Warning of unused variable in case warning log level is disable static_cast<void>(writer_guid); static_cast<void>(reader_guid); static_cast<void>(min_requested_sequence_number); static_cast<void>(max_requested_sequence_number); static_cast<void>(next_sequence_number); } void StatefulWriter::reader_data_filter( fastdds::rtps::IReaderDataFilter* reader_data_filter) { reader_data_filter_ = reader_data_filter; } const fastdds::rtps::IReaderDataFilter* StatefulWriter::reader_data_filter() const { return reader_data_filter_; } } // namespace rtps } // namespace fastrtps } // namespace eprosima
1
22,200
I think this method should be either:
- a static method of `RTPSWriter`, to avoid a `StatelessWriter` redefinition of the function, or
- a setter in the `CacheChange_t` struct.

(See the sketch after this record for an illustration of both options.)
eProsima-Fast-DDS
cpp
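A minimal sketch of the two alternatives the reviewer mentions, assuming the comment targets the sample-identity adjustment made in `intraprocess_delivery` (copying `related_sample_identity` into `sample_identity` when the former is known). The types below are tiny stand-ins for the real Fast-DDS ones, and the helper names `set_sample_identity_from_related` and `update_sample_identity_from_related` are hypothetical, not part of the actual API.

```cpp
// Illustrative sketch only: SampleIdentity, WriteParams and CacheChange_t are
// stand-ins reduced to the fields this snippet needs; they are not the real types.
#include <cstdint>
#include <iostream>

struct SampleIdentity
{
    uint64_t value = 0;
    static SampleIdentity unknown() { return SampleIdentity{}; }
    bool operator!=(const SampleIdentity& other) const { return value != other.value; }
};

struct WriteParams
{
    SampleIdentity sample_identity_;
    SampleIdentity related_sample_identity_;

    // Option B (reviewer's second alternative): a setter on the change's write
    // parameters that promotes the related identity to the outgoing one when known.
    void set_sample_identity_from_related()
    {
        if (related_sample_identity_ != SampleIdentity::unknown())
        {
            sample_identity_ = related_sample_identity_;
        }
    }
};

struct CacheChange_t
{
    WriteParams write_params;
};

// Option A (reviewer's first alternative): a static helper on the writer base
// class, so StatefulWriter and StatelessWriter share one definition.
struct RTPSWriter
{
    static void update_sample_identity_from_related(CacheChange_t& change)
    {
        change.write_params.set_sample_identity_from_related();
    }
};

int main()
{
    CacheChange_t change;
    change.write_params.related_sample_identity_.value = 42;

    RTPSWriter::update_sample_identity_from_related(change);

    std::cout << "sample identity: " << change.write_params.sample_identity_.value << "\n";
    return 0;
}
```

Either form would give `StatelessWriter` and `StatefulWriter` a single shared definition instead of each carrying its own copy of the logic.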
@@ -74,10 +74,11 @@ namespace Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http2 } } - // Review: This is called when a CancellationToken fires mid-write. In HTTP/1.x, this aborts the entire connection. - // Should we do that here? + // This is called when a CancellationToken fires mid-write. In HTTP/1.x, this aborts the entire connection. + // For HTTP/2 we abort the stream. void IHttpOutputAborter.Abort(ConnectionAbortedException abortReason) { + _stream.ResetAndAbort(abortReason, Http2ErrorCode.INTERNAL_ERROR); Dispose(); }
1
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Buffers; using System.Diagnostics; using System.IO.Pipelines; using System.Threading; using System.Threading.Tasks; using Microsoft.AspNetCore.Connections; using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http; using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http2.FlowControl; using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Infrastructure; using Microsoft.AspNetCore.Server.Kestrel.Transport.Abstractions.Internal; namespace Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http2 { public class Http2OutputProducer : IHttpOutputProducer, IHttpOutputAborter { private readonly int _streamId; private readonly Http2FrameWriter _frameWriter; private readonly TimingPipeFlusher _flusher; // This should only be accessed via the FrameWriter. The connection-level output flow control is protected by the // FrameWriter's connection-level write lock. private readonly StreamOutputFlowControl _flowControl; private readonly Http2Stream _stream; private readonly object _dataWriterLock = new object(); private readonly Pipe _dataPipe; private readonly Task _dataWriteProcessingTask; private bool _startedWritingDataFrames; private bool _completed; private bool _disposed; public Http2OutputProducer( int streamId, Http2FrameWriter frameWriter, StreamOutputFlowControl flowControl, ITimeoutControl timeoutControl, MemoryPool<byte> pool, Http2Stream stream) { _streamId = streamId; _frameWriter = frameWriter; _flowControl = flowControl; _stream = stream; _dataPipe = CreateDataPipe(pool); _flusher = new TimingPipeFlusher(_dataPipe.Writer, timeoutControl); _dataWriteProcessingTask = ProcessDataWrites(); } public void Dispose() { lock (_dataWriterLock) { if (_disposed) { return; } _disposed = true; if (!_completed) { _completed = true; // Complete with an exception to prevent an end of stream data frame from being sent without an // explicit call to WriteStreamSuffixAsync. ConnectionAbortedExceptions are swallowed, so the // message doesn't matter _dataPipe.Writer.Complete(new OperationCanceledException()); } _frameWriter.AbortPendingStreamDataWrites(_flowControl); } } // Review: This is called when a CancellationToken fires mid-write. In HTTP/1.x, this aborts the entire connection. // Should we do that here? void IHttpOutputAborter.Abort(ConnectionAbortedException abortReason) { Dispose(); } public Task WriteAsync<T>(Func<PipeWriter, T, long> callback, T state, CancellationToken cancellationToken) { throw new NotImplementedException(); } public Task FlushAsync(CancellationToken cancellationToken) { if (cancellationToken.IsCancellationRequested) { return Task.FromCanceled(cancellationToken); } lock (_dataWriterLock) { if (_completed) { return Task.CompletedTask; } if (_startedWritingDataFrames) { // If there's already been response data written to the stream, just wait for that. Any header // should be in front of the data frames in the connection pipe. Trailers could change things. return _flusher.FlushAsync(this, cancellationToken); } else { // Flushing the connection pipe ensures headers already in the pipe are flushed even if no data // frames have been written. 
return _frameWriter.FlushAsync(this, cancellationToken); } } } public Task Write100ContinueAsync() { lock (_dataWriterLock) { if (_completed) { return Task.CompletedTask; } return _frameWriter.Write100ContinueAsync(_streamId); } } public void WriteResponseHeaders(int statusCode, string ReasonPhrase, HttpResponseHeaders responseHeaders) { lock (_dataWriterLock) { // The HPACK header compressor is stateful, if we compress headers for an aborted stream we must send them. // Optimize for not compressing or sending them. if (_completed) { return; } _frameWriter.WriteResponseHeaders(_streamId, statusCode, responseHeaders); } } public Task WriteDataAsync(ReadOnlySpan<byte> data, CancellationToken cancellationToken) { if (cancellationToken.IsCancellationRequested) { return Task.FromCanceled(cancellationToken); } lock (_dataWriterLock) { // This length check is important because we don't want to set _startedWritingDataFrames unless a data // frame will actually be written causing the headers to be flushed. if (_completed || data.Length == 0) { return Task.CompletedTask; } _startedWritingDataFrames = true; _dataPipe.Writer.Write(data); return _flusher.FlushAsync(this, cancellationToken); } } public Task WriteStreamSuffixAsync() { lock (_dataWriterLock) { if (_completed) { return Task.CompletedTask; } _completed = true; _dataPipe.Writer.Complete(); return _dataWriteProcessingTask; } } public Task WriteRstStreamAsync(Http2ErrorCode error) { lock (_dataWriterLock) { // Always send the reset even if the response body is _completed. The request body may not have completed yet. Dispose(); return _frameWriter.WriteRstStreamAsync(_streamId, error); } } private async Task ProcessDataWrites() { try { ReadResult readResult; do { readResult = await _dataPipe.Reader.ReadAsync(); if (readResult.IsCompleted && _stream.Trailers?.Count > 0) { if (readResult.Buffer.Length > 0) { await _frameWriter.WriteDataAsync(_streamId, _flowControl, _stream.MinResponseDataRate, readResult.Buffer, endStream: false); } await _frameWriter.WriteResponseTrailers(_streamId, _stream.Trailers); } else { await _frameWriter.WriteDataAsync(_streamId, _flowControl, _stream.MinResponseDataRate, readResult.Buffer, endStream: readResult.IsCompleted); } _dataPipe.Reader.AdvanceTo(readResult.Buffer.End); } while (!readResult.IsCompleted); } catch (OperationCanceledException) { // Writes should not throw for aborted streams/connections. } catch (Exception ex) { Debug.Assert(false, ex.ToString()); } _dataPipe.Reader.Complete(); } private static Pipe CreateDataPipe(MemoryPool<byte> pool) => new Pipe(new PipeOptions ( pool: pool, readerScheduler: PipeScheduler.Inline, writerScheduler: PipeScheduler.ThreadPool, pauseWriterThreshold: 1, resumeWriterThreshold: 1, useSynchronizationContext: false, minimumSegmentSize: KestrelMemoryPool.MinimumSegmentSize )); } }
1
16,971
You add a stream back-reference for trailers, and all of a sudden we have tight coupling!
aspnet-KestrelHttpServer
.cs
@@ -2,6 +2,7 @@ using System;
 using System.Collections;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
+using System.Linq;
 using Datadog.Trace.ClrProfiler.Emit;
 using Datadog.Trace.ExtensionMethods;
 using Datadog.Trace.Headers;
1
using System; using System.Collections; using System.Collections.Concurrent; using System.Collections.Generic; using Datadog.Trace.ClrProfiler.Emit; using Datadog.Trace.ExtensionMethods; using Datadog.Trace.Headers; using Datadog.Trace.Logging; using Datadog.Trace.Util; namespace Datadog.Trace.ClrProfiler.Integrations { internal class AspNetAmbientContext : IDisposable { private static readonly string HttpContextKey = "__Datadog_web_request_ambient_context__"; private static readonly string TopLevelOperationName = "web.request"; private static readonly string StartupDiagnosticMethod = "DEBUG"; private static readonly Vendors.Serilog.ILogger Log = DatadogLogging.GetLogger(typeof(AspNetAmbientContext)); private readonly ConcurrentStack<IDisposable> _disposables = new ConcurrentStack<IDisposable>(); private readonly ConcurrentDictionary<string, Scope> _scopeStorage = new ConcurrentDictionary<string, Scope>(); private readonly object _httpContext; private readonly Scope _rootScope; private AspNetAmbientContext(string integrationName, object httpContext) { try { Tracer = Tracer.Instance; _httpContext = httpContext; var request = _httpContext.GetProperty("Request").GetValueOrDefault(); var response = _httpContext.GetProperty("Response").GetValueOrDefault(); GetTagValues( request, out string absoluteUri, out string httpMethod, out string host, out string resourceName); if (httpMethod == StartupDiagnosticMethod) { // An initial diagnostic HttpContext is created on the start of many web applications AbortRegistration = true; return; } RegisterForDisposalWithPipeline(response, this); SpanContext propagatedContext = null; if (Tracer.ActiveScope == null) { try { // extract propagated http headers var requestHeaders = request.GetProperty<IEnumerable>("Headers").GetValueOrDefault(); if (requestHeaders != null) { var headersCollection = new DictionaryHeadersCollection(); foreach (object header in requestHeaders) { var key = header.GetProperty<string>("Key").GetValueOrDefault(); var values = header.GetProperty<IList<string>>("Value").GetValueOrDefault(); if (key != null && values != null) { headersCollection.Add(key, values); } } propagatedContext = SpanContextPropagator.Instance.Extract(headersCollection); } } catch (Exception ex) { Log.Error(ex, "Error extracting propagated HTTP headers."); } } _rootScope = Tracer.StartActive(TopLevelOperationName, propagatedContext); RegisterForDisposal(_rootScope); var span = _rootScope.Span; span.DecorateWebServerSpan( resourceName: resourceName, method: httpMethod, host: host, httpUrl: absoluteUri); var statusCode = response.GetProperty<int>("StatusCode"); if (statusCode.HasValue) { span.SetTag(Tags.HttpStatusCode, statusCode.Value.ToString()); } var analyticSampleRate = Tracer.Settings.GetIntegrationAnalyticsSampleRate(integrationName, enabledWithGlobalSetting: true); span.SetMetric(Tags.Analytics, analyticSampleRate); } catch (Exception ex) { // Don't crash client apps Log.Error(ex, $"Exception when initializing {nameof(AspNetAmbientContext)}."); } } /// <summary> /// Gets the instance of the Tracer for this web request. /// Ensure that the same Tracer instance is used throughout an entire request. /// </summary> internal Tracer Tracer { get; } /// <summary> /// Gets the root span for this web request. /// </summary> internal Span RootSpan => _rootScope?.Span; /// <summary> /// Gets a value indicating whether this context should be registered. 
/// </summary> internal bool AbortRegistration { get; } public void Dispose() { try { var request = _httpContext.GetProperty("Response"); var statusCodeResult = request.GetProperty<int>("StatusCode"); if (statusCodeResult.HasValue) { SetStatusCode(statusCodeResult.Value); } } catch (Exception ex) { // No exceptions in dispose Log.Error(ex, "Exception when trying to populate data at the end of the request pipeline."); } while (_disposables.TryPop(out IDisposable registeredDisposable)) { try { registeredDisposable?.Dispose(); } catch (Exception ex) { // No exceptions in dispose Log.Error(ex, $"Exception when disposing {registeredDisposable?.GetType().FullName ?? "NULL"}."); } } } /// <summary> /// Responsible for setting up an overarching Scope and then registering with the end of pipeline disposal. /// </summary> /// <param name="httpContext">Instance of Microsoft.AspNetCore.Http.DefaultHttpContext</param> internal static void Initialize(object httpContext) { var context = new AspNetAmbientContext(TopLevelOperationName, httpContext); if (context.AbortRegistration) { return; } if (httpContext.TryGetPropertyValue("Items", out IDictionary<object, object> contextItems)) { contextItems[HttpContextKey] = context; } } internal static AspNetAmbientContext RetrieveFromHttpContext(object httpContext) { AspNetAmbientContext context = null; try { if (httpContext.TryGetPropertyValue("Items", out IDictionary<object, object> contextItems)) { if (contextItems?.ContainsKey(HttpContextKey) ?? false) { context = contextItems[HttpContextKey] as AspNetAmbientContext; } } } catch (Exception ex) { Log.Error(ex, $"Error accessing {nameof(AspNetAmbientContext)}."); } return context; } internal bool TryPersistScope(string key, Scope scope) { return _scopeStorage.TryAdd(key, scope); } internal bool TryRetrieveScope(string key, out Scope scope) { return _scopeStorage.TryGetValue(key, out scope); } internal void RegisterForDisposal(IDisposable disposable) { _disposables.Push(disposable); } internal void SetStatusCode(int statusCode) { SetTagOnRootSpan(Tags.HttpStatusCode, statusCode.ToString()); } internal void SetTagOnRootSpan(string tag, string value) { _rootScope?.Span?.SetTag(tag, value); } internal void SetMetricOnRootSpan(string tag, double? value) { _rootScope?.Span?.SetMetric(tag, value); } internal bool SetExceptionOnRootSpan(Exception ex) { _rootScope?.Span?.SetException(ex); // Return false for use in exception filters return false; } internal void ResetWebServerRootTags(string resourceName, string method) { if (_rootScope?.Span != null) { if (!string.IsNullOrWhiteSpace(resourceName)) { _rootScope.Span.ResourceName = resourceName?.Trim(); } if (!string.IsNullOrWhiteSpace(method)) { SetTagOnRootSpan(Tags.HttpMethod, method); } } } private static void GetTagValues( object request, out string url, out string httpMethod, out string host, out string resourceName) { host = request.GetProperty("Host").GetProperty<string>("Value").GetValueOrDefault(); httpMethod = request.GetProperty<string>("Method").GetValueOrDefault()?.ToUpperInvariant() ?? "UNKNOWN"; string pathBase = request.GetProperty("PathBase").GetProperty<string>("Value").GetValueOrDefault(); string path = request.GetProperty("Path").GetProperty<string>("Value").GetValueOrDefault(); string queryString = request.GetProperty("QueryString").GetProperty<string>("Value").GetValueOrDefault(); string scheme = request.GetProperty<string>("Scheme").GetValueOrDefault()?.ToUpperInvariant() ?? 
"http"; url = $"{pathBase}{path}{queryString}"; string resourceUrl = UriHelpers.GetRelativeUrl(new Uri($"{scheme}://{host}{url}"), tryRemoveIds: true).ToLowerInvariant(); resourceName = $"{httpMethod} {resourceUrl}"; } private static void RegisterForDisposalWithPipeline(object response, IDisposable disposable) { try { if (response == null) { Log.Error($"HttpContext.Response is null, unable to register {disposable.GetType().FullName}"); return; } var disposalRegisterMethod = response.GetType().GetMethod("RegisterForDispose"); disposalRegisterMethod.Invoke(response, new object[] { disposable }); } catch (Exception ex) { Log.Error(ex, $"Unable to register {disposable.GetType().FullName}"); } } } }
1
17,319
I don't think we use `AspNetAmbientContext` anymore since . We can probably delete this file.
DataDog-dd-trace-dotnet
.cs
@@ -65,14 +65,14 @@ return modules;
 }([
 /* 0 */
-/***/ function(module, exports, __webpack_require__) {
+/***/ (function(module, exports, __webpack_require__) {
 
 	module.exports = __webpack_require__(1);
 
-/***/ },
+/***/ }),
 /* 1 */
-/***/ function(module, exports, __webpack_require__) {
+/***/ (function(module, exports, __webpack_require__) {
 
 	'use strict';
1
/******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) /******/ return installedModules[moduleId].exports; /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ exports: {}, /******/ id: moduleId, /******/ loaded: false /******/ }; /******/ // Execute the module function /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ // Flag the module as loaded /******/ module.loaded = true; /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ // expose the module cache /******/ __webpack_require__.c = installedModules; /******/ // __webpack_public_path__ /******/ __webpack_require__.p = "/"; /******/ // Load entry module and return exports /******/ return __webpack_require__(0); /******/ }) /************************************************************************/ /******/ ((function(modules) { // Check all modules for deduplicated modules for(var i in modules) { if(Object.prototype.hasOwnProperty.call(modules, i)) { switch(typeof modules[i]) { case "function": break; case "object": // Module can be created from a template modules[i] = (function(_m) { var args = _m.slice(1), fn = modules[_m[0]]; return function (a,b,c) { fn.apply(this, [a,b,c].concat(args)); }; }(modules[i])); break; default: // Module is a copy of another module modules[i] = modules[modules[i]]; break; } } } return modules; }([ /* 0 */ /***/ function(module, exports, __webpack_require__) { module.exports = __webpack_require__(1); /***/ }, /* 1 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDom = __webpack_require__(29); var _reactDom2 = _interopRequireDefault(_reactDom); var _root = __webpack_require__(167); var _root2 = _interopRequireDefault(_root); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } _reactDom2.default.render(_react2.default.createElement(_root2.default, null), document.getElementById('app')); /***/ }, /* 2 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; module.exports = __webpack_require__(3); /***/ }, /* 3 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var _assign = __webpack_require__(4); var ReactChildren = __webpack_require__(5); var ReactComponent = __webpack_require__(18); var ReactPureComponent = __webpack_require__(21); var ReactClass = __webpack_require__(22); var ReactDOMFactories = __webpack_require__(24); var ReactElement = __webpack_require__(9); var ReactPropTypes = __webpack_require__(25); var ReactVersion = __webpack_require__(27); var onlyChild = __webpack_require__(28); var warning = __webpack_require__(11); var createElement = ReactElement.createElement; var createFactory = ReactElement.createFactory; var cloneElement = ReactElement.cloneElement; if (false) { var ReactElementValidator = require('./ReactElementValidator'); createElement = ReactElementValidator.createElement; createFactory = ReactElementValidator.createFactory; cloneElement = ReactElementValidator.cloneElement; } var __spread = _assign; if (false) { var warned = false; __spread = function () { process.env.NODE_ENV !== 'production' ? warning(warned, 'React.__spread is deprecated and should not be used. Use ' + 'Object.assign directly or another helper function with similar ' + 'semantics. You may be seeing this warning due to your compiler. ' + 'See https://fb.me/react-spread-deprecation for more details.') : void 0; warned = true; return _assign.apply(null, arguments); }; } var React = { // Modern Children: { map: ReactChildren.map, forEach: ReactChildren.forEach, count: ReactChildren.count, toArray: ReactChildren.toArray, only: onlyChild }, Component: ReactComponent, PureComponent: ReactPureComponent, createElement: createElement, cloneElement: cloneElement, isValidElement: ReactElement.isValidElement, // Classic PropTypes: ReactPropTypes, createClass: ReactClass.createClass, createFactory: createFactory, createMixin: function (mixin) { // Currently a noop. Will be used to validate and trace mixins. return mixin; }, // This looks DOM specific but these are actually isomorphic helpers // since they are just generating DOM strings. DOM: ReactDOMFactories, version: ReactVersion, // Deprecated hook for JSX spread, don't use this for anything. __spread: __spread }; module.exports = React; /***/ }, /* 4 */ /***/ function(module, exports) { 'use strict'; /* eslint-disable no-unused-vars */ var hasOwnProperty = Object.prototype.hasOwnProperty; var propIsEnumerable = Object.prototype.propertyIsEnumerable; function toObject(val) { if (val === null || val === undefined) { throw new TypeError('Object.assign cannot be called with null or undefined'); } return Object(val); } function shouldUseNative() { try { if (!Object.assign) { return false; } // Detect buggy property enumeration order in older V8 versions. // https://bugs.chromium.org/p/v8/issues/detail?id=4118 var test1 = new String('abc'); // eslint-disable-line test1[5] = 'de'; if (Object.getOwnPropertyNames(test1)[0] === '5') { return false; } // https://bugs.chromium.org/p/v8/issues/detail?id=3056 var test2 = {}; for (var i = 0; i < 10; i++) { test2['_' + String.fromCharCode(i)] = i; } var order2 = Object.getOwnPropertyNames(test2).map(function (n) { return test2[n]; }); if (order2.join('') !== '0123456789') { return false; } // https://bugs.chromium.org/p/v8/issues/detail?id=3056 var test3 = {}; 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { test3[letter] = letter; }); if (Object.keys(Object.assign({}, test3)).join('') !== 'abcdefghijklmnopqrst') { return false; } return true; } catch (e) { // We don't expect any of the above to throw, but better to be safe. 
return false; } } module.exports = shouldUseNative() ? Object.assign : function (target, source) { var from; var to = toObject(target); var symbols; for (var s = 1; s < arguments.length; s++) { from = Object(arguments[s]); for (var key in from) { if (hasOwnProperty.call(from, key)) { to[key] = from[key]; } } if (Object.getOwnPropertySymbols) { symbols = Object.getOwnPropertySymbols(from); for (var i = 0; i < symbols.length; i++) { if (propIsEnumerable.call(from, symbols[i])) { to[symbols[i]] = from[symbols[i]]; } } } } return to; }; /***/ }, /* 5 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var PooledClass = __webpack_require__(6); var ReactElement = __webpack_require__(9); var emptyFunction = __webpack_require__(12); var traverseAllChildren = __webpack_require__(15); var twoArgumentPooler = PooledClass.twoArgumentPooler; var fourArgumentPooler = PooledClass.fourArgumentPooler; var userProvidedKeyEscapeRegex = /\/+/g; function escapeUserProvidedKey(text) { return ('' + text).replace(userProvidedKeyEscapeRegex, '$&/'); } /** * PooledClass representing the bookkeeping associated with performing a child * traversal. Allows avoiding binding callbacks. * * @constructor ForEachBookKeeping * @param {!function} forEachFunction Function to perform traversal with. * @param {?*} forEachContext Context to perform context with. */ function ForEachBookKeeping(forEachFunction, forEachContext) { this.func = forEachFunction; this.context = forEachContext; this.count = 0; } ForEachBookKeeping.prototype.destructor = function () { this.func = null; this.context = null; this.count = 0; }; PooledClass.addPoolingTo(ForEachBookKeeping, twoArgumentPooler); function forEachSingleChild(bookKeeping, child, name) { var func = bookKeeping.func, context = bookKeeping.context; func.call(context, child, bookKeeping.count++); } /** * Iterates through children that are typically specified as `props.children`. * * See https://facebook.github.io/react/docs/top-level-api.html#react.children.foreach * * The provided forEachFunc(child, index) will be called for each * leaf child. * * @param {?*} children Children tree container. * @param {function(*, int)} forEachFunc * @param {*} forEachContext Context for forEachContext. */ function forEachChildren(children, forEachFunc, forEachContext) { if (children == null) { return children; } var traverseContext = ForEachBookKeeping.getPooled(forEachFunc, forEachContext); traverseAllChildren(children, forEachSingleChild, traverseContext); ForEachBookKeeping.release(traverseContext); } /** * PooledClass representing the bookkeeping associated with performing a child * mapping. Allows avoiding binding callbacks. * * @constructor MapBookKeeping * @param {!*} mapResult Object containing the ordered map of results. * @param {!function} mapFunction Function to perform mapping with. * @param {?*} mapContext Context to perform mapping with. 
*/ function MapBookKeeping(mapResult, keyPrefix, mapFunction, mapContext) { this.result = mapResult; this.keyPrefix = keyPrefix; this.func = mapFunction; this.context = mapContext; this.count = 0; } MapBookKeeping.prototype.destructor = function () { this.result = null; this.keyPrefix = null; this.func = null; this.context = null; this.count = 0; }; PooledClass.addPoolingTo(MapBookKeeping, fourArgumentPooler); function mapSingleChildIntoContext(bookKeeping, child, childKey) { var result = bookKeeping.result, keyPrefix = bookKeeping.keyPrefix, func = bookKeeping.func, context = bookKeeping.context; var mappedChild = func.call(context, child, bookKeeping.count++); if (Array.isArray(mappedChild)) { mapIntoWithKeyPrefixInternal(mappedChild, result, childKey, emptyFunction.thatReturnsArgument); } else if (mappedChild != null) { if (ReactElement.isValidElement(mappedChild)) { mappedChild = ReactElement.cloneAndReplaceKey(mappedChild, // Keep both the (mapped) and old keys if they differ, just as // traverseAllChildren used to do for objects as children keyPrefix + (mappedChild.key && (!child || child.key !== mappedChild.key) ? escapeUserProvidedKey(mappedChild.key) + '/' : '') + childKey); } result.push(mappedChild); } } function mapIntoWithKeyPrefixInternal(children, array, prefix, func, context) { var escapedPrefix = ''; if (prefix != null) { escapedPrefix = escapeUserProvidedKey(prefix) + '/'; } var traverseContext = MapBookKeeping.getPooled(array, escapedPrefix, func, context); traverseAllChildren(children, mapSingleChildIntoContext, traverseContext); MapBookKeeping.release(traverseContext); } /** * Maps children that are typically specified as `props.children`. * * See https://facebook.github.io/react/docs/top-level-api.html#react.children.map * * The provided mapFunction(child, key, index) will be called for each * leaf child. * * @param {?*} children Children tree container. * @param {function(*, int)} func The map function. * @param {*} context Context for mapFunction. * @return {object} Object containing the ordered map of results. */ function mapChildren(children, func, context) { if (children == null) { return children; } var result = []; mapIntoWithKeyPrefixInternal(children, result, null, func, context); return result; } function forEachSingleChildDummy(traverseContext, child, name) { return null; } /** * Count the number of children that are typically specified as * `props.children`. * * See https://facebook.github.io/react/docs/top-level-api.html#react.children.count * * @param {?*} children Children tree container. * @return {number} The number of children. */ function countChildren(children, context) { return traverseAllChildren(children, forEachSingleChildDummy, null); } /** * Flatten a children object (typically specified as `props.children`) and * return an array with appropriately re-keyed children. * * See https://facebook.github.io/react/docs/top-level-api.html#react.children.toarray */ function toArray(children) { var result = []; mapIntoWithKeyPrefixInternal(children, result, null, emptyFunction.thatReturnsArgument); return result; } var ReactChildren = { forEach: forEachChildren, map: mapChildren, mapIntoWithKeyPrefixInternal: mapIntoWithKeyPrefixInternal, count: countChildren, toArray: toArray }; module.exports = ReactChildren; /***/ }, /* 6 */ [639, 7], /* 7 */ /***/ function(module, exports) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. 
* * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; /** * WARNING: DO NOT manually require this module. * This is a replacement for `invariant(...)` used by the error code system * and will _only_ be required by the corresponding babel pass. * It always throws. */ function reactProdInvariant(code) { var argCount = arguments.length - 1; var message = 'Minified React error #' + code + '; visit ' + 'http://facebook.github.io/react/docs/error-decoder.html?invariant=' + code; for (var argIdx = 0; argIdx < argCount; argIdx++) { message += '&args[]=' + encodeURIComponent(arguments[argIdx + 1]); } message += ' for the full message or use the non-minified dev environment' + ' for full errors and additional helpful warnings.'; var error = new Error(message); error.name = 'Invariant Violation'; error.framesToPop = 1; // we don't care about reactProdInvariant's own frame throw error; } module.exports = reactProdInvariant; /***/ }, /* 8 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * Use invariant() to assert state which your program assumes to be true. * * Provide sprintf-style format (only %s is supported) and arguments * to provide information about what broke and what you were * expecting. * * The invariant message will be stripped in production, but the invariant * will remain to ensure logic does not differ in production. */ function invariant(condition, format, a, b, c, d, e, f) { if (false) { if (format === undefined) { throw new Error('invariant requires an error message argument'); } } if (!condition) { var error; if (format === undefined) { error = new Error('Minified exception occurred; use the non-minified dev environment ' + 'for the full error message and additional helpful warnings.'); } else { var args = [a, b, c, d, e, f]; var argIndex = 0; error = new Error(format.replace(/%s/g, function () { return args[argIndex++]; })); error.name = 'Invariant Violation'; } error.framesToPop = 1; // we don't care about invariant's own frame throw error; } } module.exports = invariant; /***/ }, /* 9 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var _assign = __webpack_require__(4); var ReactCurrentOwner = __webpack_require__(10); var warning = __webpack_require__(11); var canDefineProperty = __webpack_require__(13); var hasOwnProperty = Object.prototype.hasOwnProperty; var REACT_ELEMENT_TYPE = __webpack_require__(14); var RESERVED_PROPS = { key: true, ref: true, __self: true, __source: true }; var specialPropKeyWarningShown, specialPropRefWarningShown; function hasValidRef(config) { if (false) { if (hasOwnProperty.call(config, 'ref')) { var getter = Object.getOwnPropertyDescriptor(config, 'ref').get; if (getter && getter.isReactWarning) { return false; } } } return config.ref !== undefined; } function hasValidKey(config) { if (false) { if (hasOwnProperty.call(config, 'key')) { var getter = Object.getOwnPropertyDescriptor(config, 'key').get; if (getter && getter.isReactWarning) { return false; } } } return config.key !== undefined; } function defineKeyPropWarningGetter(props, displayName) { var warnAboutAccessingKey = function () { if (!specialPropKeyWarningShown) { specialPropKeyWarningShown = true; false ? warning(false, '%s: `key` is not a prop. Trying to access it will result ' + 'in `undefined` being returned. If you need to access the same ' + 'value within the child component, you should pass it as a different ' + 'prop. (https://fb.me/react-special-props)', displayName) : void 0; } }; warnAboutAccessingKey.isReactWarning = true; Object.defineProperty(props, 'key', { get: warnAboutAccessingKey, configurable: true }); } function defineRefPropWarningGetter(props, displayName) { var warnAboutAccessingRef = function () { if (!specialPropRefWarningShown) { specialPropRefWarningShown = true; false ? warning(false, '%s: `ref` is not a prop. Trying to access it will result ' + 'in `undefined` being returned. If you need to access the same ' + 'value within the child component, you should pass it as a different ' + 'prop. (https://fb.me/react-special-props)', displayName) : void 0; } }; warnAboutAccessingRef.isReactWarning = true; Object.defineProperty(props, 'ref', { get: warnAboutAccessingRef, configurable: true }); } /** * Factory method to create a new React element. This no longer adheres to * the class pattern, so do not use new to call it. Also, no instanceof check * will work. Instead test $$typeof field against Symbol.for('react.element') to check * if something is a React Element. * * @param {*} type * @param {*} key * @param {string|object} ref * @param {*} self A *temporary* helper to detect places where `this` is * different from the `owner` when React.createElement is called, so that we * can warn. We want to get rid of owner and replace string `ref`s with arrow * functions, and as long as `this` and owner are the same, there will be no * change in behavior. * @param {*} source An annotation object (added by a transpiler or otherwise) * indicating filename, line number, and/or other information. * @param {*} owner * @param {*} props * @internal */ var ReactElement = function (type, key, ref, self, source, owner, props) { var element = { // This tag allow us to uniquely identify this as a React Element $$typeof: REACT_ELEMENT_TYPE, // Built-in properties that belong on the element type: type, key: key, ref: ref, props: props, // Record the component responsible for creating this element. _owner: owner }; if (false) { // The validation flag is currently mutative. We put it on // an external backing store so that we can freeze the whole object. 
// This can be replaced with a WeakMap once they are implemented in // commonly used development environments. element._store = {}; // To make comparing ReactElements easier for testing purposes, we make // the validation flag non-enumerable (where possible, which should // include every environment we run tests in), so the test framework // ignores it. if (canDefineProperty) { Object.defineProperty(element._store, 'validated', { configurable: false, enumerable: false, writable: true, value: false }); // self and source are DEV only properties. Object.defineProperty(element, '_self', { configurable: false, enumerable: false, writable: false, value: self }); // Two elements created in two different places should be considered // equal for testing purposes and therefore we hide it from enumeration. Object.defineProperty(element, '_source', { configurable: false, enumerable: false, writable: false, value: source }); } else { element._store.validated = false; element._self = self; element._source = source; } if (Object.freeze) { Object.freeze(element.props); Object.freeze(element); } } return element; }; /** * Create and return a new ReactElement of the given type. * See https://facebook.github.io/react/docs/top-level-api.html#react.createelement */ ReactElement.createElement = function (type, config, children) { var propName; // Reserved names are extracted var props = {}; var key = null; var ref = null; var self = null; var source = null; if (config != null) { if (hasValidRef(config)) { ref = config.ref; } if (hasValidKey(config)) { key = '' + config.key; } self = config.__self === undefined ? null : config.__self; source = config.__source === undefined ? null : config.__source; // Remaining properties are added to a new props object for (propName in config) { if (hasOwnProperty.call(config, propName) && !RESERVED_PROPS.hasOwnProperty(propName)) { props[propName] = config[propName]; } } } // Children can be more than one argument, and those are transferred onto // the newly allocated props object. var childrenLength = arguments.length - 2; if (childrenLength === 1) { props.children = children; } else if (childrenLength > 1) { var childArray = Array(childrenLength); for (var i = 0; i < childrenLength; i++) { childArray[i] = arguments[i + 2]; } if (false) { if (Object.freeze) { Object.freeze(childArray); } } props.children = childArray; } // Resolve default props if (type && type.defaultProps) { var defaultProps = type.defaultProps; for (propName in defaultProps) { if (props[propName] === undefined) { props[propName] = defaultProps[propName]; } } } if (false) { if (key || ref) { if (typeof props.$$typeof === 'undefined' || props.$$typeof !== REACT_ELEMENT_TYPE) { var displayName = typeof type === 'function' ? type.displayName || type.name || 'Unknown' : type; if (key) { defineKeyPropWarningGetter(props, displayName); } if (ref) { defineRefPropWarningGetter(props, displayName); } } } } return ReactElement(type, key, ref, self, source, ReactCurrentOwner.current, props); }; /** * Return a function that produces ReactElements of a given type. * See https://facebook.github.io/react/docs/top-level-api.html#react.createfactory */ ReactElement.createFactory = function (type) { var factory = ReactElement.createElement.bind(null, type); // Expose the type on the factory and the prototype so that it can be // easily accessed on elements. E.g. `<Foo />.type === Foo`. 
// This should not be named `constructor` since this may not be the function // that created the element, and it may not even be a constructor. // Legacy hook TODO: Warn if this is accessed factory.type = type; return factory; }; ReactElement.cloneAndReplaceKey = function (oldElement, newKey) { var newElement = ReactElement(oldElement.type, newKey, oldElement.ref, oldElement._self, oldElement._source, oldElement._owner, oldElement.props); return newElement; }; /** * Clone and return a new ReactElement using element as the starting point. * See https://facebook.github.io/react/docs/top-level-api.html#react.cloneelement */ ReactElement.cloneElement = function (element, config, children) { var propName; // Original props are copied var props = _assign({}, element.props); // Reserved names are extracted var key = element.key; var ref = element.ref; // Self is preserved since the owner is preserved. var self = element._self; // Source is preserved since cloneElement is unlikely to be targeted by a // transpiler, and the original source is probably a better indicator of the // true owner. var source = element._source; // Owner will be preserved, unless ref is overridden var owner = element._owner; if (config != null) { if (hasValidRef(config)) { // Silently steal the ref from the parent. ref = config.ref; owner = ReactCurrentOwner.current; } if (hasValidKey(config)) { key = '' + config.key; } // Remaining properties override existing props var defaultProps; if (element.type && element.type.defaultProps) { defaultProps = element.type.defaultProps; } for (propName in config) { if (hasOwnProperty.call(config, propName) && !RESERVED_PROPS.hasOwnProperty(propName)) { if (config[propName] === undefined && defaultProps !== undefined) { // Resolve default props props[propName] = defaultProps[propName]; } else { props[propName] = config[propName]; } } } } // Children can be more than one argument, and those are transferred onto // the newly allocated props object. var childrenLength = arguments.length - 2; if (childrenLength === 1) { props.children = children; } else if (childrenLength > 1) { var childArray = Array(childrenLength); for (var i = 0; i < childrenLength; i++) { childArray[i] = arguments[i + 2]; } props.children = childArray; } return ReactElement(element.type, key, ref, self, source, owner, props); }; /** * Verifies the object is a ReactElement. * See https://facebook.github.io/react/docs/top-level-api.html#react.isvalidelement * @param {?object} object * @return {boolean} True if `object` is a valid component. * @final */ ReactElement.isValidElement = function (object) { return typeof object === 'object' && object !== null && object.$$typeof === REACT_ELEMENT_TYPE; }; module.exports = ReactElement; /***/ }, /* 10 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; /** * Keeps track of the current owner. * * The current owner is the component who should own any components that are * currently being constructed. */ var ReactCurrentOwner = { /** * @internal * @type {ReactComponent} */ current: null }; module.exports = ReactCurrentOwner; /***/ }, /* 11 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-2015, Facebook, Inc. * All rights reserved. 
* * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var emptyFunction = __webpack_require__(12); /** * Similar to invariant but only logs a warning if the condition is not met. * This can be used to log issues in development environments in critical * paths. Removing the logging code for production environments will keep the * same logic and follow the same code paths. */ var warning = emptyFunction; if (false) { (function () { var printWarning = function printWarning(format) { for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { args[_key - 1] = arguments[_key]; } var argIndex = 0; var message = 'Warning: ' + format.replace(/%s/g, function () { return args[argIndex++]; }); if (typeof console !== 'undefined') { console.error(message); } try { // --- Welcome to debugging React --- // This error was thrown as a convenience so that you can use this stack // to find the callsite that caused this warning to fire. throw new Error(message); } catch (x) {} }; warning = function warning(condition, format) { if (format === undefined) { throw new Error('`warning(condition, format, ...args)` requires a warning ' + 'message argument'); } if (format.indexOf('Failed Composite propType: ') === 0) { return; // Ignore CompositeComponent proptype check. } if (!condition) { for (var _len2 = arguments.length, args = Array(_len2 > 2 ? _len2 - 2 : 0), _key2 = 2; _key2 < _len2; _key2++) { args[_key2 - 2] = arguments[_key2]; } printWarning.apply(undefined, [format].concat(args)); } }; })(); } module.exports = warning; /***/ }, /* 12 */ /***/ function(module, exports) { "use strict"; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ function makeEmptyFunction(arg) { return function () { return arg; }; } /** * This function accepts and discards inputs; it has no side effects. This is * primarily useful idiomatically for overridable function endpoints which * always need to be callable, since JS lacks a null-call idiom ala Cocoa. */ var emptyFunction = function emptyFunction() {}; emptyFunction.thatReturns = makeEmptyFunction; emptyFunction.thatReturnsFalse = makeEmptyFunction(false); emptyFunction.thatReturnsTrue = makeEmptyFunction(true); emptyFunction.thatReturnsNull = makeEmptyFunction(null); emptyFunction.thatReturnsThis = function () { return this; }; emptyFunction.thatReturnsArgument = function (arg) { return arg; }; module.exports = emptyFunction; /***/ }, /* 13 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* * */ 'use strict'; var canDefineProperty = false; if (false) { try { // $FlowFixMe https://github.com/facebook/flow/issues/285 Object.defineProperty({}, 'x', { get: function () {} }); canDefineProperty = true; } catch (x) { // IE will fail on defineProperty } } module.exports = canDefineProperty; /***/ }, /* 14 */ /***/ function(module, exports) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; // The Symbol used to tag the ReactElement type. If there is no native Symbol // nor polyfill, then a plain number is used for performance. var REACT_ELEMENT_TYPE = typeof Symbol === 'function' && Symbol['for'] && Symbol['for']('react.element') || 0xeac7; module.exports = REACT_ELEMENT_TYPE; /***/ }, /* 15 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(7); var ReactCurrentOwner = __webpack_require__(10); var REACT_ELEMENT_TYPE = __webpack_require__(14); var getIteratorFn = __webpack_require__(16); var invariant = __webpack_require__(8); var KeyEscapeUtils = __webpack_require__(17); var warning = __webpack_require__(11); var SEPARATOR = '.'; var SUBSEPARATOR = ':'; /** * This is inlined from ReactElement since this file is shared between * isomorphic and renderers. We could extract this to a * */ /** * TODO: Test that a single child and an array with one item have the same key * pattern. */ var didWarnAboutMaps = false; /** * Generate a key string that identifies a component within a set. * * @param {*} component A component that could contain a manual key. * @param {number} index Index that is used if a manual key is not provided. * @return {string} */ function getComponentKey(component, index) { // Do some typechecking here since we call this blindly. We want to ensure // that we don't block potential future ES APIs. if (component && typeof component === 'object' && component.key != null) { // Explicit key return KeyEscapeUtils.escape(component.key); } // Implicit key determined by the index in the set return index.toString(36); } /** * @param {?*} children Children tree container. * @param {!string} nameSoFar Name of the key path so far. * @param {!function} callback Callback to invoke with each child found. * @param {?*} traverseContext Used to pass information throughout the traversal * process. * @return {!number} The number of children in this subtree. */ function traverseAllChildrenImpl(children, nameSoFar, callback, traverseContext) { var type = typeof children; if (type === 'undefined' || type === 'boolean') { // All of the above are perceived as null. children = null; } if (children === null || type === 'string' || type === 'number' || // The following is inlined from ReactElement. This means we can optimize // some checks. React Fiber also inlines this logic for similar purposes. 
type === 'object' && children.$$typeof === REACT_ELEMENT_TYPE) { callback(traverseContext, children, // If it's the only child, treat the name as if it was wrapped in an array // so that it's consistent if the number of children grows. nameSoFar === '' ? SEPARATOR + getComponentKey(children, 0) : nameSoFar); return 1; } var child; var nextName; var subtreeCount = 0; // Count of children found in the current subtree. var nextNamePrefix = nameSoFar === '' ? SEPARATOR : nameSoFar + SUBSEPARATOR; if (Array.isArray(children)) { for (var i = 0; i < children.length; i++) { child = children[i]; nextName = nextNamePrefix + getComponentKey(child, i); subtreeCount += traverseAllChildrenImpl(child, nextName, callback, traverseContext); } } else { var iteratorFn = getIteratorFn(children); if (iteratorFn) { var iterator = iteratorFn.call(children); var step; if (iteratorFn !== children.entries) { var ii = 0; while (!(step = iterator.next()).done) { child = step.value; nextName = nextNamePrefix + getComponentKey(child, ii++); subtreeCount += traverseAllChildrenImpl(child, nextName, callback, traverseContext); } } else { if (false) { var mapsAsChildrenAddendum = ''; if (ReactCurrentOwner.current) { var mapsAsChildrenOwnerName = ReactCurrentOwner.current.getName(); if (mapsAsChildrenOwnerName) { mapsAsChildrenAddendum = ' Check the render method of `' + mapsAsChildrenOwnerName + '`.'; } } process.env.NODE_ENV !== 'production' ? warning(didWarnAboutMaps, 'Using Maps as children is not yet fully supported. It is an ' + 'experimental feature that might be removed. Convert it to a ' + 'sequence / iterable of keyed ReactElements instead.%s', mapsAsChildrenAddendum) : void 0; didWarnAboutMaps = true; } // Iterator will provide entry [k,v] tuples rather than values. while (!(step = iterator.next()).done) { var entry = step.value; if (entry) { child = entry[1]; nextName = nextNamePrefix + KeyEscapeUtils.escape(entry[0]) + SUBSEPARATOR + getComponentKey(child, 0); subtreeCount += traverseAllChildrenImpl(child, nextName, callback, traverseContext); } } } } else if (type === 'object') { var addendum = ''; if (false) { addendum = ' If you meant to render a collection of children, use an array ' + 'instead or wrap the object using createFragment(object) from the ' + 'React add-ons.'; if (children._isReactElement) { addendum = ' It looks like you\'re using an element created by a different ' + 'version of React. Make sure to use only one copy of React.'; } if (ReactCurrentOwner.current) { var name = ReactCurrentOwner.current.getName(); if (name) { addendum += ' Check the render method of `' + name + '`.'; } } } var childrenString = String(children); true ? false ? invariant(false, 'Objects are not valid as a React child (found: %s).%s', childrenString === '[object Object]' ? 'object with keys {' + Object.keys(children).join(', ') + '}' : childrenString, addendum) : _prodInvariant('31', childrenString === '[object Object]' ? 'object with keys {' + Object.keys(children).join(', ') + '}' : childrenString, addendum) : void 0; } } return subtreeCount; } /** * Traverses children that are typically specified as `props.children`, but * might also be specified through attributes: * * - `traverseAllChildren(this.props.children, ...)` * - `traverseAllChildren(this.props.leftPanelChildren, ...)` * * The `traverseContext` is an optional argument that is passed through the * entire traversal. It can be used to store accumulations or anything else that * the callback might find relevant. * * @param {?*} children Children tree object. 
* @param {!function} callback To invoke upon traversing each child. * @param {?*} traverseContext Context for traversal. * @return {!number} The number of children in this subtree. */ function traverseAllChildren(children, callback, traverseContext) { if (children == null) { return 0; } return traverseAllChildrenImpl(children, '', callback, traverseContext); } module.exports = traverseAllChildren; /***/ }, /* 16 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; /* global Symbol */ var ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator; var FAUX_ITERATOR_SYMBOL = '@@iterator'; // Before Symbol spec. /** * Returns the iterator method function contained on the iterable object. * * Be sure to invoke the function with the iterable as context: * * var iteratorFn = getIteratorFn(myIterable); * if (iteratorFn) { * var iterator = iteratorFn.call(myIterable); * ... * } * * @param {?object} maybeIterable * @return {?function} */ function getIteratorFn(maybeIterable) { var iteratorFn = maybeIterable && (ITERATOR_SYMBOL && maybeIterable[ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]); if (typeof iteratorFn === 'function') { return iteratorFn; } } module.exports = getIteratorFn; /***/ }, /* 17 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; /** * Escape and wrap key so it is safe to use as a reactid * * @param {string} key to be escaped. * @return {string} the escaped key. */ function escape(key) { var escapeRegex = /[=:]/g; var escaperLookup = { '=': '=0', ':': '=2' }; var escapedString = ('' + key).replace(escapeRegex, function (match) { return escaperLookup[match]; }); return '$' + escapedString; } /** * Unescape and unwrap key for human-readable display * * @param {string} key to unescape. * @return {string} the unescaped key. */ function unescape(key) { var unescapeRegex = /(=0|=2)/g; var unescaperLookup = { '=0': '=', '=2': ':' }; var keySubstring = key[0] === '.' && key[1] === '$' ? key.substring(2) : key.substring(1); return ('' + keySubstring).replace(unescapeRegex, function (match) { return unescaperLookup[match]; }); } var KeyEscapeUtils = { escape: escape, unescape: unescape }; module.exports = KeyEscapeUtils; /***/ }, /* 18 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(7); var ReactNoopUpdateQueue = __webpack_require__(19); var canDefineProperty = __webpack_require__(13); var emptyObject = __webpack_require__(20); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); /** * Base class helpers for the updating state of a component. 
*/ function ReactComponent(props, context, updater) { this.props = props; this.context = context; this.refs = emptyObject; // We initialize the default updater but the real one gets injected by the // renderer. this.updater = updater || ReactNoopUpdateQueue; } ReactComponent.prototype.isReactComponent = {}; /** * Sets a subset of the state. Always use this to mutate * state. You should treat `this.state` as immutable. * * There is no guarantee that `this.state` will be immediately updated, so * accessing `this.state` after calling this method may return the old value. * * There is no guarantee that calls to `setState` will run synchronously, * as they may eventually be batched together. You can provide an optional * callback that will be executed when the call to setState is actually * completed. * * When a function is provided to setState, it will be called at some point in * the future (not synchronously). It will be called with the up to date * component arguments (state, props, context). These values can be different * from this.* because your function may be called after receiveProps but before * shouldComponentUpdate, and this new state, props, and context will not yet be * assigned to this. * * @param {object|function} partialState Next partial state or function to * produce next partial state to be merged with current state. * @param {?function} callback Called after state is updated. * @final * @protected */ ReactComponent.prototype.setState = function (partialState, callback) { !(typeof partialState === 'object' || typeof partialState === 'function' || partialState == null) ? false ? invariant(false, 'setState(...): takes an object of state variables to update or a function which returns an object of state variables.') : _prodInvariant('85') : void 0; this.updater.enqueueSetState(this, partialState); if (callback) { this.updater.enqueueCallback(this, callback, 'setState'); } }; /** * Forces an update. This should only be invoked when it is known with * certainty that we are **not** in a DOM transaction. * * You may want to call this when you know that some deeper aspect of the * component's state has changed but `setState` was not called. * * This will not invoke `shouldComponentUpdate`, but it will invoke * `componentWillUpdate` and `componentDidUpdate`. * * @param {?function} callback Called after update is complete. * @final * @protected */ ReactComponent.prototype.forceUpdate = function (callback) { this.updater.enqueueForceUpdate(this); if (callback) { this.updater.enqueueCallback(this, callback, 'forceUpdate'); } }; /** * Deprecated APIs. These APIs used to exist on classic React classes but since * we would like to deprecate them, we're not going to move them over to this * modern base class. Instead, we define a getter that warns if it's accessed. */ if (false) { var deprecatedAPIs = { isMounted: ['isMounted', 'Instead, make sure to clean up subscriptions and pending requests in ' + 'componentWillUnmount to prevent memory leaks.'], replaceState: ['replaceState', 'Refactor your code to use setState instead (see ' + 'https://github.com/facebook/react/issues/3236).'] }; var defineDeprecationWarning = function (methodName, info) { if (canDefineProperty) { Object.defineProperty(ReactComponent.prototype, methodName, { get: function () { process.env.NODE_ENV !== 'production' ? warning(false, '%s(...) is deprecated in plain JavaScript React classes. 
%s', info[0], info[1]) : void 0; return undefined; } }); } }; for (var fnName in deprecatedAPIs) { if (deprecatedAPIs.hasOwnProperty(fnName)) { defineDeprecationWarning(fnName, deprecatedAPIs[fnName]); } } } module.exports = ReactComponent; /***/ }, /* 19 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var warning = __webpack_require__(11); function warnNoop(publicInstance, callerName) { if (false) { var constructor = publicInstance.constructor; process.env.NODE_ENV !== 'production' ? warning(false, '%s(...): Can only update a mounted or mounting component. ' + 'This usually means you called %s() on an unmounted component. ' + 'This is a no-op. Please check the code for the %s component.', callerName, callerName, constructor && (constructor.displayName || constructor.name) || 'ReactClass') : void 0; } } /** * This is the abstract API for an update queue. */ var ReactNoopUpdateQueue = { /** * Checks whether or not this composite component is mounted. * @param {ReactClass} publicInstance The instance we want to test. * @return {boolean} True if mounted, false otherwise. * @protected * @final */ isMounted: function (publicInstance) { return false; }, /** * Enqueue a callback that will be executed after all the pending updates * have processed. * * @param {ReactClass} publicInstance The instance to use as `this` context. * @param {?function} callback Called after state is updated. * @internal */ enqueueCallback: function (publicInstance, callback) {}, /** * Forces an update. This should only be invoked when it is known with * certainty that we are **not** in a DOM transaction. * * You may want to call this when you know that some deeper aspect of the * component's state has changed but `setState` was not called. * * This will not invoke `shouldComponentUpdate`, but it will invoke * `componentWillUpdate` and `componentDidUpdate`. * * @param {ReactClass} publicInstance The instance that should rerender. * @internal */ enqueueForceUpdate: function (publicInstance) { warnNoop(publicInstance, 'forceUpdate'); }, /** * Replaces all of the state. Always use this or `setState` to mutate state. * You should treat `this.state` as immutable. * * There is no guarantee that `this.state` will be immediately updated, so * accessing `this.state` after calling this method may return the old value. * * @param {ReactClass} publicInstance The instance that should rerender. * @param {object} completeState Next state. * @internal */ enqueueReplaceState: function (publicInstance, completeState) { warnNoop(publicInstance, 'replaceState'); }, /** * Sets a subset of the state. This only exists because _pendingState is * internal. This provides a merging strategy that is not available to deep * properties which is confusing. TODO: Expose pendingState or don't use it * during the merge. * * @param {ReactClass} publicInstance The instance that should rerender. * @param {object} partialState Next partial state to be merged with state. 
* @internal */ enqueueSetState: function (publicInstance, partialState) { warnNoop(publicInstance, 'setState'); } }; module.exports = ReactNoopUpdateQueue; /***/ }, /* 20 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var emptyObject = {}; if (false) { Object.freeze(emptyObject); } module.exports = emptyObject; /***/ }, /* 21 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var ReactComponent = __webpack_require__(18); var ReactNoopUpdateQueue = __webpack_require__(19); var emptyObject = __webpack_require__(20); /** * Base class helpers for the updating state of a component. */ function ReactPureComponent(props, context, updater) { // Duplicated from ReactComponent. this.props = props; this.context = context; this.refs = emptyObject; // We initialize the default updater but the real one gets injected by the // renderer. this.updater = updater || ReactNoopUpdateQueue; } function ComponentDummy() {} ComponentDummy.prototype = ReactComponent.prototype; ReactPureComponent.prototype = new ComponentDummy(); ReactPureComponent.prototype.constructor = ReactPureComponent; // Avoid an extra prototype jump for these methods. _assign(ReactPureComponent.prototype, ReactComponent.prototype); ReactPureComponent.prototype.isPureReactComponent = true; module.exports = ReactPureComponent; /***/ }, /* 22 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(7), _assign = __webpack_require__(4); var ReactComponent = __webpack_require__(18); var ReactElement = __webpack_require__(9); var ReactPropTypeLocationNames = __webpack_require__(23); var ReactNoopUpdateQueue = __webpack_require__(19); var emptyObject = __webpack_require__(20); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); var MIXINS_KEY = 'mixins'; // Helper function to allow the creation of anonymous functions which do not // have .name set to the name of the variable being assigned to. function identity(fn) { return fn; } /** * Policies that describe methods in `ReactClassInterface`. */ var injectedMixins = []; /** * Composite components are higher-level components that compose other composite * or host components. * * To create a new type of `ReactClass`, pass a specification of * your new class to `React.createClass`. The only requirement of your class * specification is that you implement a `render` method. 
* * var MyComponent = React.createClass({ * render: function() { * return <div>Hello World</div>; * } * }); * * The class specification supports a specific protocol of methods that have * special meaning (e.g. `render`). See `ReactClassInterface` for * more the comprehensive protocol. Any other properties and methods in the * class specification will be available on the prototype. * * @interface ReactClassInterface * @internal */ var ReactClassInterface = { /** * An array of Mixin objects to include when defining your component. * * @type {array} * @optional */ mixins: 'DEFINE_MANY', /** * An object containing properties and methods that should be defined on * the component's constructor instead of its prototype (static methods). * * @type {object} * @optional */ statics: 'DEFINE_MANY', /** * Definition of prop types for this component. * * @type {object} * @optional */ propTypes: 'DEFINE_MANY', /** * Definition of context types for this component. * * @type {object} * @optional */ contextTypes: 'DEFINE_MANY', /** * Definition of context types this component sets for its children. * * @type {object} * @optional */ childContextTypes: 'DEFINE_MANY', // ==== Definition methods ==== /** * Invoked when the component is mounted. Values in the mapping will be set on * `this.props` if that prop is not specified (i.e. using an `in` check). * * This method is invoked before `getInitialState` and therefore cannot rely * on `this.state` or use `this.setState`. * * @return {object} * @optional */ getDefaultProps: 'DEFINE_MANY_MERGED', /** * Invoked once before the component is mounted. The return value will be used * as the initial value of `this.state`. * * getInitialState: function() { * return { * isOn: false, * fooBaz: new BazFoo() * } * } * * @return {object} * @optional */ getInitialState: 'DEFINE_MANY_MERGED', /** * @return {object} * @optional */ getChildContext: 'DEFINE_MANY_MERGED', /** * Uses props from `this.props` and state from `this.state` to render the * structure of the component. * * No guarantees are made about when or how often this method is invoked, so * it must not have side effects. * * render: function() { * var name = this.props.name; * return <div>Hello, {name}!</div>; * } * * @return {ReactComponent} * @nosideeffects * @required */ render: 'DEFINE_ONCE', // ==== Delegate methods ==== /** * Invoked when the component is initially created and about to be mounted. * This may have side effects, but any external subscriptions or data created * by this method must be cleaned up in `componentWillUnmount`. * * @optional */ componentWillMount: 'DEFINE_MANY', /** * Invoked when the component has been mounted and has a DOM representation. * However, there is no guarantee that the DOM node is in the document. * * Use this as an opportunity to operate on the DOM when the component has * been mounted (initialized and rendered) for the first time. * * @param {DOMElement} rootNode DOM element representing the component. * @optional */ componentDidMount: 'DEFINE_MANY', /** * Invoked before the component receives new props. * * Use this as an opportunity to react to a prop transition by updating the * state using `this.setState`. Current props are accessed via `this.props`. * * componentWillReceiveProps: function(nextProps, nextContext) { * this.setState({ * likesIncreasing: nextProps.likeCount > this.props.likeCount * }); * } * * NOTE: There is no equivalent `componentWillReceiveState`. An incoming prop * transition may cause a state change, but the opposite is not true. 
If you * need it, you are probably looking for `componentWillUpdate`. * * @param {object} nextProps * @optional */ componentWillReceiveProps: 'DEFINE_MANY', /** * Invoked while deciding if the component should be updated as a result of * receiving new props, state and/or context. * * Use this as an opportunity to `return false` when you're certain that the * transition to the new props/state/context will not require a component * update. * * shouldComponentUpdate: function(nextProps, nextState, nextContext) { * return !equal(nextProps, this.props) || * !equal(nextState, this.state) || * !equal(nextContext, this.context); * } * * @param {object} nextProps * @param {?object} nextState * @param {?object} nextContext * @return {boolean} True if the component should update. * @optional */ shouldComponentUpdate: 'DEFINE_ONCE', /** * Invoked when the component is about to update due to a transition from * `this.props`, `this.state` and `this.context` to `nextProps`, `nextState` * and `nextContext`. * * Use this as an opportunity to perform preparation before an update occurs. * * NOTE: You **cannot** use `this.setState()` in this method. * * @param {object} nextProps * @param {?object} nextState * @param {?object} nextContext * @param {ReactReconcileTransaction} transaction * @optional */ componentWillUpdate: 'DEFINE_MANY', /** * Invoked when the component's DOM representation has been updated. * * Use this as an opportunity to operate on the DOM when the component has * been updated. * * @param {object} prevProps * @param {?object} prevState * @param {?object} prevContext * @param {DOMElement} rootNode DOM element representing the component. * @optional */ componentDidUpdate: 'DEFINE_MANY', /** * Invoked when the component is about to be removed from its parent and have * its DOM representation destroyed. * * Use this as an opportunity to deallocate any external resources. * * NOTE: There is no `componentDidUnmount` since your component will have been * destroyed by that point. * * @optional */ componentWillUnmount: 'DEFINE_MANY', // ==== Advanced methods ==== /** * Updates the component's currently mounted DOM representation. * * By default, this implements React's rendering and reconciliation algorithm. * Sophisticated clients may wish to override this. * * @param {ReactReconcileTransaction} transaction * @internal * @overridable */ updateComponent: 'OVERRIDE_BASE' }; /** * Mapping from class specification keys to special processing functions. * * Although these are declared like instance properties in the specification * when defining classes using `React.createClass`, they are actually static * and are accessible on the constructor instead of the prototype. Despite * being static, they must be defined outside of the "statics" key under * which all other static methods are defined. 
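 *
 * Editorial sketch of a small createClass specification using several of the
 * keys described above (the mixin and component names are hypothetical):
 *
 *   var LoggingMixin = {
 *     componentDidMount: function () { console.log('mounted'); }
 *   };
 *
 *   var Toggle = React.createClass({
 *     mixins: [LoggingMixin],  // handled by the reserved `mixins` key below
 *     getDefaultProps: function () { return {label: 'Toggle'}; },
 *     getInitialState: function () { return {isOn: false}; },
 *     handleClick: function () { this.setState({isOn: !this.state.isOn}); },
 *     render: function () {
 *       // handleClick is auto-bound by createClass, so no explicit .bind is needed
 *       return React.DOM.button({onClick: this.handleClick},
 *         this.props.label + ': ' + (this.state.isOn ? 'on' : 'off'));
 *     }
 *   });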
*/ var RESERVED_SPEC_KEYS = { displayName: function (Constructor, displayName) { Constructor.displayName = displayName; }, mixins: function (Constructor, mixins) { if (mixins) { for (var i = 0; i < mixins.length; i++) { mixSpecIntoComponent(Constructor, mixins[i]); } } }, childContextTypes: function (Constructor, childContextTypes) { if (false) { validateTypeDef(Constructor, childContextTypes, 'childContext'); } Constructor.childContextTypes = _assign({}, Constructor.childContextTypes, childContextTypes); }, contextTypes: function (Constructor, contextTypes) { if (false) { validateTypeDef(Constructor, contextTypes, 'context'); } Constructor.contextTypes = _assign({}, Constructor.contextTypes, contextTypes); }, /** * Special case getDefaultProps which should move into statics but requires * automatic merging. */ getDefaultProps: function (Constructor, getDefaultProps) { if (Constructor.getDefaultProps) { Constructor.getDefaultProps = createMergedResultFunction(Constructor.getDefaultProps, getDefaultProps); } else { Constructor.getDefaultProps = getDefaultProps; } }, propTypes: function (Constructor, propTypes) { if (false) { validateTypeDef(Constructor, propTypes, 'prop'); } Constructor.propTypes = _assign({}, Constructor.propTypes, propTypes); }, statics: function (Constructor, statics) { mixStaticSpecIntoComponent(Constructor, statics); }, autobind: function () {} }; function validateTypeDef(Constructor, typeDef, location) { for (var propName in typeDef) { if (typeDef.hasOwnProperty(propName)) { // use a warning instead of an invariant so components // don't show up in prod but only in __DEV__ false ? warning(typeof typeDef[propName] === 'function', '%s: %s type `%s` is invalid; it must be a function, usually from ' + 'React.PropTypes.', Constructor.displayName || 'ReactClass', ReactPropTypeLocationNames[location], propName) : void 0; } } } function validateMethodOverride(isAlreadyDefined, name) { var specPolicy = ReactClassInterface.hasOwnProperty(name) ? ReactClassInterface[name] : null; // Disallow overriding of base class methods unless explicitly allowed. if (ReactClassMixin.hasOwnProperty(name)) { !(specPolicy === 'OVERRIDE_BASE') ? false ? invariant(false, 'ReactClassInterface: You are attempting to override `%s` from your class specification. Ensure that your method names do not overlap with React methods.', name) : _prodInvariant('73', name) : void 0; } // Disallow defining methods more than once unless explicitly allowed. if (isAlreadyDefined) { !(specPolicy === 'DEFINE_MANY' || specPolicy === 'DEFINE_MANY_MERGED') ? false ? invariant(false, 'ReactClassInterface: You are attempting to define `%s` on your component more than once. This conflict may be due to a mixin.', name) : _prodInvariant('74', name) : void 0; } } /** * Mixin helper which handles policy validation and reserved * specification keys when building React classes. */ function mixSpecIntoComponent(Constructor, spec) { if (!spec) { if (false) { var typeofSpec = typeof spec; var isMixinValid = typeofSpec === 'object' && spec !== null; process.env.NODE_ENV !== 'production' ? warning(isMixinValid, '%s: You\'re attempting to include a mixin that is either null ' + 'or not an object. Check the mixins included by the component, ' + 'as well as any mixins they include themselves. ' + 'Expected object but got %s.', Constructor.displayName || 'ReactClass', spec === null ? null : typeofSpec) : void 0; } return; } !(typeof spec !== 'function') ? false ? 
invariant(false, 'ReactClass: You\'re attempting to use a component class or function as a mixin. Instead, just use a regular object.') : _prodInvariant('75') : void 0; !!ReactElement.isValidElement(spec) ? false ? invariant(false, 'ReactClass: You\'re attempting to use a component as a mixin. Instead, just use a regular object.') : _prodInvariant('76') : void 0; var proto = Constructor.prototype; var autoBindPairs = proto.__reactAutoBindPairs; // By handling mixins before any other properties, we ensure the same // chaining order is applied to methods with DEFINE_MANY policy, whether // mixins are listed before or after these methods in the spec. if (spec.hasOwnProperty(MIXINS_KEY)) { RESERVED_SPEC_KEYS.mixins(Constructor, spec.mixins); } for (var name in spec) { if (!spec.hasOwnProperty(name)) { continue; } if (name === MIXINS_KEY) { // We have already handled mixins in a special case above. continue; } var property = spec[name]; var isAlreadyDefined = proto.hasOwnProperty(name); validateMethodOverride(isAlreadyDefined, name); if (RESERVED_SPEC_KEYS.hasOwnProperty(name)) { RESERVED_SPEC_KEYS[name](Constructor, property); } else { // Setup methods on prototype: // The following member methods should not be automatically bound: // 1. Expected ReactClass methods (in the "interface"). // 2. Overridden methods (that were mixed in). var isReactClassMethod = ReactClassInterface.hasOwnProperty(name); var isFunction = typeof property === 'function'; var shouldAutoBind = isFunction && !isReactClassMethod && !isAlreadyDefined && spec.autobind !== false; if (shouldAutoBind) { autoBindPairs.push(name, property); proto[name] = property; } else { if (isAlreadyDefined) { var specPolicy = ReactClassInterface[name]; // These cases should already be caught by validateMethodOverride. !(isReactClassMethod && (specPolicy === 'DEFINE_MANY_MERGED' || specPolicy === 'DEFINE_MANY')) ? false ? invariant(false, 'ReactClass: Unexpected spec policy %s for key %s when mixing in component specs.', specPolicy, name) : _prodInvariant('77', specPolicy, name) : void 0; // For methods which are defined more than once, call the existing // methods before calling the new property, merging if appropriate. if (specPolicy === 'DEFINE_MANY_MERGED') { proto[name] = createMergedResultFunction(proto[name], property); } else if (specPolicy === 'DEFINE_MANY') { proto[name] = createChainedFunction(proto[name], property); } } else { proto[name] = property; if (false) { // Add verbose displayName to the function, which helps when looking // at profiling tools. if (typeof property === 'function' && spec.displayName) { proto[name].displayName = spec.displayName + '_' + name; } } } } } } } function mixStaticSpecIntoComponent(Constructor, statics) { if (!statics) { return; } for (var name in statics) { var property = statics[name]; if (!statics.hasOwnProperty(name)) { continue; } var isReserved = name in RESERVED_SPEC_KEYS; !!isReserved ? false ? invariant(false, 'ReactClass: You are attempting to define a reserved property, `%s`, that shouldn\'t be on the "statics" key. Define it as an instance property instead; it will still be accessible on the constructor.', name) : _prodInvariant('78', name) : void 0; var isInherited = name in Constructor; !!isInherited ? false ? invariant(false, 'ReactClass: You are attempting to define `%s` on your component more than once. 
This conflict may be due to a mixin.', name) : _prodInvariant('79', name) : void 0; Constructor[name] = property; } } /** * Merge two objects, but throw if both contain the same key. * * @param {object} one The first object, which is mutated. * @param {object} two The second object * @return {object} one after it has been mutated to contain everything in two. */ function mergeIntoWithNoDuplicateKeys(one, two) { !(one && two && typeof one === 'object' && typeof two === 'object') ? false ? invariant(false, 'mergeIntoWithNoDuplicateKeys(): Cannot merge non-objects.') : _prodInvariant('80') : void 0; for (var key in two) { if (two.hasOwnProperty(key)) { !(one[key] === undefined) ? false ? invariant(false, 'mergeIntoWithNoDuplicateKeys(): Tried to merge two objects with the same key: `%s`. This conflict may be due to a mixin; in particular, this may be caused by two getInitialState() or getDefaultProps() methods returning objects with clashing keys.', key) : _prodInvariant('81', key) : void 0; one[key] = two[key]; } } return one; } /** * Creates a function that invokes two functions and merges their return values. * * @param {function} one Function to invoke first. * @param {function} two Function to invoke second. * @return {function} Function that invokes the two argument functions. * @private */ function createMergedResultFunction(one, two) { return function mergedResult() { var a = one.apply(this, arguments); var b = two.apply(this, arguments); if (a == null) { return b; } else if (b == null) { return a; } var c = {}; mergeIntoWithNoDuplicateKeys(c, a); mergeIntoWithNoDuplicateKeys(c, b); return c; }; } /** * Creates a function that invokes two functions and ignores their return vales. * * @param {function} one Function to invoke first. * @param {function} two Function to invoke second. * @return {function} Function that invokes the two argument functions. * @private */ function createChainedFunction(one, two) { return function chainedFunction() { one.apply(this, arguments); two.apply(this, arguments); }; } /** * Binds a method to the component. * * @param {object} component Component whose method is going to be bound. * @param {function} method Method to be bound. * @return {function} The bound method. */ function bindAutoBindMethod(component, method) { var boundMethod = method.bind(component); if (false) { boundMethod.__reactBoundContext = component; boundMethod.__reactBoundMethod = method; boundMethod.__reactBoundArguments = null; var componentName = component.constructor.displayName; var _bind = boundMethod.bind; boundMethod.bind = function (newThis) { for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { args[_key - 1] = arguments[_key]; } // User is trying to bind() an autobound method; we effectively will // ignore the value of "this" that the user is trying to use, so // let's warn. if (newThis !== component && newThis !== null) { process.env.NODE_ENV !== 'production' ? warning(false, 'bind(): React component methods may only be bound to the ' + 'component instance. See %s', componentName) : void 0; } else if (!args.length) { process.env.NODE_ENV !== 'production' ? warning(false, 'bind(): You are binding a component method to the component. ' + 'React does this for you automatically in a high-performance ' + 'way, so you can safely remove this call. 
See %s', componentName) : void 0; return boundMethod; } var reboundMethod = _bind.apply(boundMethod, arguments); reboundMethod.__reactBoundContext = component; reboundMethod.__reactBoundMethod = method; reboundMethod.__reactBoundArguments = args; return reboundMethod; }; } return boundMethod; } /** * Binds all auto-bound methods in a component. * * @param {object} component Component whose method is going to be bound. */ function bindAutoBindMethods(component) { var pairs = component.__reactAutoBindPairs; for (var i = 0; i < pairs.length; i += 2) { var autoBindKey = pairs[i]; var method = pairs[i + 1]; component[autoBindKey] = bindAutoBindMethod(component, method); } } /** * Add more to the ReactClass base class. These are all legacy features and * therefore not already part of the modern ReactComponent. */ var ReactClassMixin = { /** * TODO: This will be deprecated because state should always keep a consistent * type signature and the only use case for this, is to avoid that. */ replaceState: function (newState, callback) { this.updater.enqueueReplaceState(this, newState); if (callback) { this.updater.enqueueCallback(this, callback, 'replaceState'); } }, /** * Checks whether or not this composite component is mounted. * @return {boolean} True if mounted, false otherwise. * @protected * @final */ isMounted: function () { return this.updater.isMounted(this); } }; var ReactClassComponent = function () {}; _assign(ReactClassComponent.prototype, ReactComponent.prototype, ReactClassMixin); /** * Module for creating composite components. * * @class ReactClass */ var ReactClass = { /** * Creates a composite component class given a class specification. * See https://facebook.github.io/react/docs/top-level-api.html#react.createclass * * @param {object} spec Class specification (which must define `render`). * @return {function} Component constructor function. * @public */ createClass: function (spec) { // To keep our warnings more understandable, we'll use a little hack here to // ensure that Constructor.name !== 'Constructor'. This makes sure we don't // unnecessarily identify a class without displayName as 'Constructor'. var Constructor = identity(function (props, context, updater) { // This constructor gets overridden by mocks. The argument is used // by mocks to assert on what gets mounted. if (false) { process.env.NODE_ENV !== 'production' ? warning(this instanceof Constructor, 'Something is calling a React component directly. Use a factory or ' + 'JSX instead. See: https://fb.me/react-legacyfactory') : void 0; } // Wire up auto-binding if (this.__reactAutoBindPairs.length) { bindAutoBindMethods(this); } this.props = props; this.context = context; this.refs = emptyObject; this.updater = updater || ReactNoopUpdateQueue; this.state = null; // ReactClasses doesn't have constructors. Instead, they use the // getInitialState and componentWillMount methods for initialization. var initialState = this.getInitialState ? this.getInitialState() : null; if (false) { // We allow auto-mocks to proceed as if they're returning null. if (initialState === undefined && this.getInitialState._isMockFunction) { // This is probably bad practice. Consider warning here and // deprecating this convenience. initialState = null; } } !(typeof initialState === 'object' && !Array.isArray(initialState)) ? false ? 
invariant(false, '%s.getInitialState(): must return an object or null', Constructor.displayName || 'ReactCompositeComponent') : _prodInvariant('82', Constructor.displayName || 'ReactCompositeComponent') : void 0; this.state = initialState; }); Constructor.prototype = new ReactClassComponent(); Constructor.prototype.constructor = Constructor; Constructor.prototype.__reactAutoBindPairs = []; injectedMixins.forEach(mixSpecIntoComponent.bind(null, Constructor)); mixSpecIntoComponent(Constructor, spec); // Initialize the defaultProps property after all mixins have been merged. if (Constructor.getDefaultProps) { Constructor.defaultProps = Constructor.getDefaultProps(); } if (false) { // This is a tag to indicate that the use of these method names is ok, // since it's used with createClass. If it's not, then it's likely a // mistake so we'll warn you to use the static property, property // initializer or constructor respectively. if (Constructor.getDefaultProps) { Constructor.getDefaultProps.isReactClassApproved = {}; } if (Constructor.prototype.getInitialState) { Constructor.prototype.getInitialState.isReactClassApproved = {}; } } !Constructor.prototype.render ? false ? invariant(false, 'createClass(...): Class specification must implement a `render` method.') : _prodInvariant('83') : void 0; if (false) { process.env.NODE_ENV !== 'production' ? warning(!Constructor.prototype.componentShouldUpdate, '%s has a method called ' + 'componentShouldUpdate(). Did you mean shouldComponentUpdate()? ' + 'The name is phrased as a question because the function is ' + 'expected to return a value.', spec.displayName || 'A component') : void 0; process.env.NODE_ENV !== 'production' ? warning(!Constructor.prototype.componentWillRecieveProps, '%s has a method called ' + 'componentWillRecieveProps(). Did you mean componentWillReceiveProps()?', spec.displayName || 'A component') : void 0; } // Reduce time spent doing lookups by setting these on the prototype. for (var methodName in ReactClassInterface) { if (!Constructor.prototype[methodName]) { Constructor.prototype[methodName] = null; } } return Constructor; }, injection: { injectMixin: function (mixin) { injectedMixins.push(mixin); } } }; module.exports = ReactClass; /***/ }, /* 23 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var ReactPropTypeLocationNames = {}; if (false) { ReactPropTypeLocationNames = { prop: 'prop', context: 'context', childContext: 'child context' }; } module.exports = ReactPropTypeLocationNames; /***/ }, /* 24 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactElement = __webpack_require__(9); /** * Create a factory that creates HTML tag elements. 
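 *
 * Editorial usage sketch: each factory is simply ReactElement.createFactory
 * bound to a tag name, so the entries of the ReactDOMFactories map below can
 * be called directly (they are also exposed as React.DOM):
 *
 *   var div = ReactDOMFactories.div;
 *   var element = div({className: 'greeting'}, 'Hello');
 *   // equivalent to React.createElement('div', {className: 'greeting'}, 'Hello')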
* * @private */ var createDOMFactory = ReactElement.createFactory; if (false) { var ReactElementValidator = require('./ReactElementValidator'); createDOMFactory = ReactElementValidator.createFactory; } /** * Creates a mapping from supported HTML tags to `ReactDOMComponent` classes. * This is also accessible via `React.DOM`. * * @public */ var ReactDOMFactories = { a: createDOMFactory('a'), abbr: createDOMFactory('abbr'), address: createDOMFactory('address'), area: createDOMFactory('area'), article: createDOMFactory('article'), aside: createDOMFactory('aside'), audio: createDOMFactory('audio'), b: createDOMFactory('b'), base: createDOMFactory('base'), bdi: createDOMFactory('bdi'), bdo: createDOMFactory('bdo'), big: createDOMFactory('big'), blockquote: createDOMFactory('blockquote'), body: createDOMFactory('body'), br: createDOMFactory('br'), button: createDOMFactory('button'), canvas: createDOMFactory('canvas'), caption: createDOMFactory('caption'), cite: createDOMFactory('cite'), code: createDOMFactory('code'), col: createDOMFactory('col'), colgroup: createDOMFactory('colgroup'), data: createDOMFactory('data'), datalist: createDOMFactory('datalist'), dd: createDOMFactory('dd'), del: createDOMFactory('del'), details: createDOMFactory('details'), dfn: createDOMFactory('dfn'), dialog: createDOMFactory('dialog'), div: createDOMFactory('div'), dl: createDOMFactory('dl'), dt: createDOMFactory('dt'), em: createDOMFactory('em'), embed: createDOMFactory('embed'), fieldset: createDOMFactory('fieldset'), figcaption: createDOMFactory('figcaption'), figure: createDOMFactory('figure'), footer: createDOMFactory('footer'), form: createDOMFactory('form'), h1: createDOMFactory('h1'), h2: createDOMFactory('h2'), h3: createDOMFactory('h3'), h4: createDOMFactory('h4'), h5: createDOMFactory('h5'), h6: createDOMFactory('h6'), head: createDOMFactory('head'), header: createDOMFactory('header'), hgroup: createDOMFactory('hgroup'), hr: createDOMFactory('hr'), html: createDOMFactory('html'), i: createDOMFactory('i'), iframe: createDOMFactory('iframe'), img: createDOMFactory('img'), input: createDOMFactory('input'), ins: createDOMFactory('ins'), kbd: createDOMFactory('kbd'), keygen: createDOMFactory('keygen'), label: createDOMFactory('label'), legend: createDOMFactory('legend'), li: createDOMFactory('li'), link: createDOMFactory('link'), main: createDOMFactory('main'), map: createDOMFactory('map'), mark: createDOMFactory('mark'), menu: createDOMFactory('menu'), menuitem: createDOMFactory('menuitem'), meta: createDOMFactory('meta'), meter: createDOMFactory('meter'), nav: createDOMFactory('nav'), noscript: createDOMFactory('noscript'), object: createDOMFactory('object'), ol: createDOMFactory('ol'), optgroup: createDOMFactory('optgroup'), option: createDOMFactory('option'), output: createDOMFactory('output'), p: createDOMFactory('p'), param: createDOMFactory('param'), picture: createDOMFactory('picture'), pre: createDOMFactory('pre'), progress: createDOMFactory('progress'), q: createDOMFactory('q'), rp: createDOMFactory('rp'), rt: createDOMFactory('rt'), ruby: createDOMFactory('ruby'), s: createDOMFactory('s'), samp: createDOMFactory('samp'), script: createDOMFactory('script'), section: createDOMFactory('section'), select: createDOMFactory('select'), small: createDOMFactory('small'), source: createDOMFactory('source'), span: createDOMFactory('span'), strong: createDOMFactory('strong'), style: createDOMFactory('style'), sub: createDOMFactory('sub'), summary: createDOMFactory('summary'), sup: createDOMFactory('sup'), 
table: createDOMFactory('table'), tbody: createDOMFactory('tbody'), td: createDOMFactory('td'), textarea: createDOMFactory('textarea'), tfoot: createDOMFactory('tfoot'), th: createDOMFactory('th'), thead: createDOMFactory('thead'), time: createDOMFactory('time'), title: createDOMFactory('title'), tr: createDOMFactory('tr'), track: createDOMFactory('track'), u: createDOMFactory('u'), ul: createDOMFactory('ul'), 'var': createDOMFactory('var'), video: createDOMFactory('video'), wbr: createDOMFactory('wbr'), // SVG circle: createDOMFactory('circle'), clipPath: createDOMFactory('clipPath'), defs: createDOMFactory('defs'), ellipse: createDOMFactory('ellipse'), g: createDOMFactory('g'), image: createDOMFactory('image'), line: createDOMFactory('line'), linearGradient: createDOMFactory('linearGradient'), mask: createDOMFactory('mask'), path: createDOMFactory('path'), pattern: createDOMFactory('pattern'), polygon: createDOMFactory('polygon'), polyline: createDOMFactory('polyline'), radialGradient: createDOMFactory('radialGradient'), rect: createDOMFactory('rect'), stop: createDOMFactory('stop'), svg: createDOMFactory('svg'), text: createDOMFactory('text'), tspan: createDOMFactory('tspan') }; module.exports = ReactDOMFactories; /***/ }, /* 25 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactElement = __webpack_require__(9); var ReactPropTypeLocationNames = __webpack_require__(23); var ReactPropTypesSecret = __webpack_require__(26); var emptyFunction = __webpack_require__(12); var getIteratorFn = __webpack_require__(16); var warning = __webpack_require__(11); /** * Collection of methods that allow declaration and validation of props that are * supplied to React components. Example usage: * * var Props = require('ReactPropTypes'); * var MyArticle = React.createClass({ * propTypes: { * // An optional string prop named "description". * description: Props.string, * * // A required enum prop named "category". * category: Props.oneOf(['News','Photos']).isRequired, * * // A prop named "dialog" that requires an instance of Dialog. * dialog: Props.instanceOf(Dialog).isRequired * }, * render: function() { ... } * }); * * A more formal specification of how these methods are used: * * type := array|bool|func|object|number|string|oneOf([...])|instanceOf(...) * decl := ReactPropTypes.{type}(.isRequired)? * * Each and every declaration produces a function with the same signature. This * allows the creation of custom validation functions. For example: * * var MyLink = React.createClass({ * propTypes: { * // An optional string or URI prop named "href". 
* href: function(props, propName, componentName) { * var propValue = props[propName]; * if (propValue != null && typeof propValue !== 'string' && * !(propValue instanceof URI)) { * return new Error( * 'Expected a string or an URI for ' + propName + ' in ' + * componentName * ); * } * } * }, * render: function() {...} * }); * * @internal */ var ANONYMOUS = '<<anonymous>>'; var ReactPropTypes = { array: createPrimitiveTypeChecker('array'), bool: createPrimitiveTypeChecker('boolean'), func: createPrimitiveTypeChecker('function'), number: createPrimitiveTypeChecker('number'), object: createPrimitiveTypeChecker('object'), string: createPrimitiveTypeChecker('string'), symbol: createPrimitiveTypeChecker('symbol'), any: createAnyTypeChecker(), arrayOf: createArrayOfTypeChecker, element: createElementTypeChecker(), instanceOf: createInstanceTypeChecker, node: createNodeChecker(), objectOf: createObjectOfTypeChecker, oneOf: createEnumTypeChecker, oneOfType: createUnionTypeChecker, shape: createShapeTypeChecker }; /** * inlined Object.is polyfill to avoid requiring consumers ship their own * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is */ /*eslint-disable no-self-compare*/ function is(x, y) { // SameValue algorithm if (x === y) { // Steps 1-5, 7-10 // Steps 6.b-6.e: +0 != -0 return x !== 0 || 1 / x === 1 / y; } else { // Step 6.a: NaN == NaN return x !== x && y !== y; } } /*eslint-enable no-self-compare*/ /** * We use an Error-like object for backward compatibility as people may call * PropTypes directly and inspect their output. However we don't use real * Errors anymore. We don't inspect their stack anyway, and creating them * is prohibitively expensive if they are created too often, such as what * happens in oneOfType() for any type before the one that matched. */ function PropTypeError(message) { this.message = message; this.stack = ''; } // Make `instanceof Error` still work for returned errors. PropTypeError.prototype = Error.prototype; function createChainableTypeChecker(validate) { if (false) { var manualPropTypeCallCache = {}; } function checkType(isRequired, props, propName, componentName, location, propFullName, secret) { componentName = componentName || ANONYMOUS; propFullName = propFullName || propName; if (false) { if (secret !== ReactPropTypesSecret && typeof console !== 'undefined') { var cacheKey = componentName + ':' + propName; if (!manualPropTypeCallCache[cacheKey]) { process.env.NODE_ENV !== 'production' ? warning(false, 'You are manually calling a React.PropTypes validation ' + 'function for the `%s` prop on `%s`. This is deprecated ' + 'and will not work in production with the next major version. ' + 'You may be seeing this warning due to a third-party PropTypes ' + 'library. 
See https://fb.me/react-warning-dont-call-proptypes ' + 'for details.', propFullName, componentName) : void 0; manualPropTypeCallCache[cacheKey] = true; } } } if (props[propName] == null) { var locationName = ReactPropTypeLocationNames[location]; if (isRequired) { if (props[propName] === null) { return new PropTypeError('The ' + locationName + ' `' + propFullName + '` is marked as required ' + ('in `' + componentName + '`, but its value is `null`.')); } return new PropTypeError('The ' + locationName + ' `' + propFullName + '` is marked as required in ' + ('`' + componentName + '`, but its value is `undefined`.')); } return null; } else { return validate(props, propName, componentName, location, propFullName); } } var chainedCheckType = checkType.bind(null, false); chainedCheckType.isRequired = checkType.bind(null, true); return chainedCheckType; } function createPrimitiveTypeChecker(expectedType) { function validate(props, propName, componentName, location, propFullName, secret) { var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== expectedType) { var locationName = ReactPropTypeLocationNames[location]; // `propValue` being instance of, say, date/regexp, pass the 'object' // check, but we can offer a more precise error message here rather than // 'of type `object`'. var preciseType = getPreciseType(propValue); return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of type ' + ('`' + preciseType + '` supplied to `' + componentName + '`, expected ') + ('`' + expectedType + '`.')); } return null; } return createChainableTypeChecker(validate); } function createAnyTypeChecker() { return createChainableTypeChecker(emptyFunction.thatReturns(null)); } function createArrayOfTypeChecker(typeChecker) { function validate(props, propName, componentName, location, propFullName) { if (typeof typeChecker !== 'function') { return new PropTypeError('Property `' + propFullName + '` of component `' + componentName + '` has invalid PropType notation inside arrayOf.'); } var propValue = props[propName]; if (!Array.isArray(propValue)) { var locationName = ReactPropTypeLocationNames[location]; var propType = getPropType(propValue); return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of type ' + ('`' + propType + '` supplied to `' + componentName + '`, expected an array.')); } for (var i = 0; i < propValue.length; i++) { var error = typeChecker(propValue, i, componentName, location, propFullName + '[' + i + ']', ReactPropTypesSecret); if (error instanceof Error) { return error; } } return null; } return createChainableTypeChecker(validate); } function createElementTypeChecker() { function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; if (!ReactElement.isValidElement(propValue)) { var locationName = ReactPropTypeLocationNames[location]; var propType = getPropType(propValue); return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of type ' + ('`' + propType + '` supplied to `' + componentName + '`, expected a single ReactElement.')); } return null; } return createChainableTypeChecker(validate); } function createInstanceTypeChecker(expectedClass) { function validate(props, propName, componentName, location, propFullName) { if (!(props[propName] instanceof expectedClass)) { var locationName = ReactPropTypeLocationNames[location]; var expectedClassName = expectedClass.name || ANONYMOUS; var actualClassName = getClassName(props[propName]); return new 
PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of type ' + ('`' + actualClassName + '` supplied to `' + componentName + '`, expected ') + ('instance of `' + expectedClassName + '`.')); } return null; } return createChainableTypeChecker(validate); } function createEnumTypeChecker(expectedValues) { if (!Array.isArray(expectedValues)) { false ? warning(false, 'Invalid argument supplied to oneOf, expected an instance of array.') : void 0; return emptyFunction.thatReturnsNull; } function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; for (var i = 0; i < expectedValues.length; i++) { if (is(propValue, expectedValues[i])) { return null; } } var locationName = ReactPropTypeLocationNames[location]; var valuesString = JSON.stringify(expectedValues); return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of value `' + propValue + '` ' + ('supplied to `' + componentName + '`, expected one of ' + valuesString + '.')); } return createChainableTypeChecker(validate); } function createObjectOfTypeChecker(typeChecker) { function validate(props, propName, componentName, location, propFullName) { if (typeof typeChecker !== 'function') { return new PropTypeError('Property `' + propFullName + '` of component `' + componentName + '` has invalid PropType notation inside objectOf.'); } var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== 'object') { var locationName = ReactPropTypeLocationNames[location]; return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of type ' + ('`' + propType + '` supplied to `' + componentName + '`, expected an object.')); } for (var key in propValue) { if (propValue.hasOwnProperty(key)) { var error = typeChecker(propValue, key, componentName, location, propFullName + '.' + key, ReactPropTypesSecret); if (error instanceof Error) { return error; } } } return null; } return createChainableTypeChecker(validate); } function createUnionTypeChecker(arrayOfTypeCheckers) { if (!Array.isArray(arrayOfTypeCheckers)) { false ? 
warning(false, 'Invalid argument supplied to oneOfType, expected an instance of array.') : void 0; return emptyFunction.thatReturnsNull; } function validate(props, propName, componentName, location, propFullName) { for (var i = 0; i < arrayOfTypeCheckers.length; i++) { var checker = arrayOfTypeCheckers[i]; if (checker(props, propName, componentName, location, propFullName, ReactPropTypesSecret) == null) { return null; } } var locationName = ReactPropTypeLocationNames[location]; return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` supplied to ' + ('`' + componentName + '`.')); } return createChainableTypeChecker(validate); } function createNodeChecker() { function validate(props, propName, componentName, location, propFullName) { if (!isNode(props[propName])) { var locationName = ReactPropTypeLocationNames[location]; return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` supplied to ' + ('`' + componentName + '`, expected a ReactNode.')); } return null; } return createChainableTypeChecker(validate); } function createShapeTypeChecker(shapeTypes) { function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== 'object') { var locationName = ReactPropTypeLocationNames[location]; return new PropTypeError('Invalid ' + locationName + ' `' + propFullName + '` of type `' + propType + '` ' + ('supplied to `' + componentName + '`, expected `object`.')); } for (var key in shapeTypes) { var checker = shapeTypes[key]; if (!checker) { continue; } var error = checker(propValue, key, componentName, location, propFullName + '.' + key, ReactPropTypesSecret); if (error) { return error; } } return null; } return createChainableTypeChecker(validate); } function isNode(propValue) { switch (typeof propValue) { case 'number': case 'string': case 'undefined': return true; case 'boolean': return !propValue; case 'object': if (Array.isArray(propValue)) { return propValue.every(isNode); } if (propValue === null || ReactElement.isValidElement(propValue)) { return true; } var iteratorFn = getIteratorFn(propValue); if (iteratorFn) { var iterator = iteratorFn.call(propValue); var step; if (iteratorFn !== propValue.entries) { while (!(step = iterator.next()).done) { if (!isNode(step.value)) { return false; } } } else { // Iterator will provide entry [k,v] tuples rather than values. while (!(step = iterator.next()).done) { var entry = step.value; if (entry) { if (!isNode(entry[1])) { return false; } } } } } else { return false; } return true; default: return false; } } function isSymbol(propType, propValue) { // Native Symbol. if (propType === 'symbol') { return true; } // 19.4.3.5 Symbol.prototype[@@toStringTag] === 'Symbol' if (propValue['@@toStringTag'] === 'Symbol') { return true; } // Fallback for non-spec compliant Symbols which are polyfilled. if (typeof Symbol === 'function' && propValue instanceof Symbol) { return true; } return false; } // Equivalent of `typeof` but with special handling for array and regexp. function getPropType(propValue) { var propType = typeof propValue; if (Array.isArray(propValue)) { return 'array'; } if (propValue instanceof RegExp) { // Old webkits (at least until Android 4.0) return 'function' rather than // 'object' for typeof a RegExp. We'll normalize this here so that /bla/ // passes PropTypes.object. 
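// Editorial note: with this normalization, getPropType([]) is 'array',
// getPropType(/x/) is 'object', and a native or polyfilled Symbol is reported
// as 'symbol' via the isSymbol check below; everything else falls through to
// the raw typeof result.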
return 'object'; } if (isSymbol(propType, propValue)) { return 'symbol'; } return propType; } // This handles more types than `getPropType`. Only used for error messages. // See `createPrimitiveTypeChecker`. function getPreciseType(propValue) { var propType = getPropType(propValue); if (propType === 'object') { if (propValue instanceof Date) { return 'date'; } else if (propValue instanceof RegExp) { return 'regexp'; } } return propType; } // Returns class name of the object, if any. function getClassName(propValue) { if (!propValue.constructor || !propValue.constructor.name) { return ANONYMOUS; } return propValue.constructor.name; } module.exports = ReactPropTypes; /***/ }, /* 26 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var ReactPropTypesSecret = 'SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED'; module.exports = ReactPropTypesSecret; /***/ }, /* 27 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; module.exports = '15.4.2'; /***/ }, /* 28 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(7); var ReactElement = __webpack_require__(9); var invariant = __webpack_require__(8); /** * Returns the first child in a collection of children and verifies that there * is only one child in the collection. * * See https://facebook.github.io/react/docs/top-level-api.html#react.children.only * * The current implementation of this function assumes that a single child gets * passed without a wrapper, but the purpose of this helper function is to * abstract away the particular structure of children. * * @param {?object} children Child collection structure. * @return {ReactElement} The first and only `ReactElement` contained in the * structure. */ function onlyChild(children) { !ReactElement.isValidElement(children) ? false ? invariant(false, 'React.Children.only expected to receive a single React element child.') : _prodInvariant('143') : void 0; return children; } module.exports = onlyChild; /***/ }, /* 29 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; module.exports = __webpack_require__(30); /***/ }, /* 30 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
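 *
 * Editorial usage sketch for the ReactDOM entry point assembled below
 * (the App component and the #root host element are hypothetical):
 *
 *   var container = document.getElementById('root');
 *   ReactDOM.render(React.createElement(App), container);
 *   // later: ReactDOM.findDOMNode(instance), ReactDOM.unmountComponentAtNode(container)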
* */ /* globals __REACT_DEVTOOLS_GLOBAL_HOOK__*/ 'use strict'; var ReactDOMComponentTree = __webpack_require__(31); var ReactDefaultInjection = __webpack_require__(35); var ReactMount = __webpack_require__(158); var ReactReconciler = __webpack_require__(56); var ReactUpdates = __webpack_require__(53); var ReactVersion = __webpack_require__(163); var findDOMNode = __webpack_require__(164); var getHostComponentFromComposite = __webpack_require__(165); var renderSubtreeIntoContainer = __webpack_require__(166); var warning = __webpack_require__(11); ReactDefaultInjection.inject(); var ReactDOM = { findDOMNode: findDOMNode, render: ReactMount.render, unmountComponentAtNode: ReactMount.unmountComponentAtNode, version: ReactVersion, /* eslint-disable camelcase */ unstable_batchedUpdates: ReactUpdates.batchedUpdates, unstable_renderSubtreeIntoContainer: renderSubtreeIntoContainer }; // Inject the runtime into a devtools global hook regardless of browser. // Allows for debugging when the hook is injected on the page. if (typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ !== 'undefined' && typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.inject === 'function') { __REACT_DEVTOOLS_GLOBAL_HOOK__.inject({ ComponentTree: { getClosestInstanceFromNode: ReactDOMComponentTree.getClosestInstanceFromNode, getNodeFromInstance: function (inst) { // inst is an internal instance (but could be a composite) if (inst._renderedComponent) { inst = getHostComponentFromComposite(inst); } if (inst) { return ReactDOMComponentTree.getNodeFromInstance(inst); } else { return null; } } }, Mount: ReactMount, Reconciler: ReactReconciler }); } if (false) { var ExecutionEnvironment = require('fbjs/lib/ExecutionEnvironment'); if (ExecutionEnvironment.canUseDOM && window.top === window.self) { // First check if devtools is not installed if (typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ === 'undefined') { // If we're in Chrome or Firefox, provide a download link if not installed. if (navigator.userAgent.indexOf('Chrome') > -1 && navigator.userAgent.indexOf('Edge') === -1 || navigator.userAgent.indexOf('Firefox') > -1) { // Firefox does not have the issue with devtools loaded over file:// var showFileUrlMessage = window.location.protocol.indexOf('http') === -1 && navigator.userAgent.indexOf('Firefox') === -1; console.debug('Download the React DevTools ' + (showFileUrlMessage ? 'and use an HTTP server (instead of a file: URL) ' : '') + 'for a better development experience: ' + 'https://fb.me/react-devtools'); } } var testFunc = function testFn() {}; process.env.NODE_ENV !== 'production' ? warning((testFunc.name || testFunc.toString()).indexOf('testFn') !== -1, 'It looks like you\'re using a minified copy of the development build ' + 'of React. When deploying React apps to production, make sure to use ' + 'the production build which skips development warnings and is faster. ' + 'See https://fb.me/react-minification for more details.') : void 0; // If we're in IE8, check to see if we are in compatibility mode and provide // information on preventing compatibility mode var ieCompatibilityMode = document.documentMode && document.documentMode < 8; process.env.NODE_ENV !== 'production' ? 
warning(!ieCompatibilityMode, 'Internet Explorer is running in compatibility mode; please add the ' + 'following tag to your HTML to prevent this from happening: ' + '<meta http-equiv="X-UA-Compatible" content="IE=edge" />') : void 0; var expectedFeatures = [ // shims Array.isArray, Array.prototype.every, Array.prototype.forEach, Array.prototype.indexOf, Array.prototype.map, Date.now, Function.prototype.bind, Object.keys, String.prototype.trim]; for (var i = 0; i < expectedFeatures.length; i++) { if (!expectedFeatures[i]) { process.env.NODE_ENV !== 'production' ? warning(false, 'One or more ES5 shims expected by React are not available: ' + 'https://fb.me/react-warning-polyfills') : void 0; break; } } } } if (false) { var ReactInstrumentation = require('./ReactInstrumentation'); var ReactDOMUnknownPropertyHook = require('./ReactDOMUnknownPropertyHook'); var ReactDOMNullInputValuePropHook = require('./ReactDOMNullInputValuePropHook'); var ReactDOMInvalidARIAHook = require('./ReactDOMInvalidARIAHook'); ReactInstrumentation.debugTool.addHook(ReactDOMUnknownPropertyHook); ReactInstrumentation.debugTool.addHook(ReactDOMNullInputValuePropHook); ReactInstrumentation.debugTool.addHook(ReactDOMInvalidARIAHook); } module.exports = ReactDOM; /***/ }, /* 31 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var DOMProperty = __webpack_require__(33); var ReactDOMComponentFlags = __webpack_require__(34); var invariant = __webpack_require__(8); var ATTR_NAME = DOMProperty.ID_ATTRIBUTE_NAME; var Flags = ReactDOMComponentFlags; var internalInstanceKey = '__reactInternalInstance$' + Math.random().toString(36).slice(2); /** * Check if a given node should be cached. */ function shouldPrecacheNode(node, nodeID) { return node.nodeType === 1 && node.getAttribute(ATTR_NAME) === String(nodeID) || node.nodeType === 8 && node.nodeValue === ' react-text: ' + nodeID + ' ' || node.nodeType === 8 && node.nodeValue === ' react-empty: ' + nodeID + ' '; } /** * Drill down (through composites and empty components) until we get a host or * host text component. * * This is pretty polymorphic but unavoidable with the current structure we have * for `_renderedChildren`. */ function getRenderedHostOrTextFromComponent(component) { var rendered; while (rendered = component._renderedComponent) { component = rendered; } return component; } /** * Populate `_hostNode` on the rendered host/text component with the given * DOM node. The passed `inst` can be a composite. */ function precacheNode(inst, node) { var hostInst = getRenderedHostOrTextFromComponent(inst); hostInst._hostNode = node; node[internalInstanceKey] = hostInst; } function uncacheNode(inst) { var node = inst._hostNode; if (node) { delete node[internalInstanceKey]; inst._hostNode = null; } } /** * Populate `_hostNode` on each child of `inst`, assuming that the children * match up with the DOM (element) children of `node`. * * We cache entire levels at once to avoid an n^2 problem where we access the * children of a node sequentially and have to walk from the start to our target * node every time. 
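 *
 * Editorial sketch of how this cache is consumed (mirroring the devtools hook
 * wiring in the ReactDOM entry module; someDomNode is hypothetical):
 *
 *   var inst = ReactDOMComponentTree.getClosestInstanceFromNode(someDomNode);
 *   var node = inst && ReactDOMComponentTree.getNodeFromInstance(inst);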
* * Since we update `_renderedChildren` and the actual DOM at (slightly) * different times, we could race here and see a newer `_renderedChildren` than * the DOM nodes we see. To avoid this, ReactMultiChild calls * `prepareToManageChildren` before we change `_renderedChildren`, at which * time the container's child nodes are always cached (until it unmounts). */ function precacheChildNodes(inst, node) { if (inst._flags & Flags.hasCachedChildNodes) { return; } var children = inst._renderedChildren; var childNode = node.firstChild; outer: for (var name in children) { if (!children.hasOwnProperty(name)) { continue; } var childInst = children[name]; var childID = getRenderedHostOrTextFromComponent(childInst)._domID; if (childID === 0) { // We're currently unmounting this child in ReactMultiChild; skip it. continue; } // We assume the child nodes are in the same order as the child instances. for (; childNode !== null; childNode = childNode.nextSibling) { if (shouldPrecacheNode(childNode, childID)) { precacheNode(childInst, childNode); continue outer; } } // We reached the end of the DOM children without finding an ID match. true ? false ? invariant(false, 'Unable to find element with ID %s.', childID) : _prodInvariant('32', childID) : void 0; } inst._flags |= Flags.hasCachedChildNodes; } /** * Given a DOM node, return the closest ReactDOMComponent or * ReactDOMTextComponent instance ancestor. */ function getClosestInstanceFromNode(node) { if (node[internalInstanceKey]) { return node[internalInstanceKey]; } // Walk up the tree until we find an ancestor whose instance we have cached. var parents = []; while (!node[internalInstanceKey]) { parents.push(node); if (node.parentNode) { node = node.parentNode; } else { // Top of the tree. This node must not be part of a React tree (or is // unmounted, potentially). return null; } } var closest; var inst; for (; node && (inst = node[internalInstanceKey]); node = parents.pop()) { closest = inst; if (parents.length) { precacheChildNodes(inst, node); } } return closest; } /** * Given a DOM node, return the ReactDOMComponent or ReactDOMTextComponent * instance, or null if the node was not rendered by this React. */ function getInstanceFromNode(node) { var inst = getClosestInstanceFromNode(node); if (inst != null && inst._hostNode === node) { return inst; } else { return null; } } /** * Given a ReactDOMComponent or ReactDOMTextComponent, return the corresponding * DOM node. */ function getNodeFromInstance(inst) { // Without this first invariant, passing a non-DOM-component triggers the next // invariant for a missing parent, which is super confusing. !(inst._hostNode !== undefined) ? false ? invariant(false, 'getNodeFromInstance: Invalid argument.') : _prodInvariant('33') : void 0; if (inst._hostNode) { return inst._hostNode; } // Walk up the tree until we find an ancestor whose DOM node we have cached. var parents = []; while (!inst._hostNode) { parents.push(inst); !inst._hostParent ? false ? invariant(false, 'React DOM tree root should always have a node reference.') : _prodInvariant('34') : void 0; inst = inst._hostParent; } // Now parents contains each ancestor that does *not* have a cached native // node, and `inst` is the deepest ancestor that does. 
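// Editorial note: walking back down from that cached ancestor, precacheChildNodes
// fills in _hostNode at each level, so inst._hostNode is populated by the time
// it is returned below.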
for (; parents.length; inst = parents.pop()) { precacheChildNodes(inst, inst._hostNode); } return inst._hostNode; } var ReactDOMComponentTree = { getClosestInstanceFromNode: getClosestInstanceFromNode, getInstanceFromNode: getInstanceFromNode, getNodeFromInstance: getNodeFromInstance, precacheChildNodes: precacheChildNodes, precacheNode: precacheNode, uncacheNode: uncacheNode }; module.exports = ReactDOMComponentTree; /***/ }, /* 32 */ 7, /* 33 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); function checkMask(value, bitmask) { return (value & bitmask) === bitmask; } var DOMPropertyInjection = { /** * Mapping from normalized, camelcased property names to a configuration that * specifies how the associated DOM property should be accessed or rendered. */ MUST_USE_PROPERTY: 0x1, HAS_BOOLEAN_VALUE: 0x4, HAS_NUMERIC_VALUE: 0x8, HAS_POSITIVE_NUMERIC_VALUE: 0x10 | 0x8, HAS_OVERLOADED_BOOLEAN_VALUE: 0x20, /** * Inject some specialized knowledge about the DOM. This takes a config object * with the following properties: * * isCustomAttribute: function that given an attribute name will return true * if it can be inserted into the DOM verbatim. Useful for data-* or aria-* * attributes where it's impossible to enumerate all of the possible * attribute names, * * Properties: object mapping DOM property name to one of the * DOMPropertyInjection constants or null. If your attribute isn't in here, * it won't get written to the DOM. * * DOMAttributeNames: object mapping React attribute name to the DOM * attribute name. Attribute names not specified use the **lowercase** * normalized name. * * DOMAttributeNamespaces: object mapping React attribute name to the DOM * attribute namespace URL. (Attribute names not specified use no namespace.) * * DOMPropertyNames: similar to DOMAttributeNames but for DOM properties. * Property names not specified use the normalized name. * * DOMMutationMethods: Properties that require special mutation methods. If * `value` is undefined, the mutation method should unset the property. * * @param {object} domPropertyConfig the config as described above. */ injectDOMPropertyConfig: function (domPropertyConfig) { var Injection = DOMPropertyInjection; var Properties = domPropertyConfig.Properties || {}; var DOMAttributeNamespaces = domPropertyConfig.DOMAttributeNamespaces || {}; var DOMAttributeNames = domPropertyConfig.DOMAttributeNames || {}; var DOMPropertyNames = domPropertyConfig.DOMPropertyNames || {}; var DOMMutationMethods = domPropertyConfig.DOMMutationMethods || {}; if (domPropertyConfig.isCustomAttribute) { DOMProperty._isCustomAttributeFunctions.push(domPropertyConfig.isCustomAttribute); } for (var propName in Properties) { !!DOMProperty.properties.hasOwnProperty(propName) ? false ? invariant(false, 'injectDOMPropertyConfig(...): You\'re trying to inject DOM property \'%s\' which has already been injected. 
You may be accidentally injecting the same DOM property config twice, or you may be injecting two configs that have conflicting property names.', propName) : _prodInvariant('48', propName) : void 0; var lowerCased = propName.toLowerCase(); var propConfig = Properties[propName]; var propertyInfo = { attributeName: lowerCased, attributeNamespace: null, propertyName: propName, mutationMethod: null, mustUseProperty: checkMask(propConfig, Injection.MUST_USE_PROPERTY), hasBooleanValue: checkMask(propConfig, Injection.HAS_BOOLEAN_VALUE), hasNumericValue: checkMask(propConfig, Injection.HAS_NUMERIC_VALUE), hasPositiveNumericValue: checkMask(propConfig, Injection.HAS_POSITIVE_NUMERIC_VALUE), hasOverloadedBooleanValue: checkMask(propConfig, Injection.HAS_OVERLOADED_BOOLEAN_VALUE) }; !(propertyInfo.hasBooleanValue + propertyInfo.hasNumericValue + propertyInfo.hasOverloadedBooleanValue <= 1) ? false ? invariant(false, 'DOMProperty: Value can be one of boolean, overloaded boolean, or numeric value, but not a combination: %s', propName) : _prodInvariant('50', propName) : void 0; if (false) { DOMProperty.getPossibleStandardName[lowerCased] = propName; } if (DOMAttributeNames.hasOwnProperty(propName)) { var attributeName = DOMAttributeNames[propName]; propertyInfo.attributeName = attributeName; if (false) { DOMProperty.getPossibleStandardName[attributeName] = propName; } } if (DOMAttributeNamespaces.hasOwnProperty(propName)) { propertyInfo.attributeNamespace = DOMAttributeNamespaces[propName]; } if (DOMPropertyNames.hasOwnProperty(propName)) { propertyInfo.propertyName = DOMPropertyNames[propName]; } if (DOMMutationMethods.hasOwnProperty(propName)) { propertyInfo.mutationMethod = DOMMutationMethods[propName]; } DOMProperty.properties[propName] = propertyInfo; } } }; /* eslint-disable max-len */ var ATTRIBUTE_NAME_START_CHAR = ':A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; /* eslint-enable max-len */ /** * DOMProperty exports lookup objects that can be used like functions: * * > DOMProperty.isValid['id'] * true * > DOMProperty.isValid['foobar'] * undefined * * Although this may be confusing, it performs better in general. * * @see http://jsperf.com/key-exists * @see http://jsperf.com/key-missing */ var DOMProperty = { ID_ATTRIBUTE_NAME: 'data-reactid', ROOT_ATTRIBUTE_NAME: 'data-reactroot', ATTRIBUTE_NAME_START_CHAR: ATTRIBUTE_NAME_START_CHAR, ATTRIBUTE_NAME_CHAR: ATTRIBUTE_NAME_START_CHAR + '\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040', /** * Map from property "standard name" to an object with info about how to set * the property in the DOM. Each object contains: * * attributeName: * Used when rendering markup or with `*Attribute()`. * attributeNamespace * propertyName: * Used on DOM node instances. (This includes properties that mutate due to * external factors.) * mutationMethod: * If non-null, used instead of the property or `setAttribute()` after * initial render. * mustUseProperty: * Whether the property must be accessed and mutated as an object property. * hasBooleanValue: * Whether the property should be removed when set to a falsey value. * hasNumericValue: * Whether the property must be numeric or parse as a numeric and should be * removed when set to a falsey value. * hasPositiveNumericValue: * Whether the property must be positive numeric or parse as a positive * numeric and should be removed when set to a falsey value. 
* hasOverloadedBooleanValue: * Whether the property can be used as a flag as well as with a value. * Removed when strictly equal to false; present without a value when * strictly equal to true; present with a value otherwise. */ properties: {}, /** * Mapping from lowercase property names to the properly cased version, used * to warn in the case of missing properties. Available only in __DEV__. * * autofocus is predefined, because adding it to the property whitelist * causes unintended side effects. * * @type {Object} */ getPossibleStandardName: false ? { autofocus: 'autoFocus' } : null, /** * All of the isCustomAttribute() functions that have been injected. */ _isCustomAttributeFunctions: [], /** * Checks whether a property name is a custom attribute. * @method */ isCustomAttribute: function (attributeName) { for (var i = 0; i < DOMProperty._isCustomAttributeFunctions.length; i++) { var isCustomAttributeFn = DOMProperty._isCustomAttributeFunctions[i]; if (isCustomAttributeFn(attributeName)) { return true; } } return false; }, injection: DOMPropertyInjection }; module.exports = DOMProperty; /***/ }, /* 34 */ /***/ function(module, exports) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactDOMComponentFlags = { hasCachedChildNodes: 1 << 0 }; module.exports = ReactDOMComponentFlags; /***/ }, /* 35 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ARIADOMPropertyConfig = __webpack_require__(36); var BeforeInputEventPlugin = __webpack_require__(37); var ChangeEventPlugin = __webpack_require__(52); var DefaultEventPluginOrder = __webpack_require__(64); var EnterLeaveEventPlugin = __webpack_require__(65); var HTMLDOMPropertyConfig = __webpack_require__(70); var ReactComponentBrowserEnvironment = __webpack_require__(71); var ReactDOMComponent = __webpack_require__(84); var ReactDOMComponentTree = __webpack_require__(31); var ReactDOMEmptyComponent = __webpack_require__(129); var ReactDOMTreeTraversal = __webpack_require__(130); var ReactDOMTextComponent = __webpack_require__(131); var ReactDefaultBatchingStrategy = __webpack_require__(132); var ReactEventListener = __webpack_require__(133); var ReactInjection = __webpack_require__(136); var ReactReconcileTransaction = __webpack_require__(137); var SVGDOMPropertyConfig = __webpack_require__(145); var SelectEventPlugin = __webpack_require__(146); var SimpleEventPlugin = __webpack_require__(147); var alreadyInjected = false; function inject() { if (alreadyInjected) { // TODO: This is currently true because these injections are shared between // the client and the server package. They should be built independently // and not share any injection state. Then this problem will be solved. return; } alreadyInjected = true; ReactInjection.EventEmitter.injectReactEventListener(ReactEventListener); /** * Inject modules for resolving DOM hierarchy and plugin ordering. 
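 *
 * EventPluginUtils cannot dispatch anything until it can map React instances to
 * DOM nodes, so the component tree and tree-traversal helpers are injected here
 * together with the plugin ordering.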
*/ ReactInjection.EventPluginHub.injectEventPluginOrder(DefaultEventPluginOrder); ReactInjection.EventPluginUtils.injectComponentTree(ReactDOMComponentTree); ReactInjection.EventPluginUtils.injectTreeTraversal(ReactDOMTreeTraversal); /** * Some important event plugins included by default (without having to require * them). */ ReactInjection.EventPluginHub.injectEventPluginsByName({ SimpleEventPlugin: SimpleEventPlugin, EnterLeaveEventPlugin: EnterLeaveEventPlugin, ChangeEventPlugin: ChangeEventPlugin, SelectEventPlugin: SelectEventPlugin, BeforeInputEventPlugin: BeforeInputEventPlugin }); ReactInjection.HostComponent.injectGenericComponentClass(ReactDOMComponent); ReactInjection.HostComponent.injectTextComponentClass(ReactDOMTextComponent); ReactInjection.DOMProperty.injectDOMPropertyConfig(ARIADOMPropertyConfig); ReactInjection.DOMProperty.injectDOMPropertyConfig(HTMLDOMPropertyConfig); ReactInjection.DOMProperty.injectDOMPropertyConfig(SVGDOMPropertyConfig); ReactInjection.EmptyComponent.injectEmptyComponentFactory(function (instantiate) { return new ReactDOMEmptyComponent(instantiate); }); ReactInjection.Updates.injectReconcileTransaction(ReactReconcileTransaction); ReactInjection.Updates.injectBatchingStrategy(ReactDefaultBatchingStrategy); ReactInjection.Component.injectEnvironment(ReactComponentBrowserEnvironment); } module.exports = { inject: inject }; /***/ }, /* 36 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ARIADOMPropertyConfig = { Properties: { // Global States and Properties 'aria-current': 0, // state 'aria-details': 0, 'aria-disabled': 0, // state 'aria-hidden': 0, // state 'aria-invalid': 0, // state 'aria-keyshortcuts': 0, 'aria-label': 0, 'aria-roledescription': 0, // Widget Attributes 'aria-autocomplete': 0, 'aria-checked': 0, 'aria-expanded': 0, 'aria-haspopup': 0, 'aria-level': 0, 'aria-modal': 0, 'aria-multiline': 0, 'aria-multiselectable': 0, 'aria-orientation': 0, 'aria-placeholder': 0, 'aria-pressed': 0, 'aria-readonly': 0, 'aria-required': 0, 'aria-selected': 0, 'aria-sort': 0, 'aria-valuemax': 0, 'aria-valuemin': 0, 'aria-valuenow': 0, 'aria-valuetext': 0, // Live Region Attributes 'aria-atomic': 0, 'aria-busy': 0, 'aria-live': 0, 'aria-relevant': 0, // Drag-and-Drop Attributes 'aria-dropeffect': 0, 'aria-grabbed': 0, // Relationship Attributes 'aria-activedescendant': 0, 'aria-colcount': 0, 'aria-colindex': 0, 'aria-colspan': 0, 'aria-controls': 0, 'aria-describedby': 0, 'aria-errormessage': 0, 'aria-flowto': 0, 'aria-labelledby': 0, 'aria-owns': 0, 'aria-posinset': 0, 'aria-rowcount': 0, 'aria-rowindex': 0, 'aria-rowspan': 0, 'aria-setsize': 0 }, DOMAttributeNames: {}, DOMPropertyNames: {} }; module.exports = ARIADOMPropertyConfig; /***/ }, /* 37 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var EventPropagators = __webpack_require__(38); var ExecutionEnvironment = __webpack_require__(45); var FallbackCompositionState = __webpack_require__(46); var SyntheticCompositionEvent = __webpack_require__(49); var SyntheticInputEvent = __webpack_require__(51); var END_KEYCODES = [9, 13, 27, 32]; // Tab, Return, Esc, Space var START_KEYCODE = 229; var canUseCompositionEvent = ExecutionEnvironment.canUseDOM && 'CompositionEvent' in window; var documentMode = null; if (ExecutionEnvironment.canUseDOM && 'documentMode' in document) { documentMode = document.documentMode; } // Webkit offers a very useful `textInput` event that can be used to // directly represent `beforeInput`. The IE `textinput` event is not as // useful, so we don't use it. var canUseTextInputEvent = ExecutionEnvironment.canUseDOM && 'TextEvent' in window && !documentMode && !isPresto(); // In IE9+, we have access to composition events, but the data supplied // by the native compositionend event may be incorrect. Japanese ideographic // spaces, for instance (\u3000) are not recorded correctly. var useFallbackCompositionData = ExecutionEnvironment.canUseDOM && (!canUseCompositionEvent || documentMode && documentMode > 8 && documentMode <= 11); /** * Opera <= 12 includes TextEvent in window, but does not fire * text input events. Rely on keypress instead. */ function isPresto() { var opera = window.opera; return typeof opera === 'object' && typeof opera.version === 'function' && parseInt(opera.version(), 10) <= 12; } var SPACEBAR_CODE = 32; var SPACEBAR_CHAR = String.fromCharCode(SPACEBAR_CODE); // Events and their corresponding property names. var eventTypes = { beforeInput: { phasedRegistrationNames: { bubbled: 'onBeforeInput', captured: 'onBeforeInputCapture' }, dependencies: ['topCompositionEnd', 'topKeyPress', 'topTextInput', 'topPaste'] }, compositionEnd: { phasedRegistrationNames: { bubbled: 'onCompositionEnd', captured: 'onCompositionEndCapture' }, dependencies: ['topBlur', 'topCompositionEnd', 'topKeyDown', 'topKeyPress', 'topKeyUp', 'topMouseDown'] }, compositionStart: { phasedRegistrationNames: { bubbled: 'onCompositionStart', captured: 'onCompositionStartCapture' }, dependencies: ['topBlur', 'topCompositionStart', 'topKeyDown', 'topKeyPress', 'topKeyUp', 'topMouseDown'] }, compositionUpdate: { phasedRegistrationNames: { bubbled: 'onCompositionUpdate', captured: 'onCompositionUpdateCapture' }, dependencies: ['topBlur', 'topCompositionUpdate', 'topKeyDown', 'topKeyPress', 'topKeyUp', 'topMouseDown'] } }; // Track whether we've ever handled a keypress on the space key. var hasSpaceKeypress = false; /** * Return whether a native keypress event is assumed to be a command. * This is required because Firefox fires `keypress` events for key commands * (cut, copy, select-all, etc.) even though no character is inserted. */ function isKeypressCommand(nativeEvent) { return (nativeEvent.ctrlKey || nativeEvent.altKey || nativeEvent.metaKey) && // ctrlKey && altKey is equivalent to AltGr, and is not a command. !(nativeEvent.ctrlKey && nativeEvent.altKey); } /** * Translate native top level events into event types. 
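 *
 * Hypothetical calls (results follow directly from the switch below):
 *   getCompositionEventType('topCompositionStart');  // -> eventTypes.compositionStart
 *   getCompositionEventType('topCompositionUpdate'); // -> eventTypes.compositionUpdate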
* * @param {string} topLevelType * @return {object} */ function getCompositionEventType(topLevelType) { switch (topLevelType) { case 'topCompositionStart': return eventTypes.compositionStart; case 'topCompositionEnd': return eventTypes.compositionEnd; case 'topCompositionUpdate': return eventTypes.compositionUpdate; } } /** * Does our fallback best-guess model think this event signifies that * composition has begun? * * @param {string} topLevelType * @param {object} nativeEvent * @return {boolean} */ function isFallbackCompositionStart(topLevelType, nativeEvent) { return topLevelType === 'topKeyDown' && nativeEvent.keyCode === START_KEYCODE; } /** * Does our fallback mode think that this event is the end of composition? * * @param {string} topLevelType * @param {object} nativeEvent * @return {boolean} */ function isFallbackCompositionEnd(topLevelType, nativeEvent) { switch (topLevelType) { case 'topKeyUp': // Command keys insert or clear IME input. return END_KEYCODES.indexOf(nativeEvent.keyCode) !== -1; case 'topKeyDown': // Expect IME keyCode on each keydown. If we get any other // code we must have exited earlier. return nativeEvent.keyCode !== START_KEYCODE; case 'topKeyPress': case 'topMouseDown': case 'topBlur': // Events are not possible without cancelling IME. return true; default: return false; } } /** * Google Input Tools provides composition data via a CustomEvent, * with the `data` property populated in the `detail` object. If this * is available on the event object, use it. If not, this is a plain * composition event and we have nothing special to extract. * * @param {object} nativeEvent * @return {?string} */ function getDataFromCustomEvent(nativeEvent) { var detail = nativeEvent.detail; if (typeof detail === 'object' && 'data' in detail) { return detail.data; } return null; } // Track the current IME composition fallback object, if any. var currentComposition = null; /** * @return {?object} A SyntheticCompositionEvent. */ function extractCompositionEvent(topLevelType, targetInst, nativeEvent, nativeEventTarget) { var eventType; var fallbackData; if (canUseCompositionEvent) { eventType = getCompositionEventType(topLevelType); } else if (!currentComposition) { if (isFallbackCompositionStart(topLevelType, nativeEvent)) { eventType = eventTypes.compositionStart; } } else if (isFallbackCompositionEnd(topLevelType, nativeEvent)) { eventType = eventTypes.compositionEnd; } if (!eventType) { return null; } if (useFallbackCompositionData) { // The current composition is stored statically and must not be // overwritten while composition continues. if (!currentComposition && eventType === eventTypes.compositionStart) { currentComposition = FallbackCompositionState.getPooled(nativeEventTarget); } else if (eventType === eventTypes.compositionEnd) { if (currentComposition) { fallbackData = currentComposition.getData(); } } } var event = SyntheticCompositionEvent.getPooled(eventType, targetInst, nativeEvent, nativeEventTarget); if (fallbackData) { // Inject data generated from fallback path into the synthetic event. // This matches the property of native CompositionEventInterface. event.data = fallbackData; } else { var customData = getDataFromCustomEvent(nativeEvent); if (customData !== null) { event.data = customData; } } EventPropagators.accumulateTwoPhaseDispatches(event); return event; } /** * @param {string} topLevelType Record from `EventConstants`. * @param {object} nativeEvent Native browser event. * @return {?string} The string corresponding to this `beforeInput` event. 
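 *
 * Hypothetical calls (event objects are made up; results follow from the switch below):
 *   getNativeBeforeInputChars('topTextInput', { data: 'a' }); // -> 'a'
 *   getNativeBeforeInputChars('topKeyPress', { which: 65 });  // -> null (only the spacebar is special-cased)
 *   getNativeBeforeInputChars('topKeyPress', { which: 32 });  // -> ' ' (SPACEBAR_CHAR)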
*/ function getNativeBeforeInputChars(topLevelType, nativeEvent) { switch (topLevelType) { case 'topCompositionEnd': return getDataFromCustomEvent(nativeEvent); case 'topKeyPress': /** * If native `textInput` events are available, our goal is to make * use of them. However, there is a special case: the spacebar key. * In Webkit, preventing default on a spacebar `textInput` event * cancels character insertion, but it *also* causes the browser * to fall back to its default spacebar behavior of scrolling the * page. * * Tracking at: * https://code.google.com/p/chromium/issues/detail?id=355103 * * To avoid this issue, use the keypress event as if no `textInput` * event is available. */ var which = nativeEvent.which; if (which !== SPACEBAR_CODE) { return null; } hasSpaceKeypress = true; return SPACEBAR_CHAR; case 'topTextInput': // Record the characters to be added to the DOM. var chars = nativeEvent.data; // If it's a spacebar character, assume that we have already handled // it at the keypress level and bail immediately. Android Chrome // doesn't give us keycodes, so we need to blacklist it. if (chars === SPACEBAR_CHAR && hasSpaceKeypress) { return null; } return chars; default: // For other native event types, do nothing. return null; } } /** * For browsers that do not provide the `textInput` event, extract the * appropriate string to use for SyntheticInputEvent. * * @param {string} topLevelType Record from `EventConstants`. * @param {object} nativeEvent Native browser event. * @return {?string} The fallback string for this `beforeInput` event. */ function getFallbackBeforeInputChars(topLevelType, nativeEvent) { // If we are currently composing (IME) and using a fallback to do so, // try to extract the composed characters from the fallback object. // If composition event is available, we extract a string only at // compositionevent, otherwise extract it at fallback events. if (currentComposition) { if (topLevelType === 'topCompositionEnd' || !canUseCompositionEvent && isFallbackCompositionEnd(topLevelType, nativeEvent)) { var chars = currentComposition.getData(); FallbackCompositionState.release(currentComposition); currentComposition = null; return chars; } return null; } switch (topLevelType) { case 'topPaste': // If a paste event occurs after a keypress, throw out the input // chars. Paste events should not lead to BeforeInput events. return null; case 'topKeyPress': /** * As of v27, Firefox may fire keypress events even when no character * will be inserted. A few possibilities: * * - `which` is `0`. Arrow keys, Esc key, etc. * * - `which` is the pressed key code, but no char is available. * Ex: 'AltGr + d` in Polish. There is no modified character for * this key combination and no character is inserted into the * document, but FF fires the keypress for char code `100` anyway. * No `input` event will occur. * * - `which` is the pressed key code, but a command combination is * being used. Ex: `Cmd+C`. No character is inserted, and no * `input` event will occur. */ if (nativeEvent.which && !isKeypressCommand(nativeEvent)) { return String.fromCharCode(nativeEvent.which); } return null; case 'topCompositionEnd': return useFallbackCompositionData ? null : nativeEvent.data; default: return null; } } /** * Extract a SyntheticInputEvent for `beforeInput`, based on either native * `textInput` or fallback behavior. * * @return {?object} A SyntheticInputEvent. 
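 *
 * Rough flow, as implemented below: pick the inserted characters via
 * getNativeBeforeInputChars (when the native `textInput` event is usable) or
 * getFallbackBeforeInputChars, return null when nothing was inserted, otherwise
 * pool a SyntheticInputEvent, set `event.data` to those characters and
 * accumulate its two-phase dispatches.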
*/ function extractBeforeInputEvent(topLevelType, targetInst, nativeEvent, nativeEventTarget) { var chars; if (canUseTextInputEvent) { chars = getNativeBeforeInputChars(topLevelType, nativeEvent); } else { chars = getFallbackBeforeInputChars(topLevelType, nativeEvent); } // If no characters are being inserted, no BeforeInput event should // be fired. if (!chars) { return null; } var event = SyntheticInputEvent.getPooled(eventTypes.beforeInput, targetInst, nativeEvent, nativeEventTarget); event.data = chars; EventPropagators.accumulateTwoPhaseDispatches(event); return event; } /** * Create an `onBeforeInput` event to match * http://www.w3.org/TR/2013/WD-DOM-Level-3-Events-20131105/#events-inputevents. * * This event plugin is based on the native `textInput` event * available in Chrome, Safari, Opera, and IE. This event fires after * `onKeyPress` and `onCompositionEnd`, but before `onInput`. * * `beforeInput` is spec'd but not implemented in any browsers, and * the `input` event does not provide any useful information about what has * actually been added, contrary to the spec. Thus, `textInput` is the best * available event to identify the characters that have actually been inserted * into the target node. * * This plugin is also responsible for emitting `composition` events, thus * allowing us to share composition fallback code for both `beforeInput` and * `composition` event types. */ var BeforeInputEventPlugin = { eventTypes: eventTypes, extractEvents: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { return [extractCompositionEvent(topLevelType, targetInst, nativeEvent, nativeEventTarget), extractBeforeInputEvent(topLevelType, targetInst, nativeEvent, nativeEventTarget)]; } }; module.exports = BeforeInputEventPlugin; /***/ }, /* 38 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var EventPluginHub = __webpack_require__(39); var EventPluginUtils = __webpack_require__(41); var accumulateInto = __webpack_require__(43); var forEachAccumulated = __webpack_require__(44); var warning = __webpack_require__(11); var getListener = EventPluginHub.getListener; /** * Some event types have a notion of different registration names for different * "phases" of propagation. This finds listeners by a given phase. */ function listenerAtPhase(inst, event, propagationPhase) { var registrationName = event.dispatchConfig.phasedRegistrationNames[propagationPhase]; return getListener(inst, registrationName); } /** * Tags a `SyntheticEvent` with dispatched listeners. Creating this function * here, allows us to not have to bind or create functions for each event. * Mutating the event's members allows us to not have to create a wrapping * "dispatch" object that pairs the event with the listener. */ function accumulateDirectionalDispatches(inst, phase, event) { if (false) { process.env.NODE_ENV !== 'production' ? 
warning(inst, 'Dispatching inst must not be null') : void 0; } var listener = listenerAtPhase(inst, event, phase); if (listener) { event._dispatchListeners = accumulateInto(event._dispatchListeners, listener); event._dispatchInstances = accumulateInto(event._dispatchInstances, inst); } } /** * Collect dispatches (must be entirely collected before dispatching - see unit * tests). Lazily allocate the array to conserve memory. We must loop through * each event and perform the traversal for each one. We cannot perform a * single traversal for the entire collection of events because each event may * have a different target. */ function accumulateTwoPhaseDispatchesSingle(event) { if (event && event.dispatchConfig.phasedRegistrationNames) { EventPluginUtils.traverseTwoPhase(event._targetInst, accumulateDirectionalDispatches, event); } } /** * Same as `accumulateTwoPhaseDispatchesSingle`, but skips over the targetID. */ function accumulateTwoPhaseDispatchesSingleSkipTarget(event) { if (event && event.dispatchConfig.phasedRegistrationNames) { var targetInst = event._targetInst; var parentInst = targetInst ? EventPluginUtils.getParentInstance(targetInst) : null; EventPluginUtils.traverseTwoPhase(parentInst, accumulateDirectionalDispatches, event); } } /** * Accumulates without regard to direction, does not look for phased * registration names. Same as `accumulateDirectDispatchesSingle` but without * requiring that the `dispatchMarker` be the same as the dispatched ID. */ function accumulateDispatches(inst, ignoredDirection, event) { if (event && event.dispatchConfig.registrationName) { var registrationName = event.dispatchConfig.registrationName; var listener = getListener(inst, registrationName); if (listener) { event._dispatchListeners = accumulateInto(event._dispatchListeners, listener); event._dispatchInstances = accumulateInto(event._dispatchInstances, inst); } } } /** * Accumulates dispatches on an `SyntheticEvent`, but only for the * `dispatchMarker`. * @param {SyntheticEvent} event */ function accumulateDirectDispatchesSingle(event) { if (event && event.dispatchConfig.registrationName) { accumulateDispatches(event._targetInst, null, event); } } function accumulateTwoPhaseDispatches(events) { forEachAccumulated(events, accumulateTwoPhaseDispatchesSingle); } function accumulateTwoPhaseDispatchesSkipTarget(events) { forEachAccumulated(events, accumulateTwoPhaseDispatchesSingleSkipTarget); } function accumulateEnterLeaveDispatches(leave, enter, from, to) { EventPluginUtils.traverseEnterLeave(from, to, accumulateDispatches, leave, enter); } function accumulateDirectDispatches(events) { forEachAccumulated(events, accumulateDirectDispatchesSingle); } /** * A small set of propagation patterns, each of which will accept a small amount * of information, and generate a set of "dispatch ready event objects" - which * are sets of events that have already been annotated with a set of dispatched * listener functions/ids. The API is designed this way to discourage these * propagation strategies from actually executing the dispatches, since we * always want to collect the entire set of dispatches before executing event a * single one. 
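 *
 * Sketch of typical plugin usage (mirrors BeforeInputEventPlugin above): after
 * pooling a synthetic event, call
 *   EventPropagators.accumulateTwoPhaseDispatches(event);
 * which only annotates event._dispatchListeners / event._dispatchInstances;
 * EventPluginHub executes the collected dispatches later.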
* * @constructor EventPropagators */ var EventPropagators = { accumulateTwoPhaseDispatches: accumulateTwoPhaseDispatches, accumulateTwoPhaseDispatchesSkipTarget: accumulateTwoPhaseDispatchesSkipTarget, accumulateDirectDispatches: accumulateDirectDispatches, accumulateEnterLeaveDispatches: accumulateEnterLeaveDispatches }; module.exports = EventPropagators; /***/ }, /* 39 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var EventPluginRegistry = __webpack_require__(40); var EventPluginUtils = __webpack_require__(41); var ReactErrorUtils = __webpack_require__(42); var accumulateInto = __webpack_require__(43); var forEachAccumulated = __webpack_require__(44); var invariant = __webpack_require__(8); /** * Internal store for event listeners */ var listenerBank = {}; /** * Internal queue of events that have accumulated their dispatches and are * waiting to have their dispatches executed. */ var eventQueue = null; /** * Dispatches an event and releases it back into the pool, unless persistent. * * @param {?object} event Synthetic event to be dispatched. * @param {boolean} simulated If the event is simulated (changes exn behavior) * @private */ var executeDispatchesAndRelease = function (event, simulated) { if (event) { EventPluginUtils.executeDispatchesInOrder(event, simulated); if (!event.isPersistent()) { event.constructor.release(event); } } }; var executeDispatchesAndReleaseSimulated = function (e) { return executeDispatchesAndRelease(e, true); }; var executeDispatchesAndReleaseTopLevel = function (e) { return executeDispatchesAndRelease(e, false); }; var getDictionaryKey = function (inst) { // Prevents V8 performance issue: // https://github.com/facebook/react/pull/7232 return '.' + inst._rootNodeID; }; function isInteractive(tag) { return tag === 'button' || tag === 'input' || tag === 'select' || tag === 'textarea'; } function shouldPreventMouseEvent(name, type, props) { switch (name) { case 'onClick': case 'onClickCapture': case 'onDoubleClick': case 'onDoubleClickCapture': case 'onMouseDown': case 'onMouseDownCapture': case 'onMouseMove': case 'onMouseMoveCapture': case 'onMouseUp': case 'onMouseUpCapture': return !!(props.disabled && isInteractive(type)); default: return false; } } /** * This is a unified interface for event plugins to be installed and configured. * * Event plugins can implement the following properties: * * `extractEvents` {function(string, DOMEventTarget, string, object): *} * Required. When a top-level event is fired, this method is expected to * extract synthetic events that will in turn be queued and dispatched. * * `eventTypes` {object} * Optional, plugins that fire events must publish a mapping of registration * names that are used to register listeners. Values of this mapping must * be objects that contain `registrationName` or `phasedRegistrationNames`. * * `executeDispatch` {function(object, function, string)} * Optional, allows plugins to override how an event gets dispatched. By * default, the listener is simply invoked. * * Each plugin that is injected into `EventsPluginHub` is immediately operable. * * @public */ var EventPluginHub = { /** * Methods for injecting dependencies. 
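 *
 * Hypothetical wiring (the DOM renderer's inject() earlier in this bundle
 * presumably routes through these same entry points via ReactInjection):
 *   EventPluginHub.injection.injectEventPluginOrder(DefaultEventPluginOrder);
 *   EventPluginHub.injection.injectEventPluginsByName({ SimpleEventPlugin: SimpleEventPlugin });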
*/ injection: { /** * @param {array} InjectedEventPluginOrder * @public */ injectEventPluginOrder: EventPluginRegistry.injectEventPluginOrder, /** * @param {object} injectedNamesToPlugins Map from names to plugin modules. */ injectEventPluginsByName: EventPluginRegistry.injectEventPluginsByName }, /** * Stores `listener` at `listenerBank[registrationName][key]`. Is idempotent. * * @param {object} inst The instance, which is the source of events. * @param {string} registrationName Name of listener (e.g. `onClick`). * @param {function} listener The callback to store. */ putListener: function (inst, registrationName, listener) { !(typeof listener === 'function') ? false ? invariant(false, 'Expected %s listener to be a function, instead got type %s', registrationName, typeof listener) : _prodInvariant('94', registrationName, typeof listener) : void 0; var key = getDictionaryKey(inst); var bankForRegistrationName = listenerBank[registrationName] || (listenerBank[registrationName] = {}); bankForRegistrationName[key] = listener; var PluginModule = EventPluginRegistry.registrationNameModules[registrationName]; if (PluginModule && PluginModule.didPutListener) { PluginModule.didPutListener(inst, registrationName, listener); } }, /** * @param {object} inst The instance, which is the source of events. * @param {string} registrationName Name of listener (e.g. `onClick`). * @return {?function} The stored callback. */ getListener: function (inst, registrationName) { // TODO: shouldPreventMouseEvent is DOM-specific and definitely should not // live here; needs to be moved to a better place soon var bankForRegistrationName = listenerBank[registrationName]; if (shouldPreventMouseEvent(registrationName, inst._currentElement.type, inst._currentElement.props)) { return null; } var key = getDictionaryKey(inst); return bankForRegistrationName && bankForRegistrationName[key]; }, /** * Deletes a listener from the registration bank. * * @param {object} inst The instance, which is the source of events. * @param {string} registrationName Name of listener (e.g. `onClick`). */ deleteListener: function (inst, registrationName) { var PluginModule = EventPluginRegistry.registrationNameModules[registrationName]; if (PluginModule && PluginModule.willDeleteListener) { PluginModule.willDeleteListener(inst, registrationName); } var bankForRegistrationName = listenerBank[registrationName]; // TODO: This should never be null -- when is it? if (bankForRegistrationName) { var key = getDictionaryKey(inst); delete bankForRegistrationName[key]; } }, /** * Deletes all listeners for the DOM element with the supplied ID. * * @param {object} inst The instance, which is the source of events. */ deleteAllListeners: function (inst) { var key = getDictionaryKey(inst); for (var registrationName in listenerBank) { if (!listenerBank.hasOwnProperty(registrationName)) { continue; } if (!listenerBank[registrationName][key]) { continue; } var PluginModule = EventPluginRegistry.registrationNameModules[registrationName]; if (PluginModule && PluginModule.willDeleteListener) { PluginModule.willDeleteListener(inst, registrationName); } delete listenerBank[registrationName][key]; } }, /** * Allows registered plugins an opportunity to extract events from top-level * native browser events. * * @return {*} An accumulation of synthetic events. 
* @internal */ extractEvents: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { var events; var plugins = EventPluginRegistry.plugins; for (var i = 0; i < plugins.length; i++) { // Not every plugin in the ordering may be loaded at runtime. var possiblePlugin = plugins[i]; if (possiblePlugin) { var extractedEvents = possiblePlugin.extractEvents(topLevelType, targetInst, nativeEvent, nativeEventTarget); if (extractedEvents) { events = accumulateInto(events, extractedEvents); } } } return events; }, /** * Enqueues a synthetic event that should be dispatched when * `processEventQueue` is invoked. * * @param {*} events An accumulation of synthetic events. * @internal */ enqueueEvents: function (events) { if (events) { eventQueue = accumulateInto(eventQueue, events); } }, /** * Dispatches all synthetic events on the event queue. * * @internal */ processEventQueue: function (simulated) { // Set `eventQueue` to null before processing it so that we can tell if more // events get enqueued while processing. var processingEventQueue = eventQueue; eventQueue = null; if (simulated) { forEachAccumulated(processingEventQueue, executeDispatchesAndReleaseSimulated); } else { forEachAccumulated(processingEventQueue, executeDispatchesAndReleaseTopLevel); } !!eventQueue ? false ? invariant(false, 'processEventQueue(): Additional events were enqueued while processing an event queue. Support for this has not yet been implemented.') : _prodInvariant('95') : void 0; // This would be a good time to rethrow if any of the event handlers threw. ReactErrorUtils.rethrowCaughtError(); }, /** * These are needed for tests only. Do not use! */ __purge: function () { listenerBank = {}; }, __getListenerBank: function () { return listenerBank; } }; module.exports = EventPluginHub; /***/ }, /* 40 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); /** * Injectable ordering of event plugins. */ var eventPluginOrder = null; /** * Injectable mapping from names to event plugin modules. */ var namesToPlugins = {}; /** * Recomputes the plugin list using the injected plugins and plugin ordering. * * @private */ function recomputePluginOrdering() { if (!eventPluginOrder) { // Wait until an `eventPluginOrder` is injected. return; } for (var pluginName in namesToPlugins) { var pluginModule = namesToPlugins[pluginName]; var pluginIndex = eventPluginOrder.indexOf(pluginName); !(pluginIndex > -1) ? false ? invariant(false, 'EventPluginRegistry: Cannot inject event plugins that do not exist in the plugin ordering, `%s`.', pluginName) : _prodInvariant('96', pluginName) : void 0; if (EventPluginRegistry.plugins[pluginIndex]) { continue; } !pluginModule.extractEvents ? false ? invariant(false, 'EventPluginRegistry: Event plugins must implement an `extractEvents` method, but `%s` does not.', pluginName) : _prodInvariant('97', pluginName) : void 0; EventPluginRegistry.plugins[pluginIndex] = pluginModule; var publishedEvents = pluginModule.eventTypes; for (var eventName in publishedEvents) { !publishEventForPlugin(publishedEvents[eventName], pluginModule, eventName) ? false ? 
invariant(false, 'EventPluginRegistry: Failed to publish event `%s` for plugin `%s`.', eventName, pluginName) : _prodInvariant('98', eventName, pluginName) : void 0; } } } /** * Publishes an event so that it can be dispatched by the supplied plugin. * * @param {object} dispatchConfig Dispatch configuration for the event. * @param {object} PluginModule Plugin publishing the event. * @return {boolean} True if the event was successfully published. * @private */ function publishEventForPlugin(dispatchConfig, pluginModule, eventName) { !!EventPluginRegistry.eventNameDispatchConfigs.hasOwnProperty(eventName) ? false ? invariant(false, 'EventPluginHub: More than one plugin attempted to publish the same event name, `%s`.', eventName) : _prodInvariant('99', eventName) : void 0; EventPluginRegistry.eventNameDispatchConfigs[eventName] = dispatchConfig; var phasedRegistrationNames = dispatchConfig.phasedRegistrationNames; if (phasedRegistrationNames) { for (var phaseName in phasedRegistrationNames) { if (phasedRegistrationNames.hasOwnProperty(phaseName)) { var phasedRegistrationName = phasedRegistrationNames[phaseName]; publishRegistrationName(phasedRegistrationName, pluginModule, eventName); } } return true; } else if (dispatchConfig.registrationName) { publishRegistrationName(dispatchConfig.registrationName, pluginModule, eventName); return true; } return false; } /** * Publishes a registration name that is used to identify dispatched events and * can be used with `EventPluginHub.putListener` to register listeners. * * @param {string} registrationName Registration name to add. * @param {object} PluginModule Plugin publishing the event. * @private */ function publishRegistrationName(registrationName, pluginModule, eventName) { !!EventPluginRegistry.registrationNameModules[registrationName] ? false ? invariant(false, 'EventPluginHub: More than one plugin attempted to publish the same registration name, `%s`.', registrationName) : _prodInvariant('100', registrationName) : void 0; EventPluginRegistry.registrationNameModules[registrationName] = pluginModule; EventPluginRegistry.registrationNameDependencies[registrationName] = pluginModule.eventTypes[eventName].dependencies; if (false) { var lowerCasedName = registrationName.toLowerCase(); EventPluginRegistry.possibleRegistrationNames[lowerCasedName] = registrationName; if (registrationName === 'onDoubleClick') { EventPluginRegistry.possibleRegistrationNames.ondblclick = registrationName; } } } /** * Registers plugins so that they can extract and dispatch events. * * @see {EventPluginHub} */ var EventPluginRegistry = { /** * Ordered list of injected plugins. */ plugins: [], /** * Mapping from event name to dispatch config */ eventNameDispatchConfigs: {}, /** * Mapping from registration name to plugin module */ registrationNameModules: {}, /** * Mapping from registration name to event name */ registrationNameDependencies: {}, /** * Mapping from lowercase registration names to the properly cased version, * used to warn in the case of missing event handlers. Available * only in __DEV__. * @type {Object} */ possibleRegistrationNames: false ? {} : null, // Trust the developer to only use possibleRegistrationNames in __DEV__ /** * Injects an ordering of plugins (by plugin name). This allows the ordering * to be decoupled from injection of the actual plugins so that ordering is * always deterministic regardless of packaging, on-the-fly injection, etc. 
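 *
 * Hypothetical ordering (the canonical list lives in DefaultEventPluginOrder):
 *   EventPluginRegistry.injectEventPluginOrder(
 *     ['SimpleEventPlugin', 'EnterLeaveEventPlugin', 'ChangeEventPlugin',
 *      'SelectEventPlugin', 'BeforeInputEventPlugin']);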
* * @param {array} InjectedEventPluginOrder * @internal * @see {EventPluginHub.injection.injectEventPluginOrder} */ injectEventPluginOrder: function (injectedEventPluginOrder) { !!eventPluginOrder ? false ? invariant(false, 'EventPluginRegistry: Cannot inject event plugin ordering more than once. You are likely trying to load more than one copy of React.') : _prodInvariant('101') : void 0; // Clone the ordering so it cannot be dynamically mutated. eventPluginOrder = Array.prototype.slice.call(injectedEventPluginOrder); recomputePluginOrdering(); }, /** * Injects plugins to be used by `EventPluginHub`. The plugin names must be * in the ordering injected by `injectEventPluginOrder`. * * Plugins can be injected as part of page initialization or on-the-fly. * * @param {object} injectedNamesToPlugins Map from names to plugin modules. * @internal * @see {EventPluginHub.injection.injectEventPluginsByName} */ injectEventPluginsByName: function (injectedNamesToPlugins) { var isOrderingDirty = false; for (var pluginName in injectedNamesToPlugins) { if (!injectedNamesToPlugins.hasOwnProperty(pluginName)) { continue; } var pluginModule = injectedNamesToPlugins[pluginName]; if (!namesToPlugins.hasOwnProperty(pluginName) || namesToPlugins[pluginName] !== pluginModule) { !!namesToPlugins[pluginName] ? false ? invariant(false, 'EventPluginRegistry: Cannot inject two different event plugins using the same name, `%s`.', pluginName) : _prodInvariant('102', pluginName) : void 0; namesToPlugins[pluginName] = pluginModule; isOrderingDirty = true; } } if (isOrderingDirty) { recomputePluginOrdering(); } }, /** * Looks up the plugin for the supplied event. * * @param {object} event A synthetic event. * @return {?object} The plugin that created the supplied event. * @internal */ getPluginModuleForEvent: function (event) { var dispatchConfig = event.dispatchConfig; if (dispatchConfig.registrationName) { return EventPluginRegistry.registrationNameModules[dispatchConfig.registrationName] || null; } if (dispatchConfig.phasedRegistrationNames !== undefined) { // pulling phasedRegistrationNames out of dispatchConfig helps Flow see // that it is not undefined. var phasedRegistrationNames = dispatchConfig.phasedRegistrationNames; for (var phase in phasedRegistrationNames) { if (!phasedRegistrationNames.hasOwnProperty(phase)) { continue; } var pluginModule = EventPluginRegistry.registrationNameModules[phasedRegistrationNames[phase]]; if (pluginModule) { return pluginModule; } } } return null; }, /** * Exposed for unit testing. 
* @private */ _resetEventPlugins: function () { eventPluginOrder = null; for (var pluginName in namesToPlugins) { if (namesToPlugins.hasOwnProperty(pluginName)) { delete namesToPlugins[pluginName]; } } EventPluginRegistry.plugins.length = 0; var eventNameDispatchConfigs = EventPluginRegistry.eventNameDispatchConfigs; for (var eventName in eventNameDispatchConfigs) { if (eventNameDispatchConfigs.hasOwnProperty(eventName)) { delete eventNameDispatchConfigs[eventName]; } } var registrationNameModules = EventPluginRegistry.registrationNameModules; for (var registrationName in registrationNameModules) { if (registrationNameModules.hasOwnProperty(registrationName)) { delete registrationNameModules[registrationName]; } } if (false) { var possibleRegistrationNames = EventPluginRegistry.possibleRegistrationNames; for (var lowerCasedName in possibleRegistrationNames) { if (possibleRegistrationNames.hasOwnProperty(lowerCasedName)) { delete possibleRegistrationNames[lowerCasedName]; } } } } }; module.exports = EventPluginRegistry; /***/ }, /* 41 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var ReactErrorUtils = __webpack_require__(42); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); /** * Injected dependencies: */ /** * - `ComponentTree`: [required] Module that can convert between React instances * and actual node references. */ var ComponentTree; var TreeTraversal; var injection = { injectComponentTree: function (Injected) { ComponentTree = Injected; if (false) { process.env.NODE_ENV !== 'production' ? warning(Injected && Injected.getNodeFromInstance && Injected.getInstanceFromNode, 'EventPluginUtils.injection.injectComponentTree(...): Injected ' + 'module is missing getNodeFromInstance or getInstanceFromNode.') : void 0; } }, injectTreeTraversal: function (Injected) { TreeTraversal = Injected; if (false) { process.env.NODE_ENV !== 'production' ? warning(Injected && Injected.isAncestor && Injected.getLowestCommonAncestor, 'EventPluginUtils.injection.injectTreeTraversal(...): Injected ' + 'module is missing isAncestor or getLowestCommonAncestor.') : void 0; } } }; function isEndish(topLevelType) { return topLevelType === 'topMouseUp' || topLevelType === 'topTouchEnd' || topLevelType === 'topTouchCancel'; } function isMoveish(topLevelType) { return topLevelType === 'topMouseMove' || topLevelType === 'topTouchMove'; } function isStartish(topLevelType) { return topLevelType === 'topMouseDown' || topLevelType === 'topTouchStart'; } var validateEventDispatches; if (false) { validateEventDispatches = function (event) { var dispatchListeners = event._dispatchListeners; var dispatchInstances = event._dispatchInstances; var listenersIsArr = Array.isArray(dispatchListeners); var listenersLen = listenersIsArr ? dispatchListeners.length : dispatchListeners ? 1 : 0; var instancesIsArr = Array.isArray(dispatchInstances); var instancesLen = instancesIsArr ? dispatchInstances.length : dispatchInstances ? 1 : 0; process.env.NODE_ENV !== 'production' ? warning(instancesIsArr === listenersIsArr && instancesLen === listenersLen, 'EventPluginUtils: Invalid `event`.') : void 0; }; } /** * Dispatch the event to the listener. 
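 *
 * Sets event.currentTarget from the component tree, invokes the listener through
 * ReactErrorUtils (so a throwing handler is caught instead of aborting the
 * remaining dispatches), then clears currentTarget again.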
* @param {SyntheticEvent} event SyntheticEvent to handle * @param {boolean} simulated If the event is simulated (changes exn behavior) * @param {function} listener Application-level callback * @param {*} inst Internal component instance */ function executeDispatch(event, simulated, listener, inst) { var type = event.type || 'unknown-event'; event.currentTarget = EventPluginUtils.getNodeFromInstance(inst); if (simulated) { ReactErrorUtils.invokeGuardedCallbackWithCatch(type, listener, event); } else { ReactErrorUtils.invokeGuardedCallback(type, listener, event); } event.currentTarget = null; } /** * Standard/simple iteration through an event's collected dispatches. */ function executeDispatchesInOrder(event, simulated) { var dispatchListeners = event._dispatchListeners; var dispatchInstances = event._dispatchInstances; if (false) { validateEventDispatches(event); } if (Array.isArray(dispatchListeners)) { for (var i = 0; i < dispatchListeners.length; i++) { if (event.isPropagationStopped()) { break; } // Listeners and Instances are two parallel arrays that are always in sync. executeDispatch(event, simulated, dispatchListeners[i], dispatchInstances[i]); } } else if (dispatchListeners) { executeDispatch(event, simulated, dispatchListeners, dispatchInstances); } event._dispatchListeners = null; event._dispatchInstances = null; } /** * Standard/simple iteration through an event's collected dispatches, but stops * at the first dispatch execution returning true, and returns that id. * * @return {?string} id of the first dispatch execution who's listener returns * true, or null if no listener returned true. */ function executeDispatchesInOrderStopAtTrueImpl(event) { var dispatchListeners = event._dispatchListeners; var dispatchInstances = event._dispatchInstances; if (false) { validateEventDispatches(event); } if (Array.isArray(dispatchListeners)) { for (var i = 0; i < dispatchListeners.length; i++) { if (event.isPropagationStopped()) { break; } // Listeners and Instances are two parallel arrays that are always in sync. if (dispatchListeners[i](event, dispatchInstances[i])) { return dispatchInstances[i]; } } } else if (dispatchListeners) { if (dispatchListeners(event, dispatchInstances)) { return dispatchInstances; } } return null; } /** * @see executeDispatchesInOrderStopAtTrueImpl */ function executeDispatchesInOrderStopAtTrue(event) { var ret = executeDispatchesInOrderStopAtTrueImpl(event); event._dispatchInstances = null; event._dispatchListeners = null; return ret; } /** * Execution of a "direct" dispatch - there must be at most one dispatch * accumulated on the event or it is considered an error. It doesn't really make * sense for an event with multiple dispatches (bubbled) to keep track of the * return values at each dispatch execution, but it does tend to make sense when * dealing with "direct" dispatches. * * @return {*} The return value of executing the single dispatch. */ function executeDirectDispatch(event) { if (false) { validateEventDispatches(event); } var dispatchListener = event._dispatchListeners; var dispatchInstance = event._dispatchInstances; !!Array.isArray(dispatchListener) ? false ? invariant(false, 'executeDirectDispatch(...): Invalid `event`.') : _prodInvariant('103') : void 0; event.currentTarget = dispatchListener ? EventPluginUtils.getNodeFromInstance(dispatchInstance) : null; var res = dispatchListener ? 
dispatchListener(event) : null; event.currentTarget = null; event._dispatchListeners = null; event._dispatchInstances = null; return res; } /** * @param {SyntheticEvent} event * @return {boolean} True iff number of dispatches accumulated is greater than 0. */ function hasDispatches(event) { return !!event._dispatchListeners; } /** * General utilities that are useful in creating custom Event Plugins. */ var EventPluginUtils = { isEndish: isEndish, isMoveish: isMoveish, isStartish: isStartish, executeDirectDispatch: executeDirectDispatch, executeDispatchesInOrder: executeDispatchesInOrder, executeDispatchesInOrderStopAtTrue: executeDispatchesInOrderStopAtTrue, hasDispatches: hasDispatches, getInstanceFromNode: function (node) { return ComponentTree.getInstanceFromNode(node); }, getNodeFromInstance: function (node) { return ComponentTree.getNodeFromInstance(node); }, isAncestor: function (a, b) { return TreeTraversal.isAncestor(a, b); }, getLowestCommonAncestor: function (a, b) { return TreeTraversal.getLowestCommonAncestor(a, b); }, getParentInstance: function (inst) { return TreeTraversal.getParentInstance(inst); }, traverseTwoPhase: function (target, fn, arg) { return TreeTraversal.traverseTwoPhase(target, fn, arg); }, traverseEnterLeave: function (from, to, fn, argFrom, argTo) { return TreeTraversal.traverseEnterLeave(from, to, fn, argFrom, argTo); }, injection: injection }; module.exports = EventPluginUtils; /***/ }, /* 42 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var caughtError = null; /** * Call a function while guarding against errors that happens within it. * * @param {String} name of the guard to use for logging or debugging * @param {Function} func The function to invoke * @param {*} a First argument * @param {*} b Second argument */ function invokeGuardedCallback(name, func, a) { try { func(a); } catch (x) { if (caughtError === null) { caughtError = x; } } } var ReactErrorUtils = { invokeGuardedCallback: invokeGuardedCallback, /** * Invoked by ReactTestUtils.Simulate so that any errors thrown by the event * handler are sure to be rethrown by rethrowCaughtError. */ invokeGuardedCallbackWithCatch: invokeGuardedCallback, /** * During execution of guarded functions we will capture the first error which * we will rethrow to be handled by the top level error handler. */ rethrowCaughtError: function () { if (caughtError) { var error = caughtError; caughtError = null; throw error; } } }; if (false) { /** * To help development we can get better devtools integration by simulating a * real browser event. 
*/ if (typeof window !== 'undefined' && typeof window.dispatchEvent === 'function' && typeof document !== 'undefined' && typeof document.createEvent === 'function') { var fakeNode = document.createElement('react'); ReactErrorUtils.invokeGuardedCallback = function (name, func, a) { var boundFunc = func.bind(null, a); var evtType = 'react-' + name; fakeNode.addEventListener(evtType, boundFunc, false); var evt = document.createEvent('Event'); // $FlowFixMe https://github.com/facebook/flow/issues/2336 evt.initEvent(evtType, false, false); fakeNode.dispatchEvent(evt); fakeNode.removeEventListener(evtType, boundFunc, false); }; } } module.exports = ReactErrorUtils; /***/ }, /* 43 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); /** * Accumulates items that must not be null or undefined into the first one. This * is used to conserve memory by avoiding array allocations, and thus sacrifices * API cleanness. Since `current` can be null before being passed in and not * null after this function, make sure to assign it back to `current`: * * `a = accumulateInto(a, b);` * * This API should be sparingly used. Try `accumulate` for something cleaner. * * @return {*|array<*>} An accumulation of items. */ function accumulateInto(current, next) { !(next != null) ? false ? invariant(false, 'accumulateInto(...): Accumulated items must not be null or undefined.') : _prodInvariant('30') : void 0; if (current == null) { return next; } // Both are not empty. Warning: Never call x.concat(y) when you are not // certain that x is an Array (x could be a string with concat method). if (Array.isArray(current)) { if (Array.isArray(next)) { current.push.apply(current, next); return current; } current.push(next); return current; } if (Array.isArray(next)) { // A bit too dangerous to mutate `next`. return [current].concat(next); } return [current, next]; } module.exports = accumulateInto; /***/ }, /* 44 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; /** * @param {array} arr an "accumulation" of items which is either an Array or * a single item. Useful when paired with the `accumulate` module. This is a * simple utility that allows us to reason about a collection of items, but * handling the case when there is exactly one item (and we do not need to * allocate an array). */ function forEachAccumulated(arr, cb, scope) { if (Array.isArray(arr)) { arr.forEach(cb, scope); } else if (arr) { cb.call(scope, arr); } } module.exports = forEachAccumulated; /***/ }, /* 45 */ /***/ function(module, exports) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var canUseDOM = !!(typeof window !== 'undefined' && window.document && window.document.createElement); /** * Simple, lightweight module assisting with the detection and context of * Worker. Helps avoid circular dependencies and allows code to reason about * whether or not they are in a Worker, even if they never include the main * `ReactWorker` dependency. */ var ExecutionEnvironment = { canUseDOM: canUseDOM, canUseWorkers: typeof Worker !== 'undefined', canUseEventListeners: canUseDOM && !!(window.addEventListener || window.attachEvent), canUseViewport: canUseDOM && !!window.screen, isInWorker: !canUseDOM // For now, this is true - might change in the future. }; module.exports = ExecutionEnvironment; /***/ }, /* 46 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var PooledClass = __webpack_require__(47); var getTextContentAccessor = __webpack_require__(48); /** * This helper class stores information about text content of a target node, * allowing comparison of content before and after a given event. * * Identify the node where selection currently begins, then observe * both its text content and its current position in the DOM. Since the * browser may natively replace the target node during composition, we can * use its position to find its replacement. * * @param {DOMEventTarget} root */ function FallbackCompositionState(root) { this._root = root; this._startText = this.getText(); this._fallbackText = null; } _assign(FallbackCompositionState.prototype, { destructor: function () { this._root = null; this._startText = null; this._fallbackText = null; }, /** * Get current text of input. * * @return {string} */ getText: function () { if ('value' in this._root) { return this._root.value; } return this._root[getTextContentAccessor()]; }, /** * Determine the differing substring between the initially stored * text content and the current content. * * @return {string} */ getData: function () { if (this._fallbackText) { return this._fallbackText; } var start; var startValue = this._startText; var startLength = startValue.length; var end; var endValue = this.getText(); var endLength = endValue.length; for (start = 0; start < startLength; start++) { if (startValue[start] !== endValue[start]) { break; } } var minEnd = startLength - start; for (end = 1; end <= minEnd; end++) { if (startValue[startLength - end] !== endValue[endLength - end]) { break; } } var sliceTail = end > 1 ? 1 - end : undefined; this._fallbackText = endValue.slice(start, sliceTail); return this._fallbackText; } }); PooledClass.addPoolingTo(FallbackCompositionState); module.exports = FallbackCompositionState; /***/ }, /* 47 */ [639, 32], /* 48 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ExecutionEnvironment = __webpack_require__(45); var contentKey = null; /** * Gets the key used to access text content on a DOM node. 
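 *
 * (A worked example of `FallbackCompositionState.getData` from the module above,
 * using made-up strings: if the stored start text is "ab" and the current text is
 * "aXb", the prefix scan stops at index 1, the suffix scan matches one trailing
 * character, and the result is endValue.slice(1, -1) === "X", exactly the text the
 * composition inserted. When text is only appended, e.g. "abc" to "abcX", the slice
 * tail is undefined and the result is simply "X".)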
* * @return {?string} Key used to access text content. * @internal */ function getTextContentAccessor() { if (!contentKey && ExecutionEnvironment.canUseDOM) { // Prefer textContent to innerText because many browsers support both but // SVG <text> elements don't support innerText even when <div> does. contentKey = 'textContent' in document.documentElement ? 'textContent' : 'innerText'; } return contentKey; } module.exports = getTextContentAccessor; /***/ }, /* 49 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticEvent = __webpack_require__(50); /** * @interface Event * @see http://www.w3.org/TR/DOM-Level-3-Events/#events-compositionevents */ var CompositionEventInterface = { data: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticCompositionEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticEvent.augmentClass(SyntheticCompositionEvent, CompositionEventInterface); module.exports = SyntheticCompositionEvent; /***/ }, /* 50 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var PooledClass = __webpack_require__(47); var emptyFunction = __webpack_require__(12); var warning = __webpack_require__(11); var didWarnForAddedNewProperty = false; var isProxySupported = typeof Proxy === 'function'; var shouldBeReleasedProperties = ['dispatchConfig', '_targetInst', 'nativeEvent', 'isDefaultPrevented', 'isPropagationStopped', '_dispatchListeners', '_dispatchInstances']; /** * @interface Event * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var EventInterface = { type: null, target: null, // currentTarget is set when dispatching; no use in copying it here currentTarget: emptyFunction.thatReturnsNull, eventPhase: null, bubbles: null, cancelable: null, timeStamp: function (event) { return event.timeStamp || Date.now(); }, defaultPrevented: null, isTrusted: null }; /** * Synthetic events are dispatched by event plugins, typically in response to a * top-level event delegation handler. * * These systems should generally use pooling to reduce the frequency of garbage * collection. The system should check `isPersistent` to determine whether the * event should be released into the pool after being dispatched. Users that * need a persisted event should invoke `persist`. * * Synthetic events (and subclasses) implement the DOM Level 3 Events API by * normalizing browser quirks. Subclasses do not necessarily have to implement a * DOM interface; custom application-specific events can also subclass this. * * @param {object} dispatchConfig Configuration used to dispatch this event. 
* @param {*} targetInst Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @param {DOMEventTarget} nativeEventTarget Target node. */ function SyntheticEvent(dispatchConfig, targetInst, nativeEvent, nativeEventTarget) { if (false) { // these have a getter/setter for warnings delete this.nativeEvent; delete this.preventDefault; delete this.stopPropagation; } this.dispatchConfig = dispatchConfig; this._targetInst = targetInst; this.nativeEvent = nativeEvent; var Interface = this.constructor.Interface; for (var propName in Interface) { if (!Interface.hasOwnProperty(propName)) { continue; } if (false) { delete this[propName]; // this has a getter/setter for warnings } var normalize = Interface[propName]; if (normalize) { this[propName] = normalize(nativeEvent); } else { if (propName === 'target') { this.target = nativeEventTarget; } else { this[propName] = nativeEvent[propName]; } } } var defaultPrevented = nativeEvent.defaultPrevented != null ? nativeEvent.defaultPrevented : nativeEvent.returnValue === false; if (defaultPrevented) { this.isDefaultPrevented = emptyFunction.thatReturnsTrue; } else { this.isDefaultPrevented = emptyFunction.thatReturnsFalse; } this.isPropagationStopped = emptyFunction.thatReturnsFalse; return this; } _assign(SyntheticEvent.prototype, { preventDefault: function () { this.defaultPrevented = true; var event = this.nativeEvent; if (!event) { return; } if (event.preventDefault) { event.preventDefault(); } else if (typeof event.returnValue !== 'unknown') { // eslint-disable-line valid-typeof event.returnValue = false; } this.isDefaultPrevented = emptyFunction.thatReturnsTrue; }, stopPropagation: function () { var event = this.nativeEvent; if (!event) { return; } if (event.stopPropagation) { event.stopPropagation(); } else if (typeof event.cancelBubble !== 'unknown') { // eslint-disable-line valid-typeof // The ChangeEventPlugin registers a "propertychange" event for // IE. This event does not support bubbling or cancelling, and // any references to cancelBubble throw "Member not found". A // typeof check of "unknown" circumvents this issue (and is also // IE specific). event.cancelBubble = true; } this.isPropagationStopped = emptyFunction.thatReturnsTrue; }, /** * We release all dispatched `SyntheticEvent`s after each event loop, adding * them back into the pool. This allows a way to hold onto a reference that * won't be added back into the pool. */ persist: function () { this.isPersistent = emptyFunction.thatReturnsTrue; }, /** * Checks if this event should be released back into the pool. * * @return {boolean} True if this should not be released, false otherwise. */ isPersistent: emptyFunction.thatReturnsFalse, /** * `PooledClass` looks for `destructor` on each instance it releases. 
*/ destructor: function () { var Interface = this.constructor.Interface; for (var propName in Interface) { if (false) { Object.defineProperty(this, propName, getPooledWarningPropertyDefinition(propName, Interface[propName])); } else { this[propName] = null; } } for (var i = 0; i < shouldBeReleasedProperties.length; i++) { this[shouldBeReleasedProperties[i]] = null; } if (false) { Object.defineProperty(this, 'nativeEvent', getPooledWarningPropertyDefinition('nativeEvent', null)); Object.defineProperty(this, 'preventDefault', getPooledWarningPropertyDefinition('preventDefault', emptyFunction)); Object.defineProperty(this, 'stopPropagation', getPooledWarningPropertyDefinition('stopPropagation', emptyFunction)); } } }); SyntheticEvent.Interface = EventInterface; if (false) { if (isProxySupported) { /*eslint-disable no-func-assign */ SyntheticEvent = new Proxy(SyntheticEvent, { construct: function (target, args) { return this.apply(target, Object.create(target.prototype), args); }, apply: function (constructor, that, args) { return new Proxy(constructor.apply(that, args), { set: function (target, prop, value) { if (prop !== 'isPersistent' && !target.constructor.Interface.hasOwnProperty(prop) && shouldBeReleasedProperties.indexOf(prop) === -1) { process.env.NODE_ENV !== 'production' ? warning(didWarnForAddedNewProperty || target.isPersistent(), 'This synthetic event is reused for performance reasons. If you\'re ' + 'seeing this, you\'re adding a new property in the synthetic event object. ' + 'The property is never released. See ' + 'https://fb.me/react-event-pooling for more information.') : void 0; didWarnForAddedNewProperty = true; } target[prop] = value; return true; } }); } }); /*eslint-enable no-func-assign */ } } /** * Helper to reduce boilerplate when creating subclasses. * * @param {function} Class * @param {?object} Interface */ SyntheticEvent.augmentClass = function (Class, Interface) { var Super = this; var E = function () {}; E.prototype = Super.prototype; var prototype = new E(); _assign(prototype, Class.prototype); Class.prototype = prototype; Class.prototype.constructor = Class; Class.Interface = _assign({}, Super.Interface, Interface); Class.augmentClass = Super.augmentClass; PooledClass.addPoolingTo(Class, PooledClass.fourArgumentPooler); }; PooledClass.addPoolingTo(SyntheticEvent, PooledClass.fourArgumentPooler); module.exports = SyntheticEvent; /** * Helper to nullify syntheticEvent instance properties when destructing * * @param {object} SyntheticEvent * @param {String} propName * @return {object} defineProperty object */ function getPooledWarningPropertyDefinition(propName, getVal) { var isFunction = typeof getVal === 'function'; return { configurable: true, set: set, get: get }; function set(val) { var action = isFunction ? 'setting the method' : 'setting the property'; warn(action, 'This is effectively a no-op'); return val; } function get() { var action = isFunction ? 'accessing the method' : 'accessing the property'; var result = isFunction ? 'This is a no-op function' : 'This is set to null'; warn(action, result); return getVal; } function warn(action, result) { var warningCondition = false; false ? warning(warningCondition, 'This synthetic event is reused for performance reasons. If you\'re seeing this, ' + 'you\'re %s `%s` on a released/nullified synthetic event. %s. ' + 'If you must keep the original synthetic event around, use event.persist(). 
' + 'See https://fb.me/react-event-pooling for more information.', action, propName, result) : void 0; } } /***/ }, /* 51 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticEvent = __webpack_require__(50); /** * @interface Event * @see http://www.w3.org/TR/2013/WD-DOM-Level-3-Events-20131105 * /#events-inputevents */ var InputEventInterface = { data: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticInputEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticEvent.augmentClass(SyntheticInputEvent, InputEventInterface); module.exports = SyntheticInputEvent; /***/ }, /* 52 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var EventPluginHub = __webpack_require__(39); var EventPropagators = __webpack_require__(38); var ExecutionEnvironment = __webpack_require__(45); var ReactDOMComponentTree = __webpack_require__(31); var ReactUpdates = __webpack_require__(53); var SyntheticEvent = __webpack_require__(50); var getEventTarget = __webpack_require__(61); var isEventSupported = __webpack_require__(62); var isTextInputElement = __webpack_require__(63); var eventTypes = { change: { phasedRegistrationNames: { bubbled: 'onChange', captured: 'onChangeCapture' }, dependencies: ['topBlur', 'topChange', 'topClick', 'topFocus', 'topInput', 'topKeyDown', 'topKeyUp', 'topSelectionChange'] } }; /** * For IE shims */ var activeElement = null; var activeElementInst = null; var activeElementValue = null; var activeElementValueProp = null; /** * SECTION: handle `change` event */ function shouldUseChangeEvent(elem) { var nodeName = elem.nodeName && elem.nodeName.toLowerCase(); return nodeName === 'select' || nodeName === 'input' && elem.type === 'file'; } var doesChangeEventBubble = false; if (ExecutionEnvironment.canUseDOM) { // See `handleChange` comment below doesChangeEventBubble = isEventSupported('change') && (!document.documentMode || document.documentMode > 8); } function manualDispatchChangeEvent(nativeEvent) { var event = SyntheticEvent.getPooled(eventTypes.change, activeElementInst, nativeEvent, getEventTarget(nativeEvent)); EventPropagators.accumulateTwoPhaseDispatches(event); // If change and propertychange bubbled, we'd just bind to it like all the // other events and have it go through ReactBrowserEventEmitter. Since it // doesn't, we manually listen for the events and so we have to enqueue and // process the abstract event manually. 
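// (A rough sketch of the flow above, using only names defined in this module: the
// pooled SyntheticEvent built by SyntheticEvent.getPooled is handed to
// ReactUpdates.batchedUpdates(runEventInBatch, event), which enqueues it via
// EventPluginHub.enqueueEvents(event) and flushes it with
// EventPluginHub.processEventQueue(false). Handlers that need to keep the event
// object after dispatch must call event.persist(), because pooled events are
// nulled out and reused once the queue is processed.)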
// // Batching is necessary here in order to ensure that all event handlers run // before the next rerender (including event handlers attached to ancestor // elements instead of directly on the input). Without this, controlled // components don't work properly in conjunction with event bubbling because // the component is rerendered and the value reverted before all the event // handlers can run. See https://github.com/facebook/react/issues/708. ReactUpdates.batchedUpdates(runEventInBatch, event); } function runEventInBatch(event) { EventPluginHub.enqueueEvents(event); EventPluginHub.processEventQueue(false); } function startWatchingForChangeEventIE8(target, targetInst) { activeElement = target; activeElementInst = targetInst; activeElement.attachEvent('onchange', manualDispatchChangeEvent); } function stopWatchingForChangeEventIE8() { if (!activeElement) { return; } activeElement.detachEvent('onchange', manualDispatchChangeEvent); activeElement = null; activeElementInst = null; } function getTargetInstForChangeEvent(topLevelType, targetInst) { if (topLevelType === 'topChange') { return targetInst; } } function handleEventsForChangeEventIE8(topLevelType, target, targetInst) { if (topLevelType === 'topFocus') { // stopWatching() should be a noop here but we call it just in case we // missed a blur event somehow. stopWatchingForChangeEventIE8(); startWatchingForChangeEventIE8(target, targetInst); } else if (topLevelType === 'topBlur') { stopWatchingForChangeEventIE8(); } } /** * SECTION: handle `input` event */ var isInputEventSupported = false; if (ExecutionEnvironment.canUseDOM) { // IE9 claims to support the input event but fails to trigger it when // deleting text, so we ignore its input events. // IE10+ fires input events too often, such as when a placeholder // changes or when an input with a placeholder is focused. isInputEventSupported = isEventSupported('input') && (!document.documentMode || document.documentMode > 11); } /** * (For IE <=11) Replacement getter/setter for the `value` property that gets * set on the active element. */ var newValueProp = { get: function () { return activeElementValueProp.get.call(this); }, set: function (val) { // Cast to a string so we can do equality checks. activeElementValue = '' + val; activeElementValueProp.set.call(this, val); } }; /** * (For IE <=11) Starts tracking propertychange events on the passed-in element * and overrides the value property so that we can distinguish user events from * value changes in JS. */ function startWatchingForValueChange(target, targetInst) { activeElement = target; activeElementInst = targetInst; activeElementValue = target.value; activeElementValueProp = Object.getOwnPropertyDescriptor(target.constructor.prototype, 'value'); // Not guarded in a canDefineProperty check: IE8 supports defineProperty only // on DOM elements Object.defineProperty(activeElement, 'value', newValueProp); if (activeElement.attachEvent) { activeElement.attachEvent('onpropertychange', handlePropertyChange); } else { activeElement.addEventListener('propertychange', handlePropertyChange, false); } } /** * (For IE <=11) Removes the event listeners from the currently-tracked element, * if any exist.
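 *
 * (The interception set up by `startWatchingForValueChange` above boils down to the
 * following minimal sketch; `input` is a hypothetical DOM input element, and the real
 * code additionally wires up the propertychange listener:)
 *
 *   var descriptor = Object.getOwnPropertyDescriptor(input.constructor.prototype, 'value');
 *   var lastValue = input.value;
 *   Object.defineProperty(input, 'value', {
 *     get: function () { return descriptor.get.call(this); },
 *     set: function (val) {
 *       lastValue = '' + val;              // remember programmatic writes...
 *       descriptor.set.call(this, val);    // ...and forward them to the real setter
 *     }
 *   });
 *   // Later, a propertychange whose value still equals lastValue came from JS,
 *   // not from the user, and can be ignored.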
*/ function stopWatchingForValueChange() { if (!activeElement) { return; } // delete restores the original property definition delete activeElement.value; if (activeElement.detachEvent) { activeElement.detachEvent('onpropertychange', handlePropertyChange); } else { activeElement.removeEventListener('propertychange', handlePropertyChange, false); } activeElement = null; activeElementInst = null; activeElementValue = null; activeElementValueProp = null; } /** * (For IE <=11) Handles a propertychange event, sending a `change` event if * the value of the active element has changed. */ function handlePropertyChange(nativeEvent) { if (nativeEvent.propertyName !== 'value') { return; } var value = nativeEvent.srcElement.value; if (value === activeElementValue) { return; } activeElementValue = value; manualDispatchChangeEvent(nativeEvent); } /** * If a `change` event should be fired, returns the target's ID. */ function getTargetInstForInputEvent(topLevelType, targetInst) { if (topLevelType === 'topInput') { // In modern browsers (i.e., not IE8 or IE9), the input event is exactly // what we want so fall through here and trigger an abstract event return targetInst; } } function handleEventsForInputEventIE(topLevelType, target, targetInst) { if (topLevelType === 'topFocus') { // In IE8, we can capture almost all .value changes by adding a // propertychange handler and looking for events with propertyName // equal to 'value' // In IE9-11, propertychange fires for most input events but is buggy and // doesn't fire when text is deleted, but conveniently, selectionchange // appears to fire in all of the remaining cases so we catch those and // forward the event if the value has changed // In either case, we don't want to call the event handler if the value // is changed from JS so we redefine a setter for `.value` that updates // our activeElementValue variable, allowing us to ignore those changes // // stopWatching() should be a noop here but we call it just in case we // missed a blur event somehow. stopWatchingForValueChange(); startWatchingForValueChange(target, targetInst); } else if (topLevelType === 'topBlur') { stopWatchingForValueChange(); } } // For IE8 and IE9. function getTargetInstForInputEventIE(topLevelType, targetInst) { if (topLevelType === 'topSelectionChange' || topLevelType === 'topKeyUp' || topLevelType === 'topKeyDown') { // On the selectionchange event, the target is just document which isn't // helpful for us so just check activeElement instead. // // 99% of the time, keydown and keyup aren't necessary. IE8 fails to fire // propertychange on the first input event after setting `value` from a // script and fires only keydown, keypress, keyup. Catching keyup usually // gets it and catching keydown lets us fire an event for the first // keystroke if user does a key repeat (it'll be a little delayed: right // before the second keystroke). Other input methods (e.g., paste) seem to // fire selectionchange normally. if (activeElement && activeElement.value !== activeElementValue) { activeElementValue = activeElement.value; return activeElementInst; } } } /** * SECTION: handle `click` event */ function shouldUseClickEvent(elem) { // Use the `click` event to detect changes to checkbox and radio inputs. // This approach works across all browsers, whereas `change` does not fire // until `blur` in IE8. 
return elem.nodeName && elem.nodeName.toLowerCase() === 'input' && (elem.type === 'checkbox' || elem.type === 'radio'); } function getTargetInstForClickEvent(topLevelType, targetInst) { if (topLevelType === 'topClick') { return targetInst; } } /** * This plugin creates an `onChange` event that normalizes change events * across form elements. This event fires at a time when it's possible to * change the element's value without seeing a flicker. * * Supported elements are: * - input (see `isTextInputElement`) * - textarea * - select */ var ChangeEventPlugin = { eventTypes: eventTypes, extractEvents: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { var targetNode = targetInst ? ReactDOMComponentTree.getNodeFromInstance(targetInst) : window; var getTargetInstFunc, handleEventFunc; if (shouldUseChangeEvent(targetNode)) { if (doesChangeEventBubble) { getTargetInstFunc = getTargetInstForChangeEvent; } else { handleEventFunc = handleEventsForChangeEventIE8; } } else if (isTextInputElement(targetNode)) { if (isInputEventSupported) { getTargetInstFunc = getTargetInstForInputEvent; } else { getTargetInstFunc = getTargetInstForInputEventIE; handleEventFunc = handleEventsForInputEventIE; } } else if (shouldUseClickEvent(targetNode)) { getTargetInstFunc = getTargetInstForClickEvent; } if (getTargetInstFunc) { var inst = getTargetInstFunc(topLevelType, targetInst); if (inst) { var event = SyntheticEvent.getPooled(eventTypes.change, inst, nativeEvent, nativeEventTarget); event.type = 'change'; EventPropagators.accumulateTwoPhaseDispatches(event); return event; } } if (handleEventFunc) { handleEventFunc(topLevelType, targetNode, targetInst); } } }; module.exports = ChangeEventPlugin; /***/ }, /* 53 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var CallbackQueue = __webpack_require__(54); var PooledClass = __webpack_require__(47); var ReactFeatureFlags = __webpack_require__(55); var ReactReconciler = __webpack_require__(56); var Transaction = __webpack_require__(60); var invariant = __webpack_require__(8); var dirtyComponents = []; var updateBatchNumber = 0; var asapCallbackQueue = CallbackQueue.getPooled(); var asapEnqueued = false; var batchingStrategy = null; function ensureInjected() { !(ReactUpdates.ReactReconcileTransaction && batchingStrategy) ? false ? invariant(false, 'ReactUpdates: must inject a reconcile transaction class and batching strategy') : _prodInvariant('123') : void 0; } var NESTED_UPDATES = { initialize: function () { this.dirtyComponentsLength = dirtyComponents.length; }, close: function () { if (this.dirtyComponentsLength !== dirtyComponents.length) { // Additional updates were enqueued by componentDidUpdate handlers or // similar; before our own UPDATE_QUEUEING wrapper closes, we want to run // these new updates so that if A's componentDidUpdate calls setState on // B, B will update before the callback A's updater provided when calling // setState. 
dirtyComponents.splice(0, this.dirtyComponentsLength); flushBatchedUpdates(); } else { dirtyComponents.length = 0; } } }; var UPDATE_QUEUEING = { initialize: function () { this.callbackQueue.reset(); }, close: function () { this.callbackQueue.notifyAll(); } }; var TRANSACTION_WRAPPERS = [NESTED_UPDATES, UPDATE_QUEUEING]; function ReactUpdatesFlushTransaction() { this.reinitializeTransaction(); this.dirtyComponentsLength = null; this.callbackQueue = CallbackQueue.getPooled(); this.reconcileTransaction = ReactUpdates.ReactReconcileTransaction.getPooled( /* useCreateElement */true); } _assign(ReactUpdatesFlushTransaction.prototype, Transaction, { getTransactionWrappers: function () { return TRANSACTION_WRAPPERS; }, destructor: function () { this.dirtyComponentsLength = null; CallbackQueue.release(this.callbackQueue); this.callbackQueue = null; ReactUpdates.ReactReconcileTransaction.release(this.reconcileTransaction); this.reconcileTransaction = null; }, perform: function (method, scope, a) { // Essentially calls `this.reconcileTransaction.perform(method, scope, a)` // with this transaction's wrappers around it. return Transaction.perform.call(this, this.reconcileTransaction.perform, this.reconcileTransaction, method, scope, a); } }); PooledClass.addPoolingTo(ReactUpdatesFlushTransaction); function batchedUpdates(callback, a, b, c, d, e) { ensureInjected(); return batchingStrategy.batchedUpdates(callback, a, b, c, d, e); } /** * Array comparator for ReactComponents by mount ordering. * * @param {ReactComponent} c1 first component you're comparing * @param {ReactComponent} c2 second component you're comparing * @return {number} Return value usable by Array.prototype.sort(). */ function mountOrderComparator(c1, c2) { return c1._mountOrder - c2._mountOrder; } function runBatchedUpdates(transaction) { var len = transaction.dirtyComponentsLength; !(len === dirtyComponents.length) ? false ? invariant(false, 'Expected flush transaction\'s stored dirty-components length (%s) to match dirty-components array length (%s).', len, dirtyComponents.length) : _prodInvariant('124', len, dirtyComponents.length) : void 0; // Since reconciling a component higher in the owner hierarchy usually (not // always -- see shouldComponentUpdate()) will reconcile children, reconcile // them before their children by sorting the array. dirtyComponents.sort(mountOrderComparator); // Any updates enqueued while reconciling must be performed after this entire // batch. Otherwise, if dirtyComponents is [A, B] where A has children B and // C, B could update twice in a single batch if C's render enqueues an update // to B (since B would have already updated, we should skip it, and the only // way we can know to do so is by checking the batch counter). updateBatchNumber++; for (var i = 0; i < len; i++) { // If a component is unmounted before pending changes apply, it will still // be here, but we assume that it has cleared its _pendingCallbacks and // that performUpdateIfNecessary is a noop. var component = dirtyComponents[i]; // If performUpdateIfNecessary happens to enqueue any new updates, we // shouldn't execute the callbacks until the next render happens, so // stash the callbacks first var callbacks = component._pendingCallbacks; component._pendingCallbacks = null; var markerName; if (ReactFeatureFlags.logTopLevelRenders) { var namedComponent = component; // Duck type TopLevelWrapper. This is probably always true. 
if (component._currentElement.type.isReactTopLevelWrapper) { namedComponent = component._renderedComponent; } markerName = 'React update: ' + namedComponent.getName(); console.time(markerName); } ReactReconciler.performUpdateIfNecessary(component, transaction.reconcileTransaction, updateBatchNumber); if (markerName) { console.timeEnd(markerName); } if (callbacks) { for (var j = 0; j < callbacks.length; j++) { transaction.callbackQueue.enqueue(callbacks[j], component.getPublicInstance()); } } } } var flushBatchedUpdates = function () { // ReactUpdatesFlushTransaction's wrappers will clear the dirtyComponents // array and perform any updates enqueued by mount-ready handlers (i.e., // componentDidUpdate) but we need to check here too in order to catch // updates enqueued by setState callbacks and asap calls. while (dirtyComponents.length || asapEnqueued) { if (dirtyComponents.length) { var transaction = ReactUpdatesFlushTransaction.getPooled(); transaction.perform(runBatchedUpdates, null, transaction); ReactUpdatesFlushTransaction.release(transaction); } if (asapEnqueued) { asapEnqueued = false; var queue = asapCallbackQueue; asapCallbackQueue = CallbackQueue.getPooled(); queue.notifyAll(); CallbackQueue.release(queue); } } }; /** * Mark a component as needing a rerender, adding an optional callback to a * list of functions which will be executed once the rerender occurs. */ function enqueueUpdate(component) { ensureInjected(); // Various parts of our code (such as ReactCompositeComponent's // _renderValidatedComponent) assume that calls to render aren't nested; // verify that that's the case. (This is called by each top-level update // function, like setState, forceUpdate, etc.; creation and // destruction of top-level components is guarded in ReactMount.) if (!batchingStrategy.isBatchingUpdates) { batchingStrategy.batchedUpdates(enqueueUpdate, component); return; } dirtyComponents.push(component); if (component._updateBatchNumber == null) { component._updateBatchNumber = updateBatchNumber + 1; } } /** * Enqueue a callback to be run at the end of the current batching cycle. Throws * if no updates are currently being performed. */ function asap(callback, context) { !batchingStrategy.isBatchingUpdates ? false ? invariant(false, 'ReactUpdates.asap: Can\'t enqueue an asap callback in a context whereupdates are not being batched.') : _prodInvariant('125') : void 0; asapCallbackQueue.enqueue(callback, context); asapEnqueued = true; } var ReactUpdatesInjection = { injectReconcileTransaction: function (ReconcileTransaction) { !ReconcileTransaction ? false ? invariant(false, 'ReactUpdates: must provide a reconcile transaction class') : _prodInvariant('126') : void 0; ReactUpdates.ReactReconcileTransaction = ReconcileTransaction; }, injectBatchingStrategy: function (_batchingStrategy) { !_batchingStrategy ? false ? invariant(false, 'ReactUpdates: must provide a batching strategy') : _prodInvariant('127') : void 0; !(typeof _batchingStrategy.batchedUpdates === 'function') ? false ? invariant(false, 'ReactUpdates: must provide a batchedUpdates() function') : _prodInvariant('128') : void 0; !(typeof _batchingStrategy.isBatchingUpdates === 'boolean') ? false ? invariant(false, 'ReactUpdates: must provide an isBatchingUpdates boolean attribute') : _prodInvariant('129') : void 0; batchingStrategy = _batchingStrategy; } }; var ReactUpdates = { /** * React references `ReactReconcileTransaction` using this property in order * to allow dependency injection. 
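 *
 * (A minimal sketch of how that injection is typically wired up at boot time; the
 * `ReactReconcileTransaction` and `ReactDefaultBatchingStrategy` names below are the
 * usual renderer-side implementations and are assumed here, not defined in this
 * module, and `componentA`/`componentB` are placeholders:)
 *
 *   ReactUpdates.injection.injectReconcileTransaction(ReactReconcileTransaction);
 *   ReactUpdates.injection.injectBatchingStrategy(ReactDefaultBatchingStrategy);
 *
 *   // After injection, work can be batched so that setState calls made inside the
 *   // callback are flushed together instead of triggering separate re-renders:
 *   ReactUpdates.batchedUpdates(function () {
 *     componentA.setState({ ready: true });
 *     componentB.setState({ ready: true });
 *   });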
* * @internal */ ReactReconcileTransaction: null, batchedUpdates: batchedUpdates, enqueueUpdate: enqueueUpdate, flushBatchedUpdates: flushBatchedUpdates, injection: ReactUpdatesInjection, asap: asap }; module.exports = ReactUpdates; /***/ }, /* 54 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var PooledClass = __webpack_require__(47); var invariant = __webpack_require__(8); /** * A specialized pseudo-event module to help keep track of components waiting to * be notified when their DOM representations are available for use. * * This implements `PooledClass`, so you should never need to instantiate this. * Instead, use `CallbackQueue.getPooled()`. * * @class ReactMountReady * @implements PooledClass * @internal */ var CallbackQueue = function () { function CallbackQueue(arg) { _classCallCheck(this, CallbackQueue); this._callbacks = null; this._contexts = null; this._arg = arg; } /** * Enqueues a callback to be invoked when `notifyAll` is invoked. * * @param {function} callback Invoked when `notifyAll` is invoked. * @param {?object} context Context to call `callback` with. * @internal */ CallbackQueue.prototype.enqueue = function enqueue(callback, context) { this._callbacks = this._callbacks || []; this._callbacks.push(callback); this._contexts = this._contexts || []; this._contexts.push(context); }; /** * Invokes all enqueued callbacks and clears the queue. This is invoked after * the DOM representation of a component has been created or updated. * * @internal */ CallbackQueue.prototype.notifyAll = function notifyAll() { var callbacks = this._callbacks; var contexts = this._contexts; var arg = this._arg; if (callbacks && contexts) { !(callbacks.length === contexts.length) ? false ? invariant(false, 'Mismatched list of contexts in callback queue') : _prodInvariant('24') : void 0; this._callbacks = null; this._contexts = null; for (var i = 0; i < callbacks.length; i++) { callbacks[i].call(contexts[i], arg); } callbacks.length = 0; contexts.length = 0; } }; CallbackQueue.prototype.checkpoint = function checkpoint() { return this._callbacks ? this._callbacks.length : 0; }; CallbackQueue.prototype.rollback = function rollback(len) { if (this._callbacks && this._contexts) { this._callbacks.length = len; this._contexts.length = len; } }; /** * Resets the internal queue. * * @internal */ CallbackQueue.prototype.reset = function reset() { this._callbacks = null; this._contexts = null; }; /** * `PooledClass` looks for this. */ CallbackQueue.prototype.destructor = function destructor() { this.reset(); }; return CallbackQueue; }(); module.exports = PooledClass.addPoolingTo(CallbackQueue); /***/ }, /* 55 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
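 *
 * (A small usage sketch for the pooled `CallbackQueue` defined just above; `someArg`,
 * `onReady` and `ctx` are placeholder names. As the class docs say, instances come
 * from the pool rather than from `new`:)
 *
 *   var queue = CallbackQueue.getPooled(someArg);
 *   queue.enqueue(onReady, ctx);        // onReady.call(ctx, someArg) will run later
 *   queue.notifyAll();                  // invokes and clears the queued callbacks
 *   CallbackQueue.release(queue);       // returns the instance to the pool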
* * */ 'use strict'; var ReactFeatureFlags = { // When true, call console.time() before and .timeEnd() after each top-level // render (both initial renders and updates). Useful when looking at prod-mode // timeline profiles in Chrome, for example. logTopLevelRenders: false }; module.exports = ReactFeatureFlags; /***/ }, /* 56 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactRef = __webpack_require__(57); var ReactInstrumentation = __webpack_require__(59); var warning = __webpack_require__(11); /** * Helper to call ReactRef.attachRefs with this composite component, split out * to avoid allocations in the transaction mount-ready queue. */ function attachRefs() { ReactRef.attachRefs(this, this._currentElement); } var ReactReconciler = { /** * Initializes the component, renders markup, and registers event listeners. * * @param {ReactComponent} internalInstance * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @param {?object} the containing host component instance * @param {?object} info about the host container * @return {?string} Rendered markup to be inserted into the DOM. * @final * @internal */ mountComponent: function (internalInstance, transaction, hostParent, hostContainerInfo, context, parentDebugID // 0 in production and for roots ) { if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onBeforeMountComponent(internalInstance._debugID, internalInstance._currentElement, parentDebugID); } } var markup = internalInstance.mountComponent(transaction, hostParent, hostContainerInfo, context, parentDebugID); if (internalInstance._currentElement && internalInstance._currentElement.ref != null) { transaction.getReactMountReady().enqueue(attachRefs, internalInstance); } if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onMountComponent(internalInstance._debugID); } } return markup; }, /** * Returns a value that can be passed to * ReactComponentEnvironment.replaceNodeWithMarkup. */ getHostNode: function (internalInstance) { return internalInstance.getHostNode(); }, /** * Releases any resources allocated by `mountComponent`. * * @final * @internal */ unmountComponent: function (internalInstance, safely) { if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onBeforeUnmountComponent(internalInstance._debugID); } } ReactRef.detachRefs(internalInstance, internalInstance._currentElement); internalInstance.unmountComponent(safely); if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onUnmountComponent(internalInstance._debugID); } } }, /** * Update a component using a new element. * * @param {ReactComponent} internalInstance * @param {ReactElement} nextElement * @param {ReactReconcileTransaction} transaction * @param {object} context * @internal */ receiveComponent: function (internalInstance, nextElement, transaction, context) { var prevElement = internalInstance._currentElement; if (nextElement === prevElement && context === internalInstance._context) { // Since elements are immutable after the owner is rendered, // we can do a cheap identity compare here to determine if this is a // superfluous reconcile. 
It's possible for state to be mutable but such // change should trigger an update of the owner which would recreate // the element. We explicitly check for the existence of an owner since // it's possible for an element created outside a composite to be // deeply mutated and reused. // TODO: Bailing out early is just a perf optimization right? // TODO: Removing the return statement should affect correctness? return; } if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onBeforeUpdateComponent(internalInstance._debugID, nextElement); } } var refsChanged = ReactRef.shouldUpdateRefs(prevElement, nextElement); if (refsChanged) { ReactRef.detachRefs(internalInstance, prevElement); } internalInstance.receiveComponent(nextElement, transaction, context); if (refsChanged && internalInstance._currentElement && internalInstance._currentElement.ref != null) { transaction.getReactMountReady().enqueue(attachRefs, internalInstance); } if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onUpdateComponent(internalInstance._debugID); } } }, /** * Flush any dirty changes in a component. * * @param {ReactComponent} internalInstance * @param {ReactReconcileTransaction} transaction * @internal */ performUpdateIfNecessary: function (internalInstance, transaction, updateBatchNumber) { if (internalInstance._updateBatchNumber !== updateBatchNumber) { // The component's enqueued batch number should always be the current // batch or the following one. false ? warning(internalInstance._updateBatchNumber == null || internalInstance._updateBatchNumber === updateBatchNumber + 1, 'performUpdateIfNecessary: Unexpected batch number (current %s, ' + 'pending %s)', updateBatchNumber, internalInstance._updateBatchNumber) : void 0; return; } if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onBeforeUpdateComponent(internalInstance._debugID, internalInstance._currentElement); } } internalInstance.performUpdateIfNecessary(transaction); if (false) { if (internalInstance._debugID !== 0) { ReactInstrumentation.debugTool.onUpdateComponent(internalInstance._debugID); } } } }; module.exports = ReactReconciler; /***/ }, /* 57 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var ReactOwner = __webpack_require__(58); var ReactRef = {}; function attachRef(ref, component, owner) { if (typeof ref === 'function') { ref(component.getPublicInstance()); } else { // Legacy ref ReactOwner.addComponentAsRefTo(component, ref, owner); } } function detachRef(ref, component, owner) { if (typeof ref === 'function') { ref(null); } else { // Legacy ref ReactOwner.removeComponentAsRefFrom(component, ref, owner); } } ReactRef.attachRefs = function (instance, element) { if (element === null || typeof element !== 'object') { return; } var ref = element.ref; if (ref != null) { attachRef(ref, instance, element._owner); } }; ReactRef.shouldUpdateRefs = function (prevElement, nextElement) { // If either the owner or a `ref` has changed, make sure the newest owner // has stored a reference to `this`, and the previous owner (if different) // has forgotten the reference to `this`. 
We use the element instead // of the public this.props because the post processing cannot determine // a ref. The ref conceptually lives on the element. // TODO: Should this even be possible? The owner cannot change because // it's forbidden by shouldUpdateReactComponent. The ref can change // if you swap the keys of but not the refs. Reconsider where this check // is made. It probably belongs where the key checking and // instantiateReactComponent is done. var prevRef = null; var prevOwner = null; if (prevElement !== null && typeof prevElement === 'object') { prevRef = prevElement.ref; prevOwner = prevElement._owner; } var nextRef = null; var nextOwner = null; if (nextElement !== null && typeof nextElement === 'object') { nextRef = nextElement.ref; nextOwner = nextElement._owner; } return prevRef !== nextRef || // If owner changes but we have an unchanged function ref, don't update refs typeof nextRef === 'string' && nextOwner !== prevOwner; }; ReactRef.detachRefs = function (instance, element) { if (element === null || typeof element !== 'object') { return; } var ref = element.ref; if (ref != null) { detachRef(ref, instance, element._owner); } }; module.exports = ReactRef; /***/ }, /* 58 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); /** * @param {?object} object * @return {boolean} True if `object` is a valid owner. * @final */ function isValidOwner(object) { return !!(object && typeof object.attachRef === 'function' && typeof object.detachRef === 'function'); } /** * ReactOwners are capable of storing references to owned components. * * All components are capable of //being// referenced by owner components, but * only ReactOwner components are capable of //referencing// owned components. * The named reference is known as a "ref". * * Refs are available when mounted and updated during reconciliation. * * var MyComponent = React.createClass({ * render: function() { * return ( * <div onClick={this.handleClick}> * <CustomComponent ref="custom" /> * </div> * ); * }, * handleClick: function() { * this.refs.custom.handleClick(); * }, * componentDidMount: function() { * this.refs.custom.initialize(); * } * }); * * Refs should rarely be used. When refs are used, they should only be done to * control data that is not handled by React's data flow. * * @class ReactOwner */ var ReactOwner = { /** * Adds a component by ref to an owner component. * * @param {ReactComponent} component Component to reference. * @param {string} ref Name by which to refer to the component. * @param {ReactOwner} owner Component on which to record the ref. * @final * @internal */ addComponentAsRefTo: function (component, ref, owner) { !isValidOwner(owner) ? false ? invariant(false, 'addComponentAsRefTo(...): Only a ReactOwner can have refs. You might be adding a ref to a component that was not created inside a component\'s `render` method, or you have multiple copies of React loaded (details: https://fb.me/react-refs-must-have-owner).') : _prodInvariant('119') : void 0; owner.attachRef(ref, component); }, /** * Removes a component by ref from an owner component. 
* * @param {ReactComponent} component Component to dereference. * @param {string} ref Name of the ref to remove. * @param {ReactOwner} owner Component on which the ref is recorded. * @final * @internal */ removeComponentAsRefFrom: function (component, ref, owner) { !isValidOwner(owner) ? false ? invariant(false, 'removeComponentAsRefFrom(...): Only a ReactOwner can have refs. You might be removing a ref to a component that was not created inside a component\'s `render` method, or you have multiple copies of React loaded (details: https://fb.me/react-refs-must-have-owner).') : _prodInvariant('120') : void 0; var ownerPublicInstance = owner.getPublicInstance(); // Check that `component`'s owner is still alive and that `component` is still the current ref // because we do not want to detach the ref if another component stole it. if (ownerPublicInstance && ownerPublicInstance.refs[ref] === component.getPublicInstance()) { owner.detachRef(ref); } } }; module.exports = ReactOwner; /***/ }, /* 59 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2016-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; // Trust the developer to only use ReactInstrumentation with a __DEV__ check var debugTool = null; if (false) { var ReactDebugTool = require('./ReactDebugTool'); debugTool = ReactDebugTool; } module.exports = { debugTool: debugTool }; /***/ }, /* 60 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); var OBSERVED_ERROR = {}; /** * `Transaction` creates a black box that is able to wrap any method such that * certain invariants are maintained before and after the method is invoked * (Even if an exception is thrown while invoking the wrapped method). Whoever * instantiates a transaction can provide enforcers of the invariants at * creation time. The `Transaction` class itself will supply one additional * automatic invariant for you - the invariant that any transaction instance * should not be run while it is already being run. You would typically create a * single instance of a `Transaction` for reuse multiple times, that potentially * is used to wrap several different methods. Wrappers are extremely simple - * they only require implementing two methods. 
* * <pre> * wrappers (injected at creation time) * + + * | | * +-----------------|--------|--------------+ * | v | | * | +---------------+ | | * | +--| wrapper1 |---|----+ | * | | +---------------+ v | | * | | +-------------+ | | * | | +----| wrapper2 |--------+ | * | | | +-------------+ | | | * | | | | | | * | v v v v | wrapper * | +---+ +---+ +---------+ +---+ +---+ | invariants * perform(anyMethod) | | | | | | | | | | | | maintained * +----------------->|-|---|-|---|-->|anyMethod|---|---|-|---|-|--------> * | | | | | | | | | | | | * | | | | | | | | | | | | * | | | | | | | | | | | | * | +---+ +---+ +---------+ +---+ +---+ | * | initialize close | * +-----------------------------------------+ * </pre> * * Use cases: * - Preserving the input selection ranges before/after reconciliation. * Restoring selection even in the event of an unexpected error. * - Deactivating events while rearranging the DOM, preventing blurs/focuses, * while guaranteeing that afterwards, the event system is reactivated. * - Flushing a queue of collected DOM mutations to the main UI thread after a * reconciliation takes place in a worker thread. * - Invoking any collected `componentDidUpdate` callbacks after rendering new * content. * - (Future use case): Wrapping particular flushes of the `ReactWorker` queue * to preserve the `scrollTop` (an automatic scroll aware DOM). * - (Future use case): Layout calculations before and after DOM updates. * * Transactional plugin API: * - A module that has an `initialize` method that returns any precomputation. * - and a `close` method that accepts the precomputation. `close` is invoked * when the wrapped process is completed, or has failed. * * @param {Array<TransactionalWrapper>} transactionWrapper Wrapper modules * that implement `initialize` and `close`. * @return {Transaction} Single transaction for reuse in thread. * * @class Transaction */ var TransactionImpl = { /** * Sets up this instance so that it is prepared for collecting metrics. Does * so such that this setup method may be used on an instance that is already * initialized, in a way that does not consume additional memory upon reuse. * That can be useful if you decide to make your subclass of this mixin a * "PooledClass". */ reinitializeTransaction: function () { this.transactionWrappers = this.getTransactionWrappers(); if (this.wrapperInitData) { this.wrapperInitData.length = 0; } else { this.wrapperInitData = []; } this._isInTransaction = false; }, _isInTransaction: false, /** * @abstract * @return {Array<TransactionWrapper>} Array of transaction wrappers. */ getTransactionWrappers: null, isInTransaction: function () { return !!this._isInTransaction; }, /** * Executes the function within a safety window. Use this for the top level * methods that result in large amounts of computation/mutations that would * need to be safety checked. The optional arguments helps prevent the need * to bind in many cases. * * @param {function} method Member of scope to call. * @param {Object} scope Scope to invoke from. * @param {Object?=} a Argument to pass to the method. * @param {Object?=} b Argument to pass to the method. * @param {Object?=} c Argument to pass to the method. * @param {Object?=} d Argument to pass to the method. * @param {Object?=} e Argument to pass to the method. * @param {Object?=} f Argument to pass to the method. * * @return {*} Return value from `method`. */ perform: function (method, scope, a, b, c, d, e, f) { !!this.isInTransaction() ? false ? 
invariant(false, 'Transaction.perform(...): Cannot initialize a transaction when there is already an outstanding transaction.') : _prodInvariant('27') : void 0; var errorThrown; var ret; try { this._isInTransaction = true; // Catching errors makes debugging more difficult, so we start with // errorThrown set to true before setting it to false after calling // close -- if it's still set to true in the finally block, it means // one of these calls threw. errorThrown = true; this.initializeAll(0); ret = method.call(scope, a, b, c, d, e, f); errorThrown = false; } finally { try { if (errorThrown) { // If `method` throws, prefer to show that stack trace over any thrown // by invoking `closeAll`. try { this.closeAll(0); } catch (err) {} } else { // Since `method` didn't throw, we don't want to silence the exception // here. this.closeAll(0); } } finally { this._isInTransaction = false; } } return ret; }, initializeAll: function (startIndex) { var transactionWrappers = this.transactionWrappers; for (var i = startIndex; i < transactionWrappers.length; i++) { var wrapper = transactionWrappers[i]; try { // Catching errors makes debugging more difficult, so we start with the // OBSERVED_ERROR state before overwriting it with the real return value // of initialize -- if it's still set to OBSERVED_ERROR in the finally // block, it means wrapper.initialize threw. this.wrapperInitData[i] = OBSERVED_ERROR; this.wrapperInitData[i] = wrapper.initialize ? wrapper.initialize.call(this) : null; } finally { if (this.wrapperInitData[i] === OBSERVED_ERROR) { // The initializer for wrapper i threw an error; initialize the // remaining wrappers but silence any exceptions from them to ensure // that the first error is the one to bubble up. try { this.initializeAll(i + 1); } catch (err) {} } } } }, /** * Invokes each of `this.transactionWrappers.close[i]` functions, passing into * them the respective return values of `this.transactionWrappers.init[i]` * (`close`rs that correspond to initializers that failed will not be * invoked). */ closeAll: function (startIndex) { !this.isInTransaction() ? false ? invariant(false, 'Transaction.closeAll(): Cannot close transaction when none are open.') : _prodInvariant('28') : void 0; var transactionWrappers = this.transactionWrappers; for (var i = startIndex; i < transactionWrappers.length; i++) { var wrapper = transactionWrappers[i]; var initData = this.wrapperInitData[i]; var errorThrown; try { // Catching errors makes debugging more difficult, so we start with // errorThrown set to true before setting it to false after calling // close -- if it's still set to true in the finally block, it means // wrapper.close threw. errorThrown = true; if (initData !== OBSERVED_ERROR && wrapper.close) { wrapper.close.call(this, initData); } errorThrown = false; } finally { if (errorThrown) { // The closer for wrapper i threw an error; close the remaining // wrappers but silence any exceptions from them to ensure that the // first error is the one to bubble up. try { this.closeAll(i + 1); } catch (e) {} } } } this.wrapperInitData.length = 0; } }; module.exports = TransactionImpl; /***/ }, /* 61 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
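 *
 * (A minimal, self-contained sketch of the wrapper protocol implemented by the
 * `TransactionImpl` mixin exported above; `MyTransaction`, `LOG_WRAPPER` and
 * `doWork` are invented for illustration:)
 *
 *   var LOG_WRAPPER = {
 *     initialize: function () { return Date.now(); },                        // precomputation
 *     close: function (startedAt) { console.log(Date.now() - startedAt, 'ms'); }  // receives initialize's result
 *   };
 *   function MyTransaction() { this.reinitializeTransaction(); }
 *   Object.assign(MyTransaction.prototype, TransactionImpl, {
 *     getTransactionWrappers: function () { return [LOG_WRAPPER]; }
 *   });
 *   new MyTransaction().perform(function () { doWork(); });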
* */ 'use strict'; /** * Gets the target node from a native browser event by accounting for * inconsistencies in browser DOM APIs. * * @param {object} nativeEvent Native browser event. * @return {DOMEventTarget} Target node. */ function getEventTarget(nativeEvent) { var target = nativeEvent.target || nativeEvent.srcElement || window; // Normalize SVG <use> element events #4963 if (target.correspondingUseElement) { target = target.correspondingUseElement; } // Safari may fire events on text nodes (Node.TEXT_NODE is 3). // @see http://www.quirksmode.org/js/events_properties.html return target.nodeType === 3 ? target.parentNode : target; } module.exports = getEventTarget; /***/ }, /* 62 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ExecutionEnvironment = __webpack_require__(45); var useHasFeature; if (ExecutionEnvironment.canUseDOM) { useHasFeature = document.implementation && document.implementation.hasFeature && // always returns true in newer browsers as per the standard. // @see http://dom.spec.whatwg.org/#dom-domimplementation-hasfeature document.implementation.hasFeature('', '') !== true; } /** * Checks if an event is supported in the current execution environment. * * NOTE: This will not work correctly for non-generic events such as `change`, * `reset`, `load`, `error`, and `select`. * * Borrows from Modernizr. * * @param {string} eventNameSuffix Event name, e.g. "click". * @param {?boolean} capture Check if the capture phase is supported. * @return {boolean} True if the event is supported. * @internal * @license Modernizr 3.0.0pre (Custom Build) | MIT */ function isEventSupported(eventNameSuffix, capture) { if (!ExecutionEnvironment.canUseDOM || capture && !('addEventListener' in document)) { return false; } var eventName = 'on' + eventNameSuffix; var isSupported = eventName in document; if (!isSupported) { var element = document.createElement('div'); element.setAttribute(eventName, 'return;'); isSupported = typeof element[eventName] === 'function'; } if (!isSupported && useHasFeature && eventNameSuffix === 'wheel') { // This is the only way to test support for the `wheel` event in IE9+. isSupported = document.implementation.hasFeature('Events.wheel', '3.0'); } return isSupported; } module.exports = isEventSupported; /***/ }, /* 63 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* * */ 'use strict'; /** * @see http://www.whatwg.org/specs/web-apps/current-work/multipage/the-input-element.html#input-type-attr-summary */ var supportedInputTypes = { 'color': true, 'date': true, 'datetime': true, 'datetime-local': true, 'email': true, 'month': true, 'number': true, 'password': true, 'range': true, 'search': true, 'tel': true, 'text': true, 'time': true, 'url': true, 'week': true }; function isTextInputElement(elem) { var nodeName = elem && elem.nodeName && elem.nodeName.toLowerCase(); if (nodeName === 'input') { return !!supportedInputTypes[elem.type]; } if (nodeName === 'textarea') { return true; } return false; } module.exports = isTextInputElement; /***/ }, /* 64 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * Module that is injectable into `EventPluginHub`, that specifies a * deterministic ordering of `EventPlugin`s. A convenient way to reason about * plugins, without having to package every one of them. This is better than * having plugins be ordered in the same order that they are injected because * that ordering would be influenced by the packaging order. * `ResponderEventPlugin` must occur before `SimpleEventPlugin` so that * preventing default on events is convenient in `SimpleEventPlugin` handlers. */ var DefaultEventPluginOrder = ['ResponderEventPlugin', 'SimpleEventPlugin', 'TapEventPlugin', 'EnterLeaveEventPlugin', 'ChangeEventPlugin', 'SelectEventPlugin', 'BeforeInputEventPlugin']; module.exports = DefaultEventPluginOrder; /***/ }, /* 65 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var EventPropagators = __webpack_require__(38); var ReactDOMComponentTree = __webpack_require__(31); var SyntheticMouseEvent = __webpack_require__(66); var eventTypes = { mouseEnter: { registrationName: 'onMouseEnter', dependencies: ['topMouseOut', 'topMouseOver'] }, mouseLeave: { registrationName: 'onMouseLeave', dependencies: ['topMouseOut', 'topMouseOver'] } }; var EnterLeaveEventPlugin = { eventTypes: eventTypes, /** * For almost every interaction we care about, there will be both a top-level * `mouseover` and `mouseout` event that occurs. Only use `mouseout` so that * we do not extract duplicate events. However, moving the mouse into the * browser from outside will not fire a `mouseout` event. In this case, we use * the `mouseover` top-level event. */ extractEvents: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { if (topLevelType === 'topMouseOver' && (nativeEvent.relatedTarget || nativeEvent.fromElement)) { return null; } if (topLevelType !== 'topMouseOut' && topLevelType !== 'topMouseOver') { // Must not be a mouse in or mouse out - ignoring. return null; } var win; if (nativeEventTarget.window === nativeEventTarget) { // `nativeEventTarget` is probably a window object. win = nativeEventTarget; } else { // TODO: Figure out why `ownerDocument` is sometimes undefined in IE8. 
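      /*
       * Illustrative sketch (hypothetical DOM nodes, not part of this bundle):
       * a single native `mouseout` that crosses from node A into node B is
       * extracted by this plugin into a synthetic pair, roughly:
       *
       *   [
       *     { type: 'mouseleave', target: nodeA, relatedTarget: nodeB },
       *     { type: 'mouseenter', target: nodeB, relatedTarget: nodeA }
       *   ]
       *
       * which mirrors the non-bubbling native mouseenter/mouseleave semantics
       * without having to listen for those events directly.
       */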
var doc = nativeEventTarget.ownerDocument; if (doc) { win = doc.defaultView || doc.parentWindow; } else { win = window; } } var from; var to; if (topLevelType === 'topMouseOut') { from = targetInst; var related = nativeEvent.relatedTarget || nativeEvent.toElement; to = related ? ReactDOMComponentTree.getClosestInstanceFromNode(related) : null; } else { // Moving to a node from outside the window. from = null; to = targetInst; } if (from === to) { // Nothing pertains to our managed components. return null; } var fromNode = from == null ? win : ReactDOMComponentTree.getNodeFromInstance(from); var toNode = to == null ? win : ReactDOMComponentTree.getNodeFromInstance(to); var leave = SyntheticMouseEvent.getPooled(eventTypes.mouseLeave, from, nativeEvent, nativeEventTarget); leave.type = 'mouseleave'; leave.target = fromNode; leave.relatedTarget = toNode; var enter = SyntheticMouseEvent.getPooled(eventTypes.mouseEnter, to, nativeEvent, nativeEventTarget); enter.type = 'mouseenter'; enter.target = toNode; enter.relatedTarget = fromNode; EventPropagators.accumulateEnterLeaveDispatches(leave, enter, from, to); return [leave, enter]; } }; module.exports = EnterLeaveEventPlugin; /***/ }, /* 66 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticUIEvent = __webpack_require__(67); var ViewportMetrics = __webpack_require__(68); var getEventModifierState = __webpack_require__(69); /** * @interface MouseEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var MouseEventInterface = { screenX: null, screenY: null, clientX: null, clientY: null, ctrlKey: null, shiftKey: null, altKey: null, metaKey: null, getModifierState: getEventModifierState, button: function (event) { // Webkit, Firefox, IE9+ // which: 1 2 3 // button: 0 1 2 (standard) var button = event.button; if ('which' in event) { return button; } // IE<9 // which: undefined // button: 0 0 0 // button: 1 4 2 (onmouseup) return button === 2 ? 2 : button === 4 ? 1 : 0; }, buttons: null, relatedTarget: function (event) { return event.relatedTarget || (event.fromElement === event.srcElement ? event.toElement : event.fromElement); }, // "Proprietary" Interface. pageX: function (event) { return 'pageX' in event ? event.pageX : event.clientX + ViewportMetrics.currentScrollLeft; }, pageY: function (event) { return 'pageY' in event ? event.pageY : event.clientY + ViewportMetrics.currentScrollTop; } }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticMouseEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticUIEvent.augmentClass(SyntheticMouseEvent, MouseEventInterface); module.exports = SyntheticMouseEvent; /***/ }, /* 67 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. 
An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticEvent = __webpack_require__(50); var getEventTarget = __webpack_require__(61); /** * @interface UIEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var UIEventInterface = { view: function (event) { if (event.view) { return event.view; } var target = getEventTarget(event); if (target.window === target) { // target is a window object return target; } var doc = target.ownerDocument; // TODO: Figure out why `ownerDocument` is sometimes undefined in IE8. if (doc) { return doc.defaultView || doc.parentWindow; } else { return window; } }, detail: function (event) { return event.detail || 0; } }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticEvent} */ function SyntheticUIEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticEvent.augmentClass(SyntheticUIEvent, UIEventInterface); module.exports = SyntheticUIEvent; /***/ }, /* 68 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ViewportMetrics = { currentScrollLeft: 0, currentScrollTop: 0, refreshScrollValues: function (scrollPosition) { ViewportMetrics.currentScrollLeft = scrollPosition.x; ViewportMetrics.currentScrollTop = scrollPosition.y; } }; module.exports = ViewportMetrics; /***/ }, /* 69 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * Translation from modifier key to the associated property in the event. * @see http://www.w3.org/TR/DOM-Level-3-Events/#keys-Modifiers */ var modifierKeyToProp = { 'Alt': 'altKey', 'Control': 'ctrlKey', 'Meta': 'metaKey', 'Shift': 'shiftKey' }; // IE8 does not implement getModifierState so we simply map it to the only // modifier keys exposed by the event itself, does not support Lock-keys. // Currently, all major browsers except Chrome seems to support Lock-keys. function modifierStateGetter(keyArg) { var syntheticEvent = this; var nativeEvent = syntheticEvent.nativeEvent; if (nativeEvent.getModifierState) { return nativeEvent.getModifierState(keyArg); } var keyProp = modifierKeyToProp[keyArg]; return keyProp ? !!nativeEvent[keyProp] : false; } function getEventModifierState(nativeEvent) { return modifierStateGetter; } module.exports = getEventModifierState; /***/ }, /* 70 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var DOMProperty = __webpack_require__(33); var MUST_USE_PROPERTY = DOMProperty.injection.MUST_USE_PROPERTY; var HAS_BOOLEAN_VALUE = DOMProperty.injection.HAS_BOOLEAN_VALUE; var HAS_NUMERIC_VALUE = DOMProperty.injection.HAS_NUMERIC_VALUE; var HAS_POSITIVE_NUMERIC_VALUE = DOMProperty.injection.HAS_POSITIVE_NUMERIC_VALUE; var HAS_OVERLOADED_BOOLEAN_VALUE = DOMProperty.injection.HAS_OVERLOADED_BOOLEAN_VALUE; var HTMLDOMPropertyConfig = { isCustomAttribute: RegExp.prototype.test.bind(new RegExp('^(data|aria)-[' + DOMProperty.ATTRIBUTE_NAME_CHAR + ']*$')), Properties: { /** * Standard Properties */ accept: 0, acceptCharset: 0, accessKey: 0, action: 0, allowFullScreen: HAS_BOOLEAN_VALUE, allowTransparency: 0, alt: 0, // specifies target context for links with `preload` type as: 0, async: HAS_BOOLEAN_VALUE, autoComplete: 0, // autoFocus is polyfilled/normalized by AutoFocusUtils // autoFocus: HAS_BOOLEAN_VALUE, autoPlay: HAS_BOOLEAN_VALUE, capture: HAS_BOOLEAN_VALUE, cellPadding: 0, cellSpacing: 0, charSet: 0, challenge: 0, checked: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE, cite: 0, classID: 0, className: 0, cols: HAS_POSITIVE_NUMERIC_VALUE, colSpan: 0, content: 0, contentEditable: 0, contextMenu: 0, controls: HAS_BOOLEAN_VALUE, coords: 0, crossOrigin: 0, data: 0, // For `<object />` acts as `src`. dateTime: 0, 'default': HAS_BOOLEAN_VALUE, defer: HAS_BOOLEAN_VALUE, dir: 0, disabled: HAS_BOOLEAN_VALUE, download: HAS_OVERLOADED_BOOLEAN_VALUE, draggable: 0, encType: 0, form: 0, formAction: 0, formEncType: 0, formMethod: 0, formNoValidate: HAS_BOOLEAN_VALUE, formTarget: 0, frameBorder: 0, headers: 0, height: 0, hidden: HAS_BOOLEAN_VALUE, high: 0, href: 0, hrefLang: 0, htmlFor: 0, httpEquiv: 0, icon: 0, id: 0, inputMode: 0, integrity: 0, is: 0, keyParams: 0, keyType: 0, kind: 0, label: 0, lang: 0, list: 0, loop: HAS_BOOLEAN_VALUE, low: 0, manifest: 0, marginHeight: 0, marginWidth: 0, max: 0, maxLength: 0, media: 0, mediaGroup: 0, method: 0, min: 0, minLength: 0, // Caution; `option.selected` is not updated if `select.multiple` is // disabled with `removeAttribute`. multiple: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE, muted: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE, name: 0, nonce: 0, noValidate: HAS_BOOLEAN_VALUE, open: HAS_BOOLEAN_VALUE, optimum: 0, pattern: 0, placeholder: 0, playsInline: HAS_BOOLEAN_VALUE, poster: 0, preload: 0, profile: 0, radioGroup: 0, readOnly: HAS_BOOLEAN_VALUE, referrerPolicy: 0, rel: 0, required: HAS_BOOLEAN_VALUE, reversed: HAS_BOOLEAN_VALUE, role: 0, rows: HAS_POSITIVE_NUMERIC_VALUE, rowSpan: HAS_NUMERIC_VALUE, sandbox: 0, scope: 0, scoped: HAS_BOOLEAN_VALUE, scrolling: 0, seamless: HAS_BOOLEAN_VALUE, selected: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE, shape: 0, size: HAS_POSITIVE_NUMERIC_VALUE, sizes: 0, span: HAS_POSITIVE_NUMERIC_VALUE, spellCheck: 0, src: 0, srcDoc: 0, srcLang: 0, srcSet: 0, start: HAS_NUMERIC_VALUE, step: 0, style: 0, summary: 0, tabIndex: 0, target: 0, title: 0, // Setting .type throws on non-<input> tags type: 0, useMap: 0, value: 0, width: 0, wmode: 0, wrap: 0, /** * RDFa Properties */ about: 0, datatype: 0, inlist: 0, prefix: 0, // property is also supported for OpenGraph in meta tags. property: 0, resource: 0, 'typeof': 0, vocab: 0, /** * Non-standard Properties */ // autoCapitalize and autoCorrect are supported in Mobile Safari for // keyboard hints. 
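    /*
     * Illustrative sketch (helper shown for explanation only, not part of this
     * bundle): every value in `Properties` below is a bitmask built from the
     * flags imported above, so a consumer can decode an entry such as
     * `checked: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE` with bitwise AND:
     *
     *   function checkMask(value, bitmask) {
     *     return (value & bitmask) === bitmask;
     *   }
     *
     *   var flags = MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE;  // e.g. `checked`
     *   checkMask(flags, HAS_BOOLEAN_VALUE);  // true  -> treated as boolean
     *   checkMask(flags, MUST_USE_PROPERTY);  // true  -> set via the DOM property
     *   checkMask(0, HAS_BOOLEAN_VALUE);      // false -> plain string attribute
     */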
autoCapitalize: 0, autoCorrect: 0, // autoSave allows WebKit/Blink to persist values of input fields on page reloads autoSave: 0, // color is for Safari mask-icon link color: 0, // itemProp, itemScope, itemType are for // Microdata support. See http://schema.org/docs/gs.html itemProp: 0, itemScope: HAS_BOOLEAN_VALUE, itemType: 0, // itemID and itemRef are for Microdata support as well but // only specified in the WHATWG spec document. See // https://html.spec.whatwg.org/multipage/microdata.html#microdata-dom-api itemID: 0, itemRef: 0, // results show looking glass icon and recent searches on input // search fields in WebKit/Blink results: 0, // IE-only attribute that specifies security restrictions on an iframe // as an alternative to the sandbox attribute on IE<10 security: 0, // IE-only attribute that controls focus behavior unselectable: 0 }, DOMAttributeNames: { acceptCharset: 'accept-charset', className: 'class', htmlFor: 'for', httpEquiv: 'http-equiv' }, DOMPropertyNames: {} }; module.exports = HTMLDOMPropertyConfig; /***/ }, /* 71 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMChildrenOperations = __webpack_require__(72); var ReactDOMIDOperations = __webpack_require__(83); /** * Abstracts away all functionality of the reconciler that requires knowledge of * the browser context. TODO: These callers should be refactored to avoid the * need for this injection. */ var ReactComponentBrowserEnvironment = { processChildrenUpdates: ReactDOMIDOperations.dangerouslyProcessChildrenUpdates, replaceNodeWithMarkup: DOMChildrenOperations.dangerouslyReplaceNodeWithMarkup }; module.exports = ReactComponentBrowserEnvironment; /***/ }, /* 72 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMLazyTree = __webpack_require__(73); var Danger = __webpack_require__(79); var ReactDOMComponentTree = __webpack_require__(31); var ReactInstrumentation = __webpack_require__(59); var createMicrosoftUnsafeLocalFunction = __webpack_require__(76); var setInnerHTML = __webpack_require__(75); var setTextContent = __webpack_require__(77); function getNodeAfter(parentNode, node) { // Special case for text components, which return [open, close] comments // from getHostNode. if (Array.isArray(node)) { node = node[1]; } return node ? node.nextSibling : parentNode.firstChild; } /** * Inserts `childNode` as a child of `parentNode` at the `index`. * * @param {DOMElement} parentNode Parent node in which to insert. * @param {DOMElement} childNode Child node to insert. * @param {number} index Index at which to insert the child. * @internal */ var insertChildAt = createMicrosoftUnsafeLocalFunction(function (parentNode, childNode, referenceNode) { // We rely exclusively on `insertBefore(node, null)` instead of also using // `appendChild(node)`. (Using `undefined` is not allowed by all browsers so // we are careful to use `null`.) 
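    /*
     * Illustrative sketch (standalone DOM snippet, not part of this bundle):
     * `insertBefore(node, null)` appends, so the single call below covers both
     * the "insert before a sibling" and the "append at the end" cases:
     *
     *   var parent = document.createElement('ul');
     *   var a = document.createElement('li');
     *   var b = document.createElement('li');
     *   parent.insertBefore(a, null);  // same effect as parent.appendChild(a)
     *   parent.insertBefore(b, a);     // b is placed before a
     *   // parent.childNodes order is now: b, a
     */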
parentNode.insertBefore(childNode, referenceNode); }); function insertLazyTreeChildAt(parentNode, childTree, referenceNode) { DOMLazyTree.insertTreeBefore(parentNode, childTree, referenceNode); } function moveChild(parentNode, childNode, referenceNode) { if (Array.isArray(childNode)) { moveDelimitedText(parentNode, childNode[0], childNode[1], referenceNode); } else { insertChildAt(parentNode, childNode, referenceNode); } } function removeChild(parentNode, childNode) { if (Array.isArray(childNode)) { var closingComment = childNode[1]; childNode = childNode[0]; removeDelimitedText(parentNode, childNode, closingComment); parentNode.removeChild(closingComment); } parentNode.removeChild(childNode); } function moveDelimitedText(parentNode, openingComment, closingComment, referenceNode) { var node = openingComment; while (true) { var nextNode = node.nextSibling; insertChildAt(parentNode, node, referenceNode); if (node === closingComment) { break; } node = nextNode; } } function removeDelimitedText(parentNode, startNode, closingComment) { while (true) { var node = startNode.nextSibling; if (node === closingComment) { // The closing comment is removed by ReactMultiChild. break; } else { parentNode.removeChild(node); } } } function replaceDelimitedText(openingComment, closingComment, stringText) { var parentNode = openingComment.parentNode; var nodeAfterComment = openingComment.nextSibling; if (nodeAfterComment === closingComment) { // There are no text nodes between the opening and closing comments; insert // a new one if stringText isn't empty. if (stringText) { insertChildAt(parentNode, document.createTextNode(stringText), nodeAfterComment); } } else { if (stringText) { // Set the text content of the first node after the opening comment, and // remove all following nodes up until the closing comment. setTextContent(nodeAfterComment, stringText); removeDelimitedText(parentNode, nodeAfterComment, closingComment); } else { removeDelimitedText(parentNode, openingComment, closingComment); } } if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: ReactDOMComponentTree.getInstanceFromNode(openingComment)._debugID, type: 'replace text', payload: stringText }); } } var dangerouslyReplaceNodeWithMarkup = Danger.dangerouslyReplaceNodeWithMarkup; if (false) { dangerouslyReplaceNodeWithMarkup = function (oldChild, markup, prevInstance) { Danger.dangerouslyReplaceNodeWithMarkup(oldChild, markup); if (prevInstance._debugID !== 0) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: prevInstance._debugID, type: 'replace with', payload: markup.toString() }); } else { var nextInstance = ReactDOMComponentTree.getInstanceFromNode(markup.node); if (nextInstance._debugID !== 0) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: nextInstance._debugID, type: 'mount', payload: markup.toString() }); } } }; } /** * Operations for updating with DOM children. */ var DOMChildrenOperations = { dangerouslyReplaceNodeWithMarkup: dangerouslyReplaceNodeWithMarkup, replaceDelimitedText: replaceDelimitedText, /** * Updates a component's children by processing a series of updates. The * update configurations are each expected to have a `parentNode` property. * * @param {array<object>} updates List of update configurations. 
* @internal */ processUpdates: function (parentNode, updates) { if (false) { var parentNodeDebugID = ReactDOMComponentTree.getInstanceFromNode(parentNode)._debugID; } for (var k = 0; k < updates.length; k++) { var update = updates[k]; switch (update.type) { case 'INSERT_MARKUP': insertLazyTreeChildAt(parentNode, update.content, getNodeAfter(parentNode, update.afterNode)); if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: parentNodeDebugID, type: 'insert child', payload: { toIndex: update.toIndex, content: update.content.toString() } }); } break; case 'MOVE_EXISTING': moveChild(parentNode, update.fromNode, getNodeAfter(parentNode, update.afterNode)); if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: parentNodeDebugID, type: 'move child', payload: { fromIndex: update.fromIndex, toIndex: update.toIndex } }); } break; case 'SET_MARKUP': setInnerHTML(parentNode, update.content); if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: parentNodeDebugID, type: 'replace children', payload: update.content.toString() }); } break; case 'TEXT_CONTENT': setTextContent(parentNode, update.content); if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: parentNodeDebugID, type: 'replace text', payload: update.content.toString() }); } break; case 'REMOVE_NODE': removeChild(parentNode, update.fromNode); if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: parentNodeDebugID, type: 'remove child', payload: { fromIndex: update.fromIndex } }); } break; } } } }; module.exports = DOMChildrenOperations; /***/ }, /* 73 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMNamespaces = __webpack_require__(74); var setInnerHTML = __webpack_require__(75); var createMicrosoftUnsafeLocalFunction = __webpack_require__(76); var setTextContent = __webpack_require__(77); var ELEMENT_NODE_TYPE = 1; var DOCUMENT_FRAGMENT_NODE_TYPE = 11; /** * In IE (8-11) and Edge, appending nodes with no children is dramatically * faster than appending a full subtree, so we essentially queue up the * .appendChild calls here and apply them so each node is added to its parent * before any children are added. * * In other browsers, doing so is slower or neutral compared to the other order * (in Firefox, twice as slow) so we only do this inversion in IE. * * See https://github.com/spicyj/innerhtml-vs-createelement-vs-clonenode. */ var enableLazy = typeof document !== 'undefined' && typeof document.documentMode === 'number' || typeof navigator !== 'undefined' && typeof navigator.userAgent === 'string' && /\bEdge\/\d/.test(navigator.userAgent); function insertTreeChildren(tree) { if (!enableLazy) { return; } var node = tree.node; var children = tree.children; if (children.length) { for (var i = 0; i < children.length; i++) { insertTreeBefore(node, children[i], null); } } else if (tree.html != null) { setInnerHTML(node, tree.html); } else if (tree.text != null) { setTextContent(node, tree.text); } } var insertTreeBefore = createMicrosoftUnsafeLocalFunction(function (parentNode, tree, referenceNode) { // DocumentFragments aren't actually part of the DOM after insertion so // appending children won't update the DOM. 
We need to ensure the fragment // is properly populated first, breaking out of our lazy approach for just // this level. Also, some <object> plugins (like Flash Player) will read // <param> nodes immediately upon insertion into the DOM, so <object> // must also be populated prior to insertion into the DOM. if (tree.node.nodeType === DOCUMENT_FRAGMENT_NODE_TYPE || tree.node.nodeType === ELEMENT_NODE_TYPE && tree.node.nodeName.toLowerCase() === 'object' && (tree.node.namespaceURI == null || tree.node.namespaceURI === DOMNamespaces.html)) { insertTreeChildren(tree); parentNode.insertBefore(tree.node, referenceNode); } else { parentNode.insertBefore(tree.node, referenceNode); insertTreeChildren(tree); } }); function replaceChildWithTree(oldNode, newTree) { oldNode.parentNode.replaceChild(newTree.node, oldNode); insertTreeChildren(newTree); } function queueChild(parentTree, childTree) { if (enableLazy) { parentTree.children.push(childTree); } else { parentTree.node.appendChild(childTree.node); } } function queueHTML(tree, html) { if (enableLazy) { tree.html = html; } else { setInnerHTML(tree.node, html); } } function queueText(tree, text) { if (enableLazy) { tree.text = text; } else { setTextContent(tree.node, text); } } function toString() { return this.node.nodeName; } function DOMLazyTree(node) { return { node: node, children: [], html: null, text: null, toString: toString }; } DOMLazyTree.insertTreeBefore = insertTreeBefore; DOMLazyTree.replaceChildWithTree = replaceChildWithTree; DOMLazyTree.queueChild = queueChild; DOMLazyTree.queueHTML = queueHTML; DOMLazyTree.queueText = queueText; module.exports = DOMLazyTree; /***/ }, /* 74 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMNamespaces = { html: 'http://www.w3.org/1999/xhtml', mathml: 'http://www.w3.org/1998/Math/MathML', svg: 'http://www.w3.org/2000/svg' }; module.exports = DOMNamespaces; /***/ }, /* 75 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ExecutionEnvironment = __webpack_require__(45); var DOMNamespaces = __webpack_require__(74); var WHITESPACE_TEST = /^[ \r\n\t\f]/; var NONVISIBLE_TEST = /<(!--|link|noscript|meta|script|style)[ \r\n\t\f\/>]/; var createMicrosoftUnsafeLocalFunction = __webpack_require__(76); // SVG temp container for IE lacking innerHTML var reusableSVGContainer; /** * Set the innerHTML property of a node, ensuring that whitespace is preserved * even in IE8. 
* * @param {DOMElement} node * @param {string} html * @internal */ var setInnerHTML = createMicrosoftUnsafeLocalFunction(function (node, html) { // IE does not have innerHTML for SVG nodes, so instead we inject the // new markup in a temp node and then move the child nodes across into // the target node if (node.namespaceURI === DOMNamespaces.svg && !('innerHTML' in node)) { reusableSVGContainer = reusableSVGContainer || document.createElement('div'); reusableSVGContainer.innerHTML = '<svg>' + html + '</svg>'; var svgNode = reusableSVGContainer.firstChild; while (svgNode.firstChild) { node.appendChild(svgNode.firstChild); } } else { node.innerHTML = html; } }); if (ExecutionEnvironment.canUseDOM) { // IE8: When updating a just created node with innerHTML only leading // whitespace is removed. When updating an existing node with innerHTML // whitespace in root TextNodes is also collapsed. // @see quirksmode.org/bugreports/archives/2004/11/innerhtml_and_t.html // Feature detection; only IE8 is known to behave improperly like this. var testElement = document.createElement('div'); testElement.innerHTML = ' '; if (testElement.innerHTML === '') { setInnerHTML = function (node, html) { // Magic theory: IE8 supposedly differentiates between added and updated // nodes when processing innerHTML, innerHTML on updated nodes suffers // from worse whitespace behavior. Re-adding a node like this triggers // the initial and more favorable whitespace behavior. // TODO: What to do on a detached node? if (node.parentNode) { node.parentNode.replaceChild(node, node); } // We also implement a workaround for non-visible tags disappearing into // thin air on IE8, this only happens if there is no visible text // in-front of the non-visible tags. Piggyback on the whitespace fix // and simply check if any non-visible tags appear in the source. if (WHITESPACE_TEST.test(html) || html[0] === '<' && NONVISIBLE_TEST.test(html)) { // Recover leading whitespace by temporarily prepending any character. // \uFEFF has the potential advantage of being zero-width/invisible. // UglifyJS drops U+FEFF chars when parsing, so use String.fromCharCode // in hopes that this is preserved even if "\uFEFF" is transformed to // the actual Unicode character (by Babel, for example). // https://github.com/mishoo/UglifyJS2/blob/v2.4.20/lib/parse.js#L216 node.innerHTML = String.fromCharCode(0xFEFF) + html; // deleteData leaves an empty `TextNode` which offsets the index of all // children. Definitely want to avoid this. var textNode = node.firstChild; if (textNode.data.length === 1) { node.removeChild(textNode); } else { textNode.deleteData(0, 1); } } else { node.innerHTML = html; } }; } testElement = null; } module.exports = setInnerHTML; /***/ }, /* 76 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ /* globals MSApp */ 'use strict'; /** * Create a function which has 'unsafe' privileges (required by windows8 apps) */ var createMicrosoftUnsafeLocalFunction = function (func) { if (typeof MSApp !== 'undefined' && MSApp.execUnsafeLocalFunction) { return function (arg0, arg1, arg2, arg3) { MSApp.execUnsafeLocalFunction(function () { return func(arg0, arg1, arg2, arg3); }); }; } else { return func; } }; module.exports = createMicrosoftUnsafeLocalFunction; /***/ }, /* 77 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ExecutionEnvironment = __webpack_require__(45); var escapeTextContentForBrowser = __webpack_require__(78); var setInnerHTML = __webpack_require__(75); /** * Set the textContent property of a node, ensuring that whitespace is preserved * even in IE8. innerText is a poor substitute for textContent and, among many * issues, inserts <br> instead of the literal newline chars. innerHTML behaves * as it should. * * @param {DOMElement} node * @param {string} text * @internal */ var setTextContent = function (node, text) { if (text) { var firstChild = node.firstChild; if (firstChild && firstChild === node.lastChild && firstChild.nodeType === 3) { firstChild.nodeValue = text; return; } } node.textContent = text; }; if (ExecutionEnvironment.canUseDOM) { if (!('textContent' in document.documentElement)) { setTextContent = function (node, text) { if (node.nodeType === 3) { node.nodeValue = text; return; } setInnerHTML(node, escapeTextContentForBrowser(text)); }; } } module.exports = setTextContent; /***/ }, /* 78 */ /***/ function(module, exports) { /** * Copyright 2016-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * Based on the escape-html library, which is used under the MIT License below: * * Copyright (c) 2012-2013 TJ Holowaychuk * Copyright (c) 2015 Andreas Lubbe * Copyright (c) 2015 Tiancheng "Timothy" Gu * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * 'Software'), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * */ 'use strict'; // code copied and modified from escape-html /** * Module variables. 
* @private */ var matchHtmlRegExp = /["'&<>]/; /** * Escape special characters in the given string of html. * * @param {string} string The string to escape for inserting into HTML * @return {string} * @public */ function escapeHtml(string) { var str = '' + string; var match = matchHtmlRegExp.exec(str); if (!match) { return str; } var escape; var html = ''; var index = 0; var lastIndex = 0; for (index = match.index; index < str.length; index++) { switch (str.charCodeAt(index)) { case 34: // " escape = '&quot;'; break; case 38: // & escape = '&amp;'; break; case 39: // ' escape = '&#x27;'; // modified from escape-html; used to be '&#39' break; case 60: // < escape = '&lt;'; break; case 62: // > escape = '&gt;'; break; default: continue; } if (lastIndex !== index) { html += str.substring(lastIndex, index); } lastIndex = index + 1; html += escape; } return lastIndex !== index ? html + str.substring(lastIndex, index) : html; } // end code copied and modified from escape-html /** * Escapes text to prevent scripting attacks. * * @param {*} text Text value to escape. * @return {string} An escaped string. */ function escapeTextContentForBrowser(text) { if (typeof text === 'boolean' || typeof text === 'number') { // this shortcircuit helps perf for types that we know will never have // special characters, especially given that this function is used often // for numeric dom ids. return '' + text; } return escapeHtml(text); } module.exports = escapeTextContentForBrowser; /***/ }, /* 79 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var DOMLazyTree = __webpack_require__(73); var ExecutionEnvironment = __webpack_require__(45); var createNodesFromMarkup = __webpack_require__(80); var emptyFunction = __webpack_require__(12); var invariant = __webpack_require__(8); var Danger = { /** * Replaces a node with a string of markup at its current position within its * parent. The markup must render into a single root node. * * @param {DOMElement} oldChild Child node to replace. * @param {string} markup Markup to render in place of the child node. * @internal */ dangerouslyReplaceNodeWithMarkup: function (oldChild, markup) { !ExecutionEnvironment.canUseDOM ? false ? invariant(false, 'dangerouslyReplaceNodeWithMarkup(...): Cannot render markup in a worker thread. Make sure `window` and `document` are available globally before requiring React when unit testing or use ReactDOMServer.renderToString() for server rendering.') : _prodInvariant('56') : void 0; !markup ? false ? invariant(false, 'dangerouslyReplaceNodeWithMarkup(...): Missing markup.') : _prodInvariant('57') : void 0; !(oldChild.nodeName !== 'HTML') ? false ? invariant(false, 'dangerouslyReplaceNodeWithMarkup(...): Cannot replace markup of the <html> node. This is because browser quirks make this unreliable and/or slow. If you want to render to the root you must use server rendering. 
See ReactDOMServer.renderToString().') : _prodInvariant('58') : void 0; if (typeof markup === 'string') { var newChild = createNodesFromMarkup(markup, emptyFunction)[0]; oldChild.parentNode.replaceChild(newChild, oldChild); } else { DOMLazyTree.replaceChildWithTree(oldChild, markup); } } }; module.exports = Danger; /***/ }, /* 80 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ /*eslint-disable fb-www/unsafe-html*/ var ExecutionEnvironment = __webpack_require__(45); var createArrayFromMixed = __webpack_require__(81); var getMarkupWrap = __webpack_require__(82); var invariant = __webpack_require__(8); /** * Dummy container used to render all markup. */ var dummyNode = ExecutionEnvironment.canUseDOM ? document.createElement('div') : null; /** * Pattern used by `getNodeName`. */ var nodeNamePattern = /^\s*<(\w+)/; /** * Extracts the `nodeName` of the first element in a string of markup. * * @param {string} markup String of markup. * @return {?string} Node name of the supplied markup. */ function getNodeName(markup) { var nodeNameMatch = markup.match(nodeNamePattern); return nodeNameMatch && nodeNameMatch[1].toLowerCase(); } /** * Creates an array containing the nodes rendered from the supplied markup. The * optionally supplied `handleScript` function will be invoked once for each * <script> element that is rendered. If no `handleScript` function is supplied, * an exception is thrown if any <script> elements are rendered. * * @param {string} markup A string of valid HTML markup. * @param {?function} handleScript Invoked once for each rendered <script>. * @return {array<DOMElement|DOMTextNode>} An array of rendered nodes. */ function createNodesFromMarkup(markup, handleScript) { var node = dummyNode; !!!dummyNode ? false ? invariant(false, 'createNodesFromMarkup dummy not initialized') : invariant(false) : void 0; var nodeName = getNodeName(markup); var wrap = nodeName && getMarkupWrap(nodeName); if (wrap) { node.innerHTML = wrap[1] + markup + wrap[2]; var wrapDepth = wrap[0]; while (wrapDepth--) { node = node.lastChild; } } else { node.innerHTML = markup; } var scripts = node.getElementsByTagName('script'); if (scripts.length) { !handleScript ? false ? invariant(false, 'createNodesFromMarkup(...): Unexpected <script> element rendered.') : invariant(false) : void 0; createArrayFromMixed(scripts).forEach(handleScript); } var nodes = Array.from(node.childNodes); while (node.lastChild) { node.removeChild(node.lastChild); } return nodes; } module.exports = createNodesFromMarkup; /***/ }, /* 81 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ var invariant = __webpack_require__(8); /** * Convert array-like objects to arrays. * * This API assumes the caller knows the contents of the data type. For less * well defined inputs use createArrayFromMixed. 
* * @param {object|function|filelist} obj * @return {array} */ function toArray(obj) { var length = obj.length; // Some browsers builtin objects can report typeof 'function' (e.g. NodeList // in old versions of Safari). !(!Array.isArray(obj) && (typeof obj === 'object' || typeof obj === 'function')) ? false ? invariant(false, 'toArray: Array-like object expected') : invariant(false) : void 0; !(typeof length === 'number') ? false ? invariant(false, 'toArray: Object needs a length property') : invariant(false) : void 0; !(length === 0 || length - 1 in obj) ? false ? invariant(false, 'toArray: Object should have keys for indices') : invariant(false) : void 0; !(typeof obj.callee !== 'function') ? false ? invariant(false, 'toArray: Object can\'t be `arguments`. Use rest params ' + '(function(...args) {}) or Array.from() instead.') : invariant(false) : void 0; // Old IE doesn't give collections access to hasOwnProperty. Assume inputs // without method will throw during the slice call and skip straight to the // fallback. if (obj.hasOwnProperty) { try { return Array.prototype.slice.call(obj); } catch (e) { // IE < 9 does not support Array#slice on collections objects } } // Fall back to copying key by key. This assumes all keys have a value, // so will not preserve sparsely populated inputs. var ret = Array(length); for (var ii = 0; ii < length; ii++) { ret[ii] = obj[ii]; } return ret; } /** * Perform a heuristic test to determine if an object is "array-like". * * A monk asked Joshu, a Zen master, "Has a dog Buddha nature?" * Joshu replied: "Mu." * * This function determines if its argument has "array nature": it returns * true if the argument is an actual array, an `arguments' object, or an * HTMLCollection (e.g. node.childNodes or node.getElementsByTagName()). * * It will return false for other array-like objects like Filelist. * * @param {*} obj * @return {boolean} */ function hasArrayNature(obj) { return ( // not null/false !!obj && ( // arrays are objects, NodeLists are functions in Safari typeof obj == 'object' || typeof obj == 'function') && // quacks like an array 'length' in obj && // not window !('setInterval' in obj) && // no DOM node should be considered an array-like // a 'select' element has 'length' and 'item' properties on IE8 typeof obj.nodeType != 'number' && ( // a real array Array.isArray(obj) || // arguments 'callee' in obj || // HTMLCollection/NodeList 'item' in obj) ); } /** * Ensure that the argument is an array by wrapping it in an array if it is not. * Creates a copy of the argument if it is already an array. * * This is mostly useful idiomatically: * * var createArrayFromMixed = require('createArrayFromMixed'); * * function takesOneOrMoreThings(things) { * things = createArrayFromMixed(things); * ... * } * * This allows you to treat `things' as an array, but accept scalars in the API. * * If you need to convert an array-like object, like `arguments`, into an array * use toArray instead. * * @param {*} obj * @return {array} */ function createArrayFromMixed(obj) { if (!hasArrayNature(obj)) { return [obj]; } else if (Array.isArray(obj)) { return obj.slice(); } else { return toArray(obj); } } module.exports = createArrayFromMixed; /***/ }, /* 82 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. 
An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ /*eslint-disable fb-www/unsafe-html */ var ExecutionEnvironment = __webpack_require__(45); var invariant = __webpack_require__(8); /** * Dummy container used to detect which wraps are necessary. */ var dummyNode = ExecutionEnvironment.canUseDOM ? document.createElement('div') : null; /** * Some browsers cannot use `innerHTML` to render certain elements standalone, * so we wrap them, render the wrapped nodes, then extract the desired node. * * In IE8, certain elements cannot render alone, so wrap all elements ('*'). */ var shouldWrap = {}; var selectWrap = [1, '<select multiple="true">', '</select>']; var tableWrap = [1, '<table>', '</table>']; var trWrap = [3, '<table><tbody><tr>', '</tr></tbody></table>']; var svgWrap = [1, '<svg xmlns="http://www.w3.org/2000/svg">', '</svg>']; var markupWrap = { '*': [1, '?<div>', '</div>'], 'area': [1, '<map>', '</map>'], 'col': [2, '<table><tbody></tbody><colgroup>', '</colgroup></table>'], 'legend': [1, '<fieldset>', '</fieldset>'], 'param': [1, '<object>', '</object>'], 'tr': [2, '<table><tbody>', '</tbody></table>'], 'optgroup': selectWrap, 'option': selectWrap, 'caption': tableWrap, 'colgroup': tableWrap, 'tbody': tableWrap, 'tfoot': tableWrap, 'thead': tableWrap, 'td': trWrap, 'th': trWrap }; // Initialize the SVG elements since we know they'll always need to be wrapped // consistently. If they are created inside a <div> they will be initialized in // the wrong namespace (and will not display). var svgElements = ['circle', 'clipPath', 'defs', 'ellipse', 'g', 'image', 'line', 'linearGradient', 'mask', 'path', 'pattern', 'polygon', 'polyline', 'radialGradient', 'rect', 'stop', 'text', 'tspan']; svgElements.forEach(function (nodeName) { markupWrap[nodeName] = svgWrap; shouldWrap[nodeName] = true; }); /** * Gets the markup wrap configuration for the supplied `nodeName`. * * NOTE: This lazily detects which wraps are necessary for the current browser. * * @param {string} nodeName Lowercase `nodeName`. * @return {?array} Markup wrap configuration, if applicable. */ function getMarkupWrap(nodeName) { !!!dummyNode ? false ? invariant(false, 'Markup wrapping node not initialized') : invariant(false) : void 0; if (!markupWrap.hasOwnProperty(nodeName)) { nodeName = '*'; } if (!shouldWrap.hasOwnProperty(nodeName)) { if (nodeName === '*') { dummyNode.innerHTML = '<link />'; } else { dummyNode.innerHTML = '<' + nodeName + '></' + nodeName + '>'; } shouldWrap[nodeName] = !dummyNode.firstChild; } return shouldWrap[nodeName] ? markupWrap[nodeName] : null; } module.exports = getMarkupWrap; /***/ }, /* 83 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMChildrenOperations = __webpack_require__(72); var ReactDOMComponentTree = __webpack_require__(31); /** * Operations used to process updates to DOM nodes. */ var ReactDOMIDOperations = { /** * Updates a component's children by processing a series of updates. * * @param {array<object>} updates List of update configurations. 
* @internal */ dangerouslyProcessChildrenUpdates: function (parentInst, updates) { var node = ReactDOMComponentTree.getNodeFromInstance(parentInst); DOMChildrenOperations.processUpdates(node, updates); } }; module.exports = ReactDOMIDOperations; /***/ }, /* 84 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ /* global hasOwnProperty:true */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var AutoFocusUtils = __webpack_require__(85); var CSSPropertyOperations = __webpack_require__(87); var DOMLazyTree = __webpack_require__(73); var DOMNamespaces = __webpack_require__(74); var DOMProperty = __webpack_require__(33); var DOMPropertyOperations = __webpack_require__(95); var EventPluginHub = __webpack_require__(39); var EventPluginRegistry = __webpack_require__(40); var ReactBrowserEventEmitter = __webpack_require__(97); var ReactDOMComponentFlags = __webpack_require__(34); var ReactDOMComponentTree = __webpack_require__(31); var ReactDOMInput = __webpack_require__(100); var ReactDOMOption = __webpack_require__(103); var ReactDOMSelect = __webpack_require__(104); var ReactDOMTextarea = __webpack_require__(105); var ReactInstrumentation = __webpack_require__(59); var ReactMultiChild = __webpack_require__(106); var ReactServerRenderingTransaction = __webpack_require__(125); var emptyFunction = __webpack_require__(12); var escapeTextContentForBrowser = __webpack_require__(78); var invariant = __webpack_require__(8); var isEventSupported = __webpack_require__(62); var shallowEqual = __webpack_require__(114); var validateDOMNesting = __webpack_require__(128); var warning = __webpack_require__(11); var Flags = ReactDOMComponentFlags; var deleteListener = EventPluginHub.deleteListener; var getNode = ReactDOMComponentTree.getNodeFromInstance; var listenTo = ReactBrowserEventEmitter.listenTo; var registrationNameModules = EventPluginRegistry.registrationNameModules; // For quickly matching children type, to test if can be treated as content. var CONTENT_TYPES = { 'string': true, 'number': true }; var STYLE = 'style'; var HTML = '__html'; var RESERVED_PROPS = { children: null, dangerouslySetInnerHTML: null, suppressContentEditableWarning: null }; // Node type for document fragments (Node.DOCUMENT_FRAGMENT_NODE). var DOC_FRAGMENT_TYPE = 11; function getDeclarationErrorAddendum(internalInstance) { if (internalInstance) { var owner = internalInstance._currentElement._owner || null; if (owner) { var name = owner.getName(); if (name) { return ' This DOM node was rendered by `' + name + '`.'; } } } return ''; } function friendlyStringify(obj) { if (typeof obj === 'object') { if (Array.isArray(obj)) { return '[' + obj.map(friendlyStringify).join(', ') + ']'; } else { var pairs = []; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var keyEscaped = /^[a-z$_][\w$_]*$/i.test(key) ? 
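    /*
     * Illustrative sketch (hypothetical input, not part of this bundle) of the
     * output this helper produces for the mutated-style warning below:
     *
     *   friendlyStringify({ marginRight: 10, 'background-color': 'red' });
     *   // -> '{marginRight: 10, "background-color": "red"}'
     */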
key : JSON.stringify(key); pairs.push(keyEscaped + ': ' + friendlyStringify(obj[key])); } } return '{' + pairs.join(', ') + '}'; } } else if (typeof obj === 'string') { return JSON.stringify(obj); } else if (typeof obj === 'function') { return '[function object]'; } // Differs from JSON.stringify in that undefined because undefined and that // inf and nan don't become null return String(obj); } var styleMutationWarning = {}; function checkAndWarnForMutatedStyle(style1, style2, component) { if (style1 == null || style2 == null) { return; } if (shallowEqual(style1, style2)) { return; } var componentName = component._tag; var owner = component._currentElement._owner; var ownerName; if (owner) { ownerName = owner.getName(); } var hash = ownerName + '|' + componentName; if (styleMutationWarning.hasOwnProperty(hash)) { return; } styleMutationWarning[hash] = true; false ? warning(false, '`%s` was passed a style object that has previously been mutated. ' + 'Mutating `style` is deprecated. Consider cloning it beforehand. Check ' + 'the `render` %s. Previous style: %s. Mutated style: %s.', componentName, owner ? 'of `' + ownerName + '`' : 'using <' + componentName + '>', friendlyStringify(style1), friendlyStringify(style2)) : void 0; } /** * @param {object} component * @param {?object} props */ function assertValidProps(component, props) { if (!props) { return; } // Note the use of `==` which checks for null or undefined. if (voidElementTags[component._tag]) { !(props.children == null && props.dangerouslySetInnerHTML == null) ? false ? invariant(false, '%s is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.%s', component._tag, component._currentElement._owner ? ' Check the render method of ' + component._currentElement._owner.getName() + '.' : '') : _prodInvariant('137', component._tag, component._currentElement._owner ? ' Check the render method of ' + component._currentElement._owner.getName() + '.' : '') : void 0; } if (props.dangerouslySetInnerHTML != null) { !(props.children == null) ? false ? invariant(false, 'Can only set one of `children` or `props.dangerouslySetInnerHTML`.') : _prodInvariant('60') : void 0; !(typeof props.dangerouslySetInnerHTML === 'object' && HTML in props.dangerouslySetInnerHTML) ? false ? invariant(false, '`props.dangerouslySetInnerHTML` must be in the form `{__html: ...}`. Please visit https://fb.me/react-invariant-dangerously-set-inner-html for more information.') : _prodInvariant('61') : void 0; } if (false) { process.env.NODE_ENV !== 'production' ? warning(props.innerHTML == null, 'Directly setting property `innerHTML` is not permitted. ' + 'For more information, lookup documentation on `dangerouslySetInnerHTML`.') : void 0; process.env.NODE_ENV !== 'production' ? warning(props.suppressContentEditableWarning || !props.contentEditable || props.children == null, 'A component is `contentEditable` and contains `children` managed by ' + 'React. It is now your responsibility to guarantee that none of ' + 'those nodes are unexpectedly modified or duplicated. This is ' + 'probably not intentional.') : void 0; process.env.NODE_ENV !== 'production' ? warning(props.onFocusIn == null && props.onFocusOut == null, 'React uses onFocus and onBlur instead of onFocusIn and onFocusOut. ' + 'All React events are normalized to bubble, so onFocusIn and onFocusOut ' + 'are not needed/supported by React.') : void 0; } !(props.style == null || typeof props.style === 'object') ? false ? 
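    /*
     * Illustrative sketch (hypothetical props objects, not part of this bundle)
     * of what the checks in `assertValidProps` accept or reject:
     *
     *   { dangerouslySetInnerHTML: { __html: '<b>hi</b>' } }   // ok
     *   { children: 'hi',
     *     dangerouslySetInnerHTML: { __html: '<b>hi</b>' } }   // rejected ('60')
     *   { dangerouslySetInnerHTML: '<b>hi</b>' }               // rejected ('61')
     *   { style: { marginRight: '1em' } }                      // ok
     *   { style: 'margin-right: 1em' }                         // rejected ('62')
     */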
invariant(false, 'The `style` prop expects a mapping from style properties to values, not a string. For example, style={{marginRight: spacing + \'em\'}} when using JSX.%s', getDeclarationErrorAddendum(component)) : _prodInvariant('62', getDeclarationErrorAddendum(component)) : void 0; } function enqueuePutListener(inst, registrationName, listener, transaction) { if (transaction instanceof ReactServerRenderingTransaction) { return; } if (false) { // IE8 has no API for event capturing and the `onScroll` event doesn't // bubble. process.env.NODE_ENV !== 'production' ? warning(registrationName !== 'onScroll' || isEventSupported('scroll', true), 'This browser doesn\'t support the `onScroll` event') : void 0; } var containerInfo = inst._hostContainerInfo; var isDocumentFragment = containerInfo._node && containerInfo._node.nodeType === DOC_FRAGMENT_TYPE; var doc = isDocumentFragment ? containerInfo._node : containerInfo._ownerDocument; listenTo(registrationName, doc); transaction.getReactMountReady().enqueue(putListener, { inst: inst, registrationName: registrationName, listener: listener }); } function putListener() { var listenerToPut = this; EventPluginHub.putListener(listenerToPut.inst, listenerToPut.registrationName, listenerToPut.listener); } function inputPostMount() { var inst = this; ReactDOMInput.postMountWrapper(inst); } function textareaPostMount() { var inst = this; ReactDOMTextarea.postMountWrapper(inst); } function optionPostMount() { var inst = this; ReactDOMOption.postMountWrapper(inst); } var setAndValidateContentChildDev = emptyFunction; if (false) { setAndValidateContentChildDev = function (content) { var hasExistingContent = this._contentDebugID != null; var debugID = this._debugID; // This ID represents the inlined child that has no backing instance: var contentDebugID = -debugID; if (content == null) { if (hasExistingContent) { ReactInstrumentation.debugTool.onUnmountComponent(this._contentDebugID); } this._contentDebugID = null; return; } validateDOMNesting(null, String(content), this, this._ancestorInfo); this._contentDebugID = contentDebugID; if (hasExistingContent) { ReactInstrumentation.debugTool.onBeforeUpdateComponent(contentDebugID, content); ReactInstrumentation.debugTool.onUpdateComponent(contentDebugID); } else { ReactInstrumentation.debugTool.onBeforeMountComponent(contentDebugID, content, debugID); ReactInstrumentation.debugTool.onMountComponent(contentDebugID); ReactInstrumentation.debugTool.onSetChildren(debugID, [contentDebugID]); } }; } // There are so many media events, it makes sense to just // maintain a list rather than create a `trapBubbledEvent` for each var mediaEvents = { topAbort: 'abort', topCanPlay: 'canplay', topCanPlayThrough: 'canplaythrough', topDurationChange: 'durationchange', topEmptied: 'emptied', topEncrypted: 'encrypted', topEnded: 'ended', topError: 'error', topLoadedData: 'loadeddata', topLoadedMetadata: 'loadedmetadata', topLoadStart: 'loadstart', topPause: 'pause', topPlay: 'play', topPlaying: 'playing', topProgress: 'progress', topRateChange: 'ratechange', topSeeked: 'seeked', topSeeking: 'seeking', topStalled: 'stalled', topSuspend: 'suspend', topTimeUpdate: 'timeupdate', topVolumeChange: 'volumechange', topWaiting: 'waiting' }; function trapBubbledEventsLocal() { var inst = this; // If a component renders to null or if another component fatals and causes // the state of the tree to be corrupted, `node` here can be null. !inst._rootNodeID ? false ? 
invariant(false, 'Must be mounted to trap events') : _prodInvariant('63') : void 0; var node = getNode(inst); !node ? false ? invariant(false, 'trapBubbledEvent(...): Requires node to be rendered.') : _prodInvariant('64') : void 0; switch (inst._tag) { case 'iframe': case 'object': inst._wrapperState.listeners = [ReactBrowserEventEmitter.trapBubbledEvent('topLoad', 'load', node)]; break; case 'video': case 'audio': inst._wrapperState.listeners = []; // Create listener for each media event for (var event in mediaEvents) { if (mediaEvents.hasOwnProperty(event)) { inst._wrapperState.listeners.push(ReactBrowserEventEmitter.trapBubbledEvent(event, mediaEvents[event], node)); } } break; case 'source': inst._wrapperState.listeners = [ReactBrowserEventEmitter.trapBubbledEvent('topError', 'error', node)]; break; case 'img': inst._wrapperState.listeners = [ReactBrowserEventEmitter.trapBubbledEvent('topError', 'error', node), ReactBrowserEventEmitter.trapBubbledEvent('topLoad', 'load', node)]; break; case 'form': inst._wrapperState.listeners = [ReactBrowserEventEmitter.trapBubbledEvent('topReset', 'reset', node), ReactBrowserEventEmitter.trapBubbledEvent('topSubmit', 'submit', node)]; break; case 'input': case 'select': case 'textarea': inst._wrapperState.listeners = [ReactBrowserEventEmitter.trapBubbledEvent('topInvalid', 'invalid', node)]; break; } } function postUpdateSelectWrapper() { ReactDOMSelect.postUpdateWrapper(this); } // For HTML, certain tags should omit their close tag. We keep a whitelist for // those special-case tags. var omittedCloseTags = { 'area': true, 'base': true, 'br': true, 'col': true, 'embed': true, 'hr': true, 'img': true, 'input': true, 'keygen': true, 'link': true, 'meta': true, 'param': true, 'source': true, 'track': true, 'wbr': true }; var newlineEatingTags = { 'listing': true, 'pre': true, 'textarea': true }; // For HTML, certain tags cannot have children. This has the same purpose as // `omittedCloseTags` except that `menuitem` should still have its closing tag. var voidElementTags = _assign({ 'menuitem': true }, omittedCloseTags); // We accept any tag to be rendered but since this gets injected into arbitrary // HTML, we want to make sure that it's a safe tag. // http://www.w3.org/TR/REC-xml/#NT-Name var VALID_TAG_REGEX = /^[a-zA-Z][a-zA-Z:_\.\-\d]*$/; // Simplified subset var validatedTagCache = {}; var hasOwnProperty = {}.hasOwnProperty; function validateDangerousTag(tag) { if (!hasOwnProperty.call(validatedTagCache, tag)) { !VALID_TAG_REGEX.test(tag) ? false ? invariant(false, 'Invalid tag: %s', tag) : _prodInvariant('65', tag) : void 0; validatedTagCache[tag] = true; } } function isCustomComponent(tagName, props) { return tagName.indexOf('-') >= 0 || props.is != null; } var globalIdCounter = 1; /** * Creates a new React class that is idempotent and capable of containing other * React components. It accepts event listeners and DOM properties that are * valid according to `DOMProperty`. * * - Event listeners: `onClick`, `onMouseDown`, etc. * - DOM properties: `className`, `name`, `title`, etc. * * The `style` property functions differently from the DOM API. It accepts an * object mapping of style properties to values. 
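 *
 * For example (editor's illustrative sketch, not part of the upstream source):
 *
 *   // <div style={{width: 200, opacity: 0.5}} />
 *   // serializes to style="width:200px;opacity:0.5;" - plain numbers get a
 *   // default "px" suffix unless the property is whitelisted as unitless
 *   // (see CSSProperty.isUnitlessNumber further below).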
* * @constructor ReactDOMComponent * @extends ReactMultiChild */ function ReactDOMComponent(element) { var tag = element.type; validateDangerousTag(tag); this._currentElement = element; this._tag = tag.toLowerCase(); this._namespaceURI = null; this._renderedChildren = null; this._previousStyle = null; this._previousStyleCopy = null; this._hostNode = null; this._hostParent = null; this._rootNodeID = 0; this._domID = 0; this._hostContainerInfo = null; this._wrapperState = null; this._topLevelWrapper = null; this._flags = 0; if (false) { this._ancestorInfo = null; setAndValidateContentChildDev.call(this, null); } } ReactDOMComponent.displayName = 'ReactDOMComponent'; ReactDOMComponent.Mixin = { /** * Generates root tag markup then recurses. This method has side effects and * is not idempotent. * * @internal * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @param {?ReactDOMComponent} the parent component instance * @param {?object} info about the host container * @param {object} context * @return {string} The computed markup. */ mountComponent: function (transaction, hostParent, hostContainerInfo, context) { this._rootNodeID = globalIdCounter++; this._domID = hostContainerInfo._idCounter++; this._hostParent = hostParent; this._hostContainerInfo = hostContainerInfo; var props = this._currentElement.props; switch (this._tag) { case 'audio': case 'form': case 'iframe': case 'img': case 'link': case 'object': case 'source': case 'video': this._wrapperState = { listeners: null }; transaction.getReactMountReady().enqueue(trapBubbledEventsLocal, this); break; case 'input': ReactDOMInput.mountWrapper(this, props, hostParent); props = ReactDOMInput.getHostProps(this, props); transaction.getReactMountReady().enqueue(trapBubbledEventsLocal, this); break; case 'option': ReactDOMOption.mountWrapper(this, props, hostParent); props = ReactDOMOption.getHostProps(this, props); break; case 'select': ReactDOMSelect.mountWrapper(this, props, hostParent); props = ReactDOMSelect.getHostProps(this, props); transaction.getReactMountReady().enqueue(trapBubbledEventsLocal, this); break; case 'textarea': ReactDOMTextarea.mountWrapper(this, props, hostParent); props = ReactDOMTextarea.getHostProps(this, props); transaction.getReactMountReady().enqueue(trapBubbledEventsLocal, this); break; } assertValidProps(this, props); // We create tags in the namespace of their parent container, except HTML // tags get no namespace. 
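 // Editor's illustrative summary of the namespace rules applied below (not
 // part of the upstream source): an <svg> child of an HTML parent switches to
 // the SVG namespace, <math> switches to MathML, and anything under SVG's
 // <foreignObject> falls back to HTML, e.g.
 //   ownerDocument.createElementNS('http://www.w3.org/2000/svg', 'circle')
 // is used instead of ownerDocument.createElement('circle').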
var namespaceURI; var parentTag; if (hostParent != null) { namespaceURI = hostParent._namespaceURI; parentTag = hostParent._tag; } else if (hostContainerInfo._tag) { namespaceURI = hostContainerInfo._namespaceURI; parentTag = hostContainerInfo._tag; } if (namespaceURI == null || namespaceURI === DOMNamespaces.svg && parentTag === 'foreignobject') { namespaceURI = DOMNamespaces.html; } if (namespaceURI === DOMNamespaces.html) { if (this._tag === 'svg') { namespaceURI = DOMNamespaces.svg; } else if (this._tag === 'math') { namespaceURI = DOMNamespaces.mathml; } } this._namespaceURI = namespaceURI; if (false) { var parentInfo; if (hostParent != null) { parentInfo = hostParent._ancestorInfo; } else if (hostContainerInfo._tag) { parentInfo = hostContainerInfo._ancestorInfo; } if (parentInfo) { // parentInfo should always be present except for the top-level // component when server rendering validateDOMNesting(this._tag, null, this, parentInfo); } this._ancestorInfo = validateDOMNesting.updatedAncestorInfo(parentInfo, this._tag, this); } var mountImage; if (transaction.useCreateElement) { var ownerDocument = hostContainerInfo._ownerDocument; var el; if (namespaceURI === DOMNamespaces.html) { if (this._tag === 'script') { // Create the script via .innerHTML so its "parser-inserted" flag is // set to true and it does not execute var div = ownerDocument.createElement('div'); var type = this._currentElement.type; div.innerHTML = '<' + type + '></' + type + '>'; el = div.removeChild(div.firstChild); } else if (props.is) { el = ownerDocument.createElement(this._currentElement.type, props.is); } else { // Separate else branch instead of using `props.is || undefined` above becuase of a Firefox bug. // See discussion in https://github.com/facebook/react/pull/6896 // and discussion in https://bugzilla.mozilla.org/show_bug.cgi?id=1276240 el = ownerDocument.createElement(this._currentElement.type); } } else { el = ownerDocument.createElementNS(namespaceURI, this._currentElement.type); } ReactDOMComponentTree.precacheNode(this, el); this._flags |= Flags.hasCachedChildNodes; if (!this._hostParent) { DOMPropertyOperations.setAttributeForRoot(el); } this._updateDOMProperties(null, props, transaction); var lazyTree = DOMLazyTree(el); this._createInitialChildren(transaction, props, context, lazyTree); mountImage = lazyTree; } else { var tagOpen = this._createOpenTagMarkupAndPutListeners(transaction, props); var tagContent = this._createContentMarkup(transaction, props, context); if (!tagContent && omittedCloseTags[this._tag]) { mountImage = tagOpen + '/>'; } else { mountImage = tagOpen + '>' + tagContent + '</' + this._currentElement.type + '>'; } } switch (this._tag) { case 'input': transaction.getReactMountReady().enqueue(inputPostMount, this); if (props.autoFocus) { transaction.getReactMountReady().enqueue(AutoFocusUtils.focusDOMComponent, this); } break; case 'textarea': transaction.getReactMountReady().enqueue(textareaPostMount, this); if (props.autoFocus) { transaction.getReactMountReady().enqueue(AutoFocusUtils.focusDOMComponent, this); } break; case 'select': if (props.autoFocus) { transaction.getReactMountReady().enqueue(AutoFocusUtils.focusDOMComponent, this); } break; case 'button': if (props.autoFocus) { transaction.getReactMountReady().enqueue(AutoFocusUtils.focusDOMComponent, this); } break; case 'option': transaction.getReactMountReady().enqueue(optionPostMount, this); break; } return mountImage; }, /** * Creates markup for the open tag and all attributes. 
* * This method has side effects because events get registered. * * Iterating over object properties is faster than iterating over arrays. * @see http://jsperf.com/obj-vs-arr-iteration * * @private * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @param {object} props * @return {string} Markup of opening tag. */ _createOpenTagMarkupAndPutListeners: function (transaction, props) { var ret = '<' + this._currentElement.type; for (var propKey in props) { if (!props.hasOwnProperty(propKey)) { continue; } var propValue = props[propKey]; if (propValue == null) { continue; } if (registrationNameModules.hasOwnProperty(propKey)) { if (propValue) { enqueuePutListener(this, propKey, propValue, transaction); } } else { if (propKey === STYLE) { if (propValue) { if (false) { // See `_updateDOMProperties`. style block this._previousStyle = propValue; } propValue = this._previousStyleCopy = _assign({}, props.style); } propValue = CSSPropertyOperations.createMarkupForStyles(propValue, this); } var markup = null; if (this._tag != null && isCustomComponent(this._tag, props)) { if (!RESERVED_PROPS.hasOwnProperty(propKey)) { markup = DOMPropertyOperations.createMarkupForCustomAttribute(propKey, propValue); } } else { markup = DOMPropertyOperations.createMarkupForProperty(propKey, propValue); } if (markup) { ret += ' ' + markup; } } } // For static pages, no need to put React ID and checksum. Saves lots of // bytes. if (transaction.renderToStaticMarkup) { return ret; } if (!this._hostParent) { ret += ' ' + DOMPropertyOperations.createMarkupForRoot(); } ret += ' ' + DOMPropertyOperations.createMarkupForID(this._domID); return ret; }, /** * Creates markup for the content between the tags. * * @private * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @param {object} props * @param {object} context * @return {string} Content markup. */ _createContentMarkup: function (transaction, props, context) { var ret = ''; // Intentional use of != to avoid catching zero/false. var innerHTML = props.dangerouslySetInnerHTML; if (innerHTML != null) { if (innerHTML.__html != null) { ret = innerHTML.__html; } } else { var contentToUse = CONTENT_TYPES[typeof props.children] ? props.children : null; var childrenToUse = contentToUse != null ? null : props.children; if (contentToUse != null) { // TODO: Validate that text is allowed as a child of this node ret = escapeTextContentForBrowser(contentToUse); if (false) { setAndValidateContentChildDev.call(this, contentToUse); } } else if (childrenToUse != null) { var mountImages = this.mountChildren(childrenToUse, transaction, context); ret = mountImages.join(''); } } if (newlineEatingTags[this._tag] && ret.charAt(0) === '\n') { // text/html ignores the first character in these tags if it's a newline // Prefer to break application/xml over text/html (for now) by adding // a newline specifically to get eaten by the parser. (Alternately for // textareas, replacing "^\n" with "\r\n" doesn't get eaten, and the first // \r is normalized out by HTMLTextAreaElement#value.) 
// See: <http://www.w3.org/TR/html-polyglot/#newlines-in-textarea-and-pre> // See: <http://www.w3.org/TR/html5/syntax.html#element-restrictions> // See: <http://www.w3.org/TR/html5/syntax.html#newlines> // See: Parsing of "textarea" "listing" and "pre" elements // from <http://www.w3.org/TR/html5/syntax.html#parsing-main-inbody> return '\n' + ret; } else { return ret; } }, _createInitialChildren: function (transaction, props, context, lazyTree) { // Intentional use of != to avoid catching zero/false. var innerHTML = props.dangerouslySetInnerHTML; if (innerHTML != null) { if (innerHTML.__html != null) { DOMLazyTree.queueHTML(lazyTree, innerHTML.__html); } } else { var contentToUse = CONTENT_TYPES[typeof props.children] ? props.children : null; var childrenToUse = contentToUse != null ? null : props.children; // TODO: Validate that text is allowed as a child of this node if (contentToUse != null) { // Avoid setting textContent when the text is empty. In IE11 setting // textContent on a text area will cause the placeholder to not // show within the textarea until it has been focused and blurred again. // https://github.com/facebook/react/issues/6731#issuecomment-254874553 if (contentToUse !== '') { if (false) { setAndValidateContentChildDev.call(this, contentToUse); } DOMLazyTree.queueText(lazyTree, contentToUse); } } else if (childrenToUse != null) { var mountImages = this.mountChildren(childrenToUse, transaction, context); for (var i = 0; i < mountImages.length; i++) { DOMLazyTree.queueChild(lazyTree, mountImages[i]); } } } }, /** * Receives a next element and updates the component. * * @internal * @param {ReactElement} nextElement * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @param {object} context */ receiveComponent: function (nextElement, transaction, context) { var prevElement = this._currentElement; this._currentElement = nextElement; this.updateComponent(transaction, prevElement, nextElement, context); }, /** * Updates a DOM component after it has already been allocated and * attached to the DOM. Reconciles the root DOM node, then recurses. * * @param {ReactReconcileTransaction} transaction * @param {ReactElement} prevElement * @param {ReactElement} nextElement * @internal * @overridable */ updateComponent: function (transaction, prevElement, nextElement, context) { var lastProps = prevElement.props; var nextProps = this._currentElement.props; switch (this._tag) { case 'input': lastProps = ReactDOMInput.getHostProps(this, lastProps); nextProps = ReactDOMInput.getHostProps(this, nextProps); break; case 'option': lastProps = ReactDOMOption.getHostProps(this, lastProps); nextProps = ReactDOMOption.getHostProps(this, nextProps); break; case 'select': lastProps = ReactDOMSelect.getHostProps(this, lastProps); nextProps = ReactDOMSelect.getHostProps(this, nextProps); break; case 'textarea': lastProps = ReactDOMTextarea.getHostProps(this, lastProps); nextProps = ReactDOMTextarea.getHostProps(this, nextProps); break; } assertValidProps(this, nextProps); this._updateDOMProperties(lastProps, nextProps, transaction); this._updateDOMChildren(lastProps, nextProps, transaction, context); switch (this._tag) { case 'input': // Update the wrapper around inputs *after* updating props. This has to // happen after `_updateDOMProperties`. Otherwise HTML5 input validations // raise warnings and prevent the new value from being assigned. 
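 // (Editor's illustrative note, not part of the upstream source: for a
 // controlled <input value={text} onChange={...} />, the attribute/property
 // diff above runs first, and the input wrapper update below then
 // re-synchronizes the live node's value.)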
ReactDOMInput.updateWrapper(this); break; case 'textarea': ReactDOMTextarea.updateWrapper(this); break; case 'select': // <select> value update needs to occur after <option> children // reconciliation transaction.getReactMountReady().enqueue(postUpdateSelectWrapper, this); break; } }, /** * Reconciles the properties by detecting differences in property values and * updating the DOM as necessary. This function is probably the single most * critical path for performance optimization. * * TODO: Benchmark whether checking for changed values in memory actually * improves performance (especially statically positioned elements). * TODO: Benchmark the effects of putting this at the top since 99% of props * do not change for a given reconciliation. * TODO: Benchmark areas that can be improved with caching. * * @private * @param {object} lastProps * @param {object} nextProps * @param {?DOMElement} node */ _updateDOMProperties: function (lastProps, nextProps, transaction) { var propKey; var styleName; var styleUpdates; for (propKey in lastProps) { if (nextProps.hasOwnProperty(propKey) || !lastProps.hasOwnProperty(propKey) || lastProps[propKey] == null) { continue; } if (propKey === STYLE) { var lastStyle = this._previousStyleCopy; for (styleName in lastStyle) { if (lastStyle.hasOwnProperty(styleName)) { styleUpdates = styleUpdates || {}; styleUpdates[styleName] = ''; } } this._previousStyleCopy = null; } else if (registrationNameModules.hasOwnProperty(propKey)) { if (lastProps[propKey]) { // Only call deleteListener if there was a listener previously or // else willDeleteListener gets called when there wasn't actually a // listener (e.g., onClick={null}) deleteListener(this, propKey); } } else if (isCustomComponent(this._tag, lastProps)) { if (!RESERVED_PROPS.hasOwnProperty(propKey)) { DOMPropertyOperations.deleteValueForAttribute(getNode(this), propKey); } } else if (DOMProperty.properties[propKey] || DOMProperty.isCustomAttribute(propKey)) { DOMPropertyOperations.deleteValueForProperty(getNode(this), propKey); } } for (propKey in nextProps) { var nextProp = nextProps[propKey]; var lastProp = propKey === STYLE ? this._previousStyleCopy : lastProps != null ? lastProps[propKey] : undefined; if (!nextProps.hasOwnProperty(propKey) || nextProp === lastProp || nextProp == null && lastProp == null) { continue; } if (propKey === STYLE) { if (nextProp) { if (false) { checkAndWarnForMutatedStyle(this._previousStyleCopy, this._previousStyle, this); this._previousStyle = nextProp; } nextProp = this._previousStyleCopy = _assign({}, nextProp); } else { this._previousStyleCopy = null; } if (lastProp) { // Unset styles on `lastProp` but not on `nextProp`. for (styleName in lastProp) { if (lastProp.hasOwnProperty(styleName) && (!nextProp || !nextProp.hasOwnProperty(styleName))) { styleUpdates = styleUpdates || {}; styleUpdates[styleName] = ''; } } // Update styles that changed since `lastProp`. for (styleName in nextProp) { if (nextProp.hasOwnProperty(styleName) && lastProp[styleName] !== nextProp[styleName]) { styleUpdates = styleUpdates || {}; styleUpdates[styleName] = nextProp[styleName]; } } } else { // Relies on `updateStylesByID` not mutating `styleUpdates`. 
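 // (Editor's illustrative note, not part of the upstream source: this branch
 // means there was no previous inline style, so the entire next style object
 // is queued as-is; e.g. adding style={{color: 'blue'}} to an element that had
 // no style queues {color: 'blue'} for setValueForStyles at the end of this
 // method.)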
styleUpdates = nextProp; } } else if (registrationNameModules.hasOwnProperty(propKey)) { if (nextProp) { enqueuePutListener(this, propKey, nextProp, transaction); } else if (lastProp) { deleteListener(this, propKey); } } else if (isCustomComponent(this._tag, nextProps)) { if (!RESERVED_PROPS.hasOwnProperty(propKey)) { DOMPropertyOperations.setValueForAttribute(getNode(this), propKey, nextProp); } } else if (DOMProperty.properties[propKey] || DOMProperty.isCustomAttribute(propKey)) { var node = getNode(this); // If we're updating to null or undefined, we should remove the property // from the DOM node instead of inadvertently setting to a string. This // brings us in line with the same behavior we have on initial render. if (nextProp != null) { DOMPropertyOperations.setValueForProperty(node, propKey, nextProp); } else { DOMPropertyOperations.deleteValueForProperty(node, propKey); } } } if (styleUpdates) { CSSPropertyOperations.setValueForStyles(getNode(this), styleUpdates, this); } }, /** * Reconciles the children with the various properties that affect the * children content. * * @param {object} lastProps * @param {object} nextProps * @param {ReactReconcileTransaction} transaction * @param {object} context */ _updateDOMChildren: function (lastProps, nextProps, transaction, context) { var lastContent = CONTENT_TYPES[typeof lastProps.children] ? lastProps.children : null; var nextContent = CONTENT_TYPES[typeof nextProps.children] ? nextProps.children : null; var lastHtml = lastProps.dangerouslySetInnerHTML && lastProps.dangerouslySetInnerHTML.__html; var nextHtml = nextProps.dangerouslySetInnerHTML && nextProps.dangerouslySetInnerHTML.__html; // Note the use of `!=` which checks for null or undefined. var lastChildren = lastContent != null ? null : lastProps.children; var nextChildren = nextContent != null ? null : nextProps.children; // If we're switching from children to content/html or vice versa, remove // the old content var lastHasContentOrHtml = lastContent != null || lastHtml != null; var nextHasContentOrHtml = nextContent != null || nextHtml != null; if (lastChildren != null && nextChildren == null) { this.updateChildren(null, transaction, context); } else if (lastHasContentOrHtml && !nextHasContentOrHtml) { this.updateTextContent(''); if (false) { ReactInstrumentation.debugTool.onSetChildren(this._debugID, []); } } if (nextContent != null) { if (lastContent !== nextContent) { this.updateTextContent('' + nextContent); if (false) { setAndValidateContentChildDev.call(this, nextContent); } } } else if (nextHtml != null) { if (lastHtml !== nextHtml) { this.updateMarkup('' + nextHtml); } if (false) { ReactInstrumentation.debugTool.onSetChildren(this._debugID, []); } } else if (nextChildren != null) { if (false) { setAndValidateContentChildDev.call(this, null); } this.updateChildren(nextChildren, transaction, context); } }, getHostNode: function () { return getNode(this); }, /** * Destroys all event registrations for this instance. Does not remove from * the DOM. That must be done by the parent. 
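 *
 * For example (editor's illustrative note, not part of the upstream source):
 * unmounting a <video> removes the media-event listeners that
 * trapBubbledEventsLocal attached at mount time, while the DOM node itself is
 * left for the parent to detach.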
* * @internal */ unmountComponent: function (safely) { switch (this._tag) { case 'audio': case 'form': case 'iframe': case 'img': case 'link': case 'object': case 'source': case 'video': var listeners = this._wrapperState.listeners; if (listeners) { for (var i = 0; i < listeners.length; i++) { listeners[i].remove(); } } break; case 'html': case 'head': case 'body': /** * Components like <html> <head> and <body> can't be removed or added * easily in a cross-browser way, however it's valuable to be able to * take advantage of React's reconciliation for styling and <title> * management. So we just document it and throw in dangerous cases. */ true ? false ? invariant(false, '<%s> tried to unmount. Because of cross-browser quirks it is impossible to unmount some top-level components (eg <html>, <head>, and <body>) reliably and efficiently. To fix this, have a single top-level component that never unmounts render these elements.', this._tag) : _prodInvariant('66', this._tag) : void 0; break; } this.unmountChildren(safely); ReactDOMComponentTree.uncacheNode(this); EventPluginHub.deleteAllListeners(this); this._rootNodeID = 0; this._domID = 0; this._wrapperState = null; if (false) { setAndValidateContentChildDev.call(this, null); } }, getPublicInstance: function () { return getNode(this); } }; _assign(ReactDOMComponent.prototype, ReactDOMComponent.Mixin, ReactMultiChild.Mixin); module.exports = ReactDOMComponent; /***/ }, /* 85 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactDOMComponentTree = __webpack_require__(31); var focusNode = __webpack_require__(86); var AutoFocusUtils = { focusDOMComponent: function () { focusNode(ReactDOMComponentTree.getNodeFromInstance(this)); } }; module.exports = AutoFocusUtils; /***/ }, /* 86 */ /***/ function(module, exports) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * @param {DOMElement} node input/textarea to focus */ function focusNode(node) { // IE8 can throw "Can't move focus to the control because it is invisible, // not enabled, or of a type that does not accept the focus." for all kinds of // reasons that are too expensive and fragile to test. try { node.focus(); } catch (e) {} } module.exports = focusNode; /***/ }, /* 87 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var CSSProperty = __webpack_require__(88); var ExecutionEnvironment = __webpack_require__(45); var ReactInstrumentation = __webpack_require__(59); var camelizeStyleName = __webpack_require__(89); var dangerousStyleValue = __webpack_require__(91); var hyphenateStyleName = __webpack_require__(92); var memoizeStringOnly = __webpack_require__(94); var warning = __webpack_require__(11); var processStyleName = memoizeStringOnly(function (styleName) { return hyphenateStyleName(styleName); }); var hasShorthandPropertyBug = false; var styleFloatAccessor = 'cssFloat'; if (ExecutionEnvironment.canUseDOM) { var tempStyle = document.createElement('div').style; try { // IE8 throws "Invalid argument." if resetting shorthand style properties. tempStyle.font = ''; } catch (e) { hasShorthandPropertyBug = true; } // IE8 only supports accessing cssFloat (standard) as styleFloat if (document.documentElement.style.cssFloat === undefined) { styleFloatAccessor = 'styleFloat'; } } if (false) { // 'msTransform' is correct, but the other prefixes should be capitalized var badVendoredStyleNamePattern = /^(?:webkit|moz|o)[A-Z]/; // style values shouldn't contain a semicolon var badStyleValueWithSemicolonPattern = /;\s*$/; var warnedStyleNames = {}; var warnedStyleValues = {}; var warnedForNaNValue = false; var warnHyphenatedStyleName = function (name, owner) { if (warnedStyleNames.hasOwnProperty(name) && warnedStyleNames[name]) { return; } warnedStyleNames[name] = true; process.env.NODE_ENV !== 'production' ? warning(false, 'Unsupported style property %s. Did you mean %s?%s', name, camelizeStyleName(name), checkRenderMessage(owner)) : void 0; }; var warnBadVendoredStyleName = function (name, owner) { if (warnedStyleNames.hasOwnProperty(name) && warnedStyleNames[name]) { return; } warnedStyleNames[name] = true; process.env.NODE_ENV !== 'production' ? warning(false, 'Unsupported vendor-prefixed style property %s. Did you mean %s?%s', name, name.charAt(0).toUpperCase() + name.slice(1), checkRenderMessage(owner)) : void 0; }; var warnStyleValueWithSemicolon = function (name, value, owner) { if (warnedStyleValues.hasOwnProperty(value) && warnedStyleValues[value]) { return; } warnedStyleValues[value] = true; process.env.NODE_ENV !== 'production' ? warning(false, 'Style property values shouldn\'t contain a semicolon.%s ' + 'Try "%s: %s" instead.', checkRenderMessage(owner), name, value.replace(badStyleValueWithSemicolonPattern, '')) : void 0; }; var warnStyleValueIsNaN = function (name, value, owner) { if (warnedForNaNValue) { return; } warnedForNaNValue = true; process.env.NODE_ENV !== 'production' ? 
warning(false, '`NaN` is an invalid value for the `%s` css style property.%s', name, checkRenderMessage(owner)) : void 0; }; var checkRenderMessage = function (owner) { if (owner) { var name = owner.getName(); if (name) { return ' Check the render method of `' + name + '`.'; } } return ''; }; /** * @param {string} name * @param {*} value * @param {ReactDOMComponent} component */ var warnValidStyle = function (name, value, component) { var owner; if (component) { owner = component._currentElement._owner; } if (name.indexOf('-') > -1) { warnHyphenatedStyleName(name, owner); } else if (badVendoredStyleNamePattern.test(name)) { warnBadVendoredStyleName(name, owner); } else if (badStyleValueWithSemicolonPattern.test(value)) { warnStyleValueWithSemicolon(name, value, owner); } if (typeof value === 'number' && isNaN(value)) { warnStyleValueIsNaN(name, value, owner); } }; } /** * Operations for dealing with CSS properties. */ var CSSPropertyOperations = { /** * Serializes a mapping of style properties for use as inline styles: * * > createMarkupForStyles({width: '200px', height: 0}) * "width:200px;height:0;" * * Undefined values are ignored so that declarative programming is easier. * The result should be HTML-escaped before insertion into the DOM. * * @param {object} styles * @param {ReactDOMComponent} component * @return {?string} */ createMarkupForStyles: function (styles, component) { var serialized = ''; for (var styleName in styles) { if (!styles.hasOwnProperty(styleName)) { continue; } var styleValue = styles[styleName]; if (false) { warnValidStyle(styleName, styleValue, component); } if (styleValue != null) { serialized += processStyleName(styleName) + ':'; serialized += dangerousStyleValue(styleName, styleValue, component) + ';'; } } return serialized || null; }, /** * Sets the value for multiple styles on a node. If a value is specified as * '' (empty string), the corresponding style property will be unset. * * @param {DOMElement} node * @param {object} styles * @param {ReactDOMComponent} component */ setValueForStyles: function (node, styles, component) { if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: component._debugID, type: 'update styles', payload: styles }); } var style = node.style; for (var styleName in styles) { if (!styles.hasOwnProperty(styleName)) { continue; } if (false) { warnValidStyle(styleName, styles[styleName], component); } var styleValue = dangerousStyleValue(styleName, styles[styleName], component); if (styleName === 'float' || styleName === 'cssFloat') { styleName = styleFloatAccessor; } if (styleValue) { style[styleName] = styleValue; } else { var expansion = hasShorthandPropertyBug && CSSProperty.shorthandPropertyExpansions[styleName]; if (expansion) { // Shorthand property that IE8 won't like unsetting, so unset each // component to placate it for (var individualStyleName in expansion) { style[individualStyleName] = ''; } } else { style[styleName] = ''; } } } } }; module.exports = CSSPropertyOperations; /***/ }, /* 88 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * CSS properties which accept numbers but are not in units of "px". 
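 *
 * For example (editor's illustrative note, not part of the upstream source):
 * {opacity: 0.5, lineHeight: 1.2} serializes to "opacity:0.5;line-height:1.2;"
 * whereas {width: 10} still receives the default unit and becomes "width:10px;".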
*/ var isUnitlessNumber = { animationIterationCount: true, borderImageOutset: true, borderImageSlice: true, borderImageWidth: true, boxFlex: true, boxFlexGroup: true, boxOrdinalGroup: true, columnCount: true, flex: true, flexGrow: true, flexPositive: true, flexShrink: true, flexNegative: true, flexOrder: true, gridRow: true, gridColumn: true, fontWeight: true, lineClamp: true, lineHeight: true, opacity: true, order: true, orphans: true, tabSize: true, widows: true, zIndex: true, zoom: true, // SVG-related properties fillOpacity: true, floodOpacity: true, stopOpacity: true, strokeDasharray: true, strokeDashoffset: true, strokeMiterlimit: true, strokeOpacity: true, strokeWidth: true }; /** * @param {string} prefix vendor-specific prefix, eg: Webkit * @param {string} key style name, eg: transitionDuration * @return {string} style name prefixed with `prefix`, properly camelCased, eg: * WebkitTransitionDuration */ function prefixKey(prefix, key) { return prefix + key.charAt(0).toUpperCase() + key.substring(1); } /** * Support style names that may come passed in prefixed by adding permutations * of vendor prefixes. */ var prefixes = ['Webkit', 'ms', 'Moz', 'O']; // Using Object.keys here, or else the vanilla for-in loop makes IE8 go into an // infinite loop, because it iterates over the newly added props too. Object.keys(isUnitlessNumber).forEach(function (prop) { prefixes.forEach(function (prefix) { isUnitlessNumber[prefixKey(prefix, prop)] = isUnitlessNumber[prop]; }); }); /** * Most style properties can be unset by doing .style[prop] = '' but IE8 * doesn't like doing that with shorthand properties so for the properties that * IE8 breaks on, which are listed here, we instead unset each of the * individual properties. See http://bugs.jquery.com/ticket/12385. * The 4-value 'clock' properties like margin, padding, border-width seem to * behave without any problems. Curiously, list-style works too without any * special prodding. */ var shorthandPropertyExpansions = { background: { backgroundAttachment: true, backgroundColor: true, backgroundImage: true, backgroundPositionX: true, backgroundPositionY: true, backgroundRepeat: true }, backgroundPosition: { backgroundPositionX: true, backgroundPositionY: true }, border: { borderWidth: true, borderStyle: true, borderColor: true }, borderBottom: { borderBottomWidth: true, borderBottomStyle: true, borderBottomColor: true }, borderLeft: { borderLeftWidth: true, borderLeftStyle: true, borderLeftColor: true }, borderRight: { borderRightWidth: true, borderRightStyle: true, borderRightColor: true }, borderTop: { borderTopWidth: true, borderTopStyle: true, borderTopColor: true }, font: { fontStyle: true, fontVariant: true, fontWeight: true, fontSize: true, lineHeight: true, fontFamily: true }, outline: { outlineWidth: true, outlineStyle: true, outlineColor: true } }; var CSSProperty = { isUnitlessNumber: isUnitlessNumber, shorthandPropertyExpansions: shorthandPropertyExpansions }; module.exports = CSSProperty; /***/ }, /* 89 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* * @typechecks */ 'use strict'; var camelize = __webpack_require__(90); var msPattern = /^-ms-/; /** * Camelcases a hyphenated CSS property name, for example: * * > camelizeStyleName('background-color') * < "backgroundColor" * > camelizeStyleName('-moz-transition') * < "MozTransition" * > camelizeStyleName('-ms-transition') * < "msTransition" * * As Andi Smith suggests * (http://www.andismith.com/blog/2012/02/modernizr-prefixed/), an `-ms` prefix * is converted to lowercase `ms`. * * @param {string} string * @return {string} */ function camelizeStyleName(string) { return camelize(string.replace(msPattern, 'ms-')); } module.exports = camelizeStyleName; /***/ }, /* 90 */ /***/ function(module, exports) { "use strict"; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ var _hyphenPattern = /-(.)/g; /** * Camelcases a hyphenated string, for example: * * > camelize('background-color') * < "backgroundColor" * * @param {string} string * @return {string} */ function camelize(string) { return string.replace(_hyphenPattern, function (_, character) { return character.toUpperCase(); }); } module.exports = camelize; /***/ }, /* 91 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var CSSProperty = __webpack_require__(88); var warning = __webpack_require__(11); var isUnitlessNumber = CSSProperty.isUnitlessNumber; var styleWarnings = {}; /** * Convert a value into the proper css writable value. The style name `name` * should be logical (no hyphens), as specified * in `CSSProperty.isUnitlessNumber`. * * @param {string} name CSS property name such as `topMargin`. * @param {*} value CSS property value such as `10px`. * @param {ReactDOMComponent} component * @return {string} Normalized style value with dimensions applied. */ function dangerousStyleValue(name, value, component) { // Note that we've removed escapeTextForBrowser() calls here since the // whole string will be escaped when the attribute is injected into // the markup. If you provide unsafe user data here they can inject // arbitrary CSS which may be problematic (I couldn't repro this): // https://www.owasp.org/index.php/XSS_Filter_Evasion_Cheat_Sheet // http://www.thespanner.co.uk/2007/11/26/ultimate-xss-css-injection/ // This is not an XSS hole but instead a potential CSS injection issue // which has lead to a greater discussion about how we're going to // trust URLs moving forward. See #2115901 var isEmpty = value == null || typeof value === 'boolean' || value === ''; if (isEmpty) { return ''; } var isNonNumeric = isNaN(value); if (isNonNumeric || value === 0 || isUnitlessNumber.hasOwnProperty(name) && isUnitlessNumber[name]) { return '' + value; // cast to string } if (typeof value === 'string') { if (false) { // Allow '0' to pass through without warning. 0 is already special and // doesn't require units, so we don't need to warn about it. if (component && value !== '0') { var owner = component._currentElement._owner; var ownerName = owner ? 
owner.getName() : null; if (ownerName && !styleWarnings[ownerName]) { styleWarnings[ownerName] = {}; } var warned = false; if (ownerName) { var warnings = styleWarnings[ownerName]; warned = warnings[name]; if (!warned) { warnings[name] = true; } } if (!warned) { process.env.NODE_ENV !== 'production' ? warning(false, 'a `%s` tag (owner: `%s`) was passed a numeric string value ' + 'for CSS property `%s` (value: `%s`) which will be treated ' + 'as a unitless number in a future version of React.', component._currentElement.type, ownerName || 'unknown', name, value) : void 0; } } } value = value.trim(); } return value + 'px'; } module.exports = dangerousStyleValue; /***/ }, /* 92 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ 'use strict'; var hyphenate = __webpack_require__(93); var msPattern = /^ms-/; /** * Hyphenates a camelcased CSS property name, for example: * * > hyphenateStyleName('backgroundColor') * < "background-color" * > hyphenateStyleName('MozTransition') * < "-moz-transition" * > hyphenateStyleName('msTransition') * < "-ms-transition" * * As Modernizr suggests (http://modernizr.com/docs/#prefixed), an `ms` prefix * is converted to `-ms-`. * * @param {string} string * @return {string} */ function hyphenateStyleName(string) { return hyphenate(string).replace(msPattern, '-ms-'); } module.exports = hyphenateStyleName; /***/ }, /* 93 */ /***/ function(module, exports) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ var _uppercasePattern = /([A-Z])/g; /** * Hyphenates a camelcased string, for example: * * > hyphenate('backgroundColor') * < "background-color" * * For CSS style names, use `hyphenateStyleName` instead which works properly * with all vendor prefixes, including `ms`. * * @param {string} string * @return {string} */ function hyphenate(string) { return string.replace(_uppercasePattern, '-$1').toLowerCase(); } module.exports = hyphenate; /***/ }, /* 94 */ /***/ function(module, exports) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * * @typechecks static-only */ 'use strict'; /** * Memoizes the return value of a function that accepts one string argument. */ function memoizeStringOnly(callback) { var cache = {}; return function (string) { if (!cache.hasOwnProperty(string)) { cache[string] = callback.call(this, string); } return cache[string]; }; } module.exports = memoizeStringOnly; /***/ }, /* 95 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. 
An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMProperty = __webpack_require__(33); var ReactDOMComponentTree = __webpack_require__(31); var ReactInstrumentation = __webpack_require__(59); var quoteAttributeValueForBrowser = __webpack_require__(96); var warning = __webpack_require__(11); var VALID_ATTRIBUTE_NAME_REGEX = new RegExp('^[' + DOMProperty.ATTRIBUTE_NAME_START_CHAR + '][' + DOMProperty.ATTRIBUTE_NAME_CHAR + ']*$'); var illegalAttributeNameCache = {}; var validatedAttributeNameCache = {}; function isAttributeNameSafe(attributeName) { if (validatedAttributeNameCache.hasOwnProperty(attributeName)) { return true; } if (illegalAttributeNameCache.hasOwnProperty(attributeName)) { return false; } if (VALID_ATTRIBUTE_NAME_REGEX.test(attributeName)) { validatedAttributeNameCache[attributeName] = true; return true; } illegalAttributeNameCache[attributeName] = true; false ? warning(false, 'Invalid attribute name: `%s`', attributeName) : void 0; return false; } function shouldIgnoreValue(propertyInfo, value) { return value == null || propertyInfo.hasBooleanValue && !value || propertyInfo.hasNumericValue && isNaN(value) || propertyInfo.hasPositiveNumericValue && value < 1 || propertyInfo.hasOverloadedBooleanValue && value === false; } /** * Operations for dealing with DOM properties. */ var DOMPropertyOperations = { /** * Creates markup for the ID property. * * @param {string} id Unescaped ID. * @return {string} Markup string. */ createMarkupForID: function (id) { return DOMProperty.ID_ATTRIBUTE_NAME + '=' + quoteAttributeValueForBrowser(id); }, setAttributeForID: function (node, id) { node.setAttribute(DOMProperty.ID_ATTRIBUTE_NAME, id); }, createMarkupForRoot: function () { return DOMProperty.ROOT_ATTRIBUTE_NAME + '=""'; }, setAttributeForRoot: function (node) { node.setAttribute(DOMProperty.ROOT_ATTRIBUTE_NAME, ''); }, /** * Creates markup for a property. * * @param {string} name * @param {*} value * @return {?string} Markup string, or null if the property was invalid. */ createMarkupForProperty: function (name, value) { var propertyInfo = DOMProperty.properties.hasOwnProperty(name) ? DOMProperty.properties[name] : null; if (propertyInfo) { if (shouldIgnoreValue(propertyInfo, value)) { return ''; } var attributeName = propertyInfo.attributeName; if (propertyInfo.hasBooleanValue || propertyInfo.hasOverloadedBooleanValue && value === true) { return attributeName + '=""'; } return attributeName + '=' + quoteAttributeValueForBrowser(value); } else if (DOMProperty.isCustomAttribute(name)) { if (value == null) { return ''; } return name + '=' + quoteAttributeValueForBrowser(value); } return null; }, /** * Creates markup for a custom property. * * @param {string} name * @param {*} value * @return {string} Markup string, or empty string if the property was invalid. */ createMarkupForCustomAttribute: function (name, value) { if (!isAttributeNameSafe(name) || value == null) { return ''; } return name + '=' + quoteAttributeValueForBrowser(value); }, /** * Sets the value for a property on a node. * * @param {DOMElement} node * @param {string} name * @param {*} value */ setValueForProperty: function (node, name, value) { var propertyInfo = DOMProperty.properties.hasOwnProperty(name) ? 
DOMProperty.properties[name] : null; if (propertyInfo) { var mutationMethod = propertyInfo.mutationMethod; if (mutationMethod) { mutationMethod(node, value); } else if (shouldIgnoreValue(propertyInfo, value)) { this.deleteValueForProperty(node, name); return; } else if (propertyInfo.mustUseProperty) { // Contrary to `setAttribute`, object properties are properly // `toString`ed by IE8/9. node[propertyInfo.propertyName] = value; } else { var attributeName = propertyInfo.attributeName; var namespace = propertyInfo.attributeNamespace; // `setAttribute` with objects becomes only `[object]` in IE8/9, // ('' + value) makes it output the correct toString()-value. if (namespace) { node.setAttributeNS(namespace, attributeName, '' + value); } else if (propertyInfo.hasBooleanValue || propertyInfo.hasOverloadedBooleanValue && value === true) { node.setAttribute(attributeName, ''); } else { node.setAttribute(attributeName, '' + value); } } } else if (DOMProperty.isCustomAttribute(name)) { DOMPropertyOperations.setValueForAttribute(node, name, value); return; } if (false) { var payload = {}; payload[name] = value; ReactInstrumentation.debugTool.onHostOperation({ instanceID: ReactDOMComponentTree.getInstanceFromNode(node)._debugID, type: 'update attribute', payload: payload }); } }, setValueForAttribute: function (node, name, value) { if (!isAttributeNameSafe(name)) { return; } if (value == null) { node.removeAttribute(name); } else { node.setAttribute(name, '' + value); } if (false) { var payload = {}; payload[name] = value; ReactInstrumentation.debugTool.onHostOperation({ instanceID: ReactDOMComponentTree.getInstanceFromNode(node)._debugID, type: 'update attribute', payload: payload }); } }, /** * Deletes an attributes from a node. * * @param {DOMElement} node * @param {string} name */ deleteValueForAttribute: function (node, name) { node.removeAttribute(name); if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: ReactDOMComponentTree.getInstanceFromNode(node)._debugID, type: 'remove attribute', payload: name }); } }, /** * Deletes the value for a property on a node. * * @param {DOMElement} node * @param {string} name */ deleteValueForProperty: function (node, name) { var propertyInfo = DOMProperty.properties.hasOwnProperty(name) ? DOMProperty.properties[name] : null; if (propertyInfo) { var mutationMethod = propertyInfo.mutationMethod; if (mutationMethod) { mutationMethod(node, undefined); } else if (propertyInfo.mustUseProperty) { var propName = propertyInfo.propertyName; if (propertyInfo.hasBooleanValue) { node[propName] = false; } else { node[propName] = ''; } } else { node.removeAttribute(propertyInfo.attributeName); } } else if (DOMProperty.isCustomAttribute(name)) { node.removeAttribute(name); } if (false) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: ReactDOMComponentTree.getInstanceFromNode(node)._debugID, type: 'remove attribute', payload: name }); } } }; module.exports = DOMPropertyOperations; /***/ }, /* 96 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var escapeTextContentForBrowser = __webpack_require__(78); /** * Escapes attribute value to prevent scripting attacks. * * @param {*} value Value to escape. 
* @return {string} An escaped string. */ function quoteAttributeValueForBrowser(value) { return '"' + escapeTextContentForBrowser(value) + '"'; } module.exports = quoteAttributeValueForBrowser; /***/ }, /* 97 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var EventPluginRegistry = __webpack_require__(40); var ReactEventEmitterMixin = __webpack_require__(98); var ViewportMetrics = __webpack_require__(68); var getVendorPrefixedEventName = __webpack_require__(99); var isEventSupported = __webpack_require__(62); /** * Summary of `ReactBrowserEventEmitter` event handling: * * - Top-level delegation is used to trap most native browser events. This * may only occur in the main thread and is the responsibility of * ReactEventListener, which is injected and can therefore support pluggable * event sources. This is the only work that occurs in the main thread. * * - We normalize and de-duplicate events to account for browser quirks. This * may be done in the worker thread. * * - Forward these native events (with the associated top-level type used to * trap it) to `EventPluginHub`, which in turn will ask plugins if they want * to extract any synthetic events. * * - The `EventPluginHub` will then process each event by annotating them with * "dispatches", a sequence of listeners and IDs that care about that event. * * - The `EventPluginHub` then dispatches the events. * * Overview of React and the event system: * * +------------+ . * | DOM | . * +------------+ . * | . * v . * +------------+ . * | ReactEvent | . * | Listener | . * +------------+ . +-----------+ * | . +--------+|SimpleEvent| * | . | |Plugin | * +-----|------+ . v +-----------+ * | | | . +--------------+ +------------+ * | +-----------.--->|EventPluginHub| | Event | * | | . | | +-----------+ | Propagators| * | ReactEvent | . | | |TapEvent | |------------| * | Emitter | . | |<---+|Plugin | |other plugin| * | | . | | +-----------+ | utilities | * | +-----------.--->| | +------------+ * | | | . +--------------+ * +-----|------+ . ^ +-----------+ * | . | |Enter/Leave| * + . +-------+|Plugin | * +-------------+ . +-----------+ * | application | . * |-------------| . * | | . * | | . * +-------------+ . * . * React Core . 
General Purpose Event Plugin System */ var hasEventPageXY; var alreadyListeningTo = {}; var isMonitoringScrollValue = false; var reactTopListenersCounter = 0; // For events like 'submit' which don't consistently bubble (which we trap at a // lower node than `document`), binding at `document` would cause duplicate // events so we don't include them here var topEventMapping = { topAbort: 'abort', topAnimationEnd: getVendorPrefixedEventName('animationend') || 'animationend', topAnimationIteration: getVendorPrefixedEventName('animationiteration') || 'animationiteration', topAnimationStart: getVendorPrefixedEventName('animationstart') || 'animationstart', topBlur: 'blur', topCanPlay: 'canplay', topCanPlayThrough: 'canplaythrough', topChange: 'change', topClick: 'click', topCompositionEnd: 'compositionend', topCompositionStart: 'compositionstart', topCompositionUpdate: 'compositionupdate', topContextMenu: 'contextmenu', topCopy: 'copy', topCut: 'cut', topDoubleClick: 'dblclick', topDrag: 'drag', topDragEnd: 'dragend', topDragEnter: 'dragenter', topDragExit: 'dragexit', topDragLeave: 'dragleave', topDragOver: 'dragover', topDragStart: 'dragstart', topDrop: 'drop', topDurationChange: 'durationchange', topEmptied: 'emptied', topEncrypted: 'encrypted', topEnded: 'ended', topError: 'error', topFocus: 'focus', topInput: 'input', topKeyDown: 'keydown', topKeyPress: 'keypress', topKeyUp: 'keyup', topLoadedData: 'loadeddata', topLoadedMetadata: 'loadedmetadata', topLoadStart: 'loadstart', topMouseDown: 'mousedown', topMouseMove: 'mousemove', topMouseOut: 'mouseout', topMouseOver: 'mouseover', topMouseUp: 'mouseup', topPaste: 'paste', topPause: 'pause', topPlay: 'play', topPlaying: 'playing', topProgress: 'progress', topRateChange: 'ratechange', topScroll: 'scroll', topSeeked: 'seeked', topSeeking: 'seeking', topSelectionChange: 'selectionchange', topStalled: 'stalled', topSuspend: 'suspend', topTextInput: 'textInput', topTimeUpdate: 'timeupdate', topTouchCancel: 'touchcancel', topTouchEnd: 'touchend', topTouchMove: 'touchmove', topTouchStart: 'touchstart', topTransitionEnd: getVendorPrefixedEventName('transitionend') || 'transitionend', topVolumeChange: 'volumechange', topWaiting: 'waiting', topWheel: 'wheel' }; /** * To ensure no conflicts with other potential React instances on the page */ var topListenersIDKey = '_reactListenersID' + String(Math.random()).slice(2); function getListeningForDocument(mountAt) { // In IE8, `mountAt` is a host object and doesn't have `hasOwnProperty` // directly. if (!Object.prototype.hasOwnProperty.call(mountAt, topListenersIDKey)) { mountAt[topListenersIDKey] = reactTopListenersCounter++; alreadyListeningTo[mountAt[topListenersIDKey]] = {}; } return alreadyListeningTo[mountAt[topListenersIDKey]]; } /** * `ReactBrowserEventEmitter` is used to attach top-level event listeners. For * example: * * EventPluginHub.putListener('myID', 'onClick', myFunction); * * This would allocate a "registration" of `('onClick', myFunction)` on 'myID'. * * @internal */ var ReactBrowserEventEmitter = _assign({}, ReactEventEmitterMixin, { /** * Injectable event backend */ ReactEventListener: null, injection: { /** * @param {object} ReactEventListener */ injectReactEventListener: function (ReactEventListener) { ReactEventListener.setHandleTopLevel(ReactBrowserEventEmitter.handleTopLevel); ReactBrowserEventEmitter.ReactEventListener = ReactEventListener; } }, /** * Sets whether or not any created callbacks should be enabled. * * @param {boolean} enabled True if callbacks should be enabled. 
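 *
 * For example (editor's illustrative note, not part of the upstream source):
 * ReactBrowserEventEmitter.setEnabled(false) temporarily suppresses top-level
 * event dispatch; React's reconcile transaction uses this to keep events from
 * firing while it mutates the DOM.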
*/ setEnabled: function (enabled) { if (ReactBrowserEventEmitter.ReactEventListener) { ReactBrowserEventEmitter.ReactEventListener.setEnabled(enabled); } }, /** * @return {boolean} True if callbacks are enabled. */ isEnabled: function () { return !!(ReactBrowserEventEmitter.ReactEventListener && ReactBrowserEventEmitter.ReactEventListener.isEnabled()); }, /** * We listen for bubbled touch events on the document object. * * Firefox v8.01 (and possibly others) exhibited strange behavior when * mounting `onmousemove` events at some node that was not the document * element. The symptoms were that if your mouse is not moving over something * contained within that mount point (for example on the background) the * top-level listeners for `onmousemove` won't be called. However, if you * register the `mousemove` on the document object, then it will of course * catch all `mousemove`s. This along with iOS quirks, justifies restricting * top-level listeners to the document object only, at least for these * movement types of events and possibly all events. * * @see http://www.quirksmode.org/blog/archives/2010/09/click_event_del.html * * Also, `keyup`/`keypress`/`keydown` do not bubble to the window on IE, but * they bubble to document. * * @param {string} registrationName Name of listener (e.g. `onClick`). * @param {object} contentDocumentHandle Document which owns the container */ listenTo: function (registrationName, contentDocumentHandle) { var mountAt = contentDocumentHandle; var isListening = getListeningForDocument(mountAt); var dependencies = EventPluginRegistry.registrationNameDependencies[registrationName]; for (var i = 0; i < dependencies.length; i++) { var dependency = dependencies[i]; if (!(isListening.hasOwnProperty(dependency) && isListening[dependency])) { if (dependency === 'topWheel') { if (isEventSupported('wheel')) { ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent('topWheel', 'wheel', mountAt); } else if (isEventSupported('mousewheel')) { ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent('topWheel', 'mousewheel', mountAt); } else { // Firefox needs to capture a different mouse scroll event. // @see http://www.quirksmode.org/dom/events/tests/scroll.html ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent('topWheel', 'DOMMouseScroll', mountAt); } } else if (dependency === 'topScroll') { if (isEventSupported('scroll', true)) { ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent('topScroll', 'scroll', mountAt); } else { ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent('topScroll', 'scroll', ReactBrowserEventEmitter.ReactEventListener.WINDOW_HANDLE); } } else if (dependency === 'topFocus' || dependency === 'topBlur') { if (isEventSupported('focus', true)) { ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent('topFocus', 'focus', mountAt); ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent('topBlur', 'blur', mountAt); } else if (isEventSupported('focusin')) { // IE has `focusin` and `focusout` events which bubble. 
// @see http://www.quirksmode.org/blog/archives/2008/04/delegating_the.html ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent('topFocus', 'focusin', mountAt); ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent('topBlur', 'focusout', mountAt); } // to make sure blur and focus event listeners are only attached once isListening.topBlur = true; isListening.topFocus = true; } else if (topEventMapping.hasOwnProperty(dependency)) { ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(dependency, topEventMapping[dependency], mountAt); } isListening[dependency] = true; } } }, trapBubbledEvent: function (topLevelType, handlerBaseName, handle) { return ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(topLevelType, handlerBaseName, handle); }, trapCapturedEvent: function (topLevelType, handlerBaseName, handle) { return ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(topLevelType, handlerBaseName, handle); }, /** * Protect against document.createEvent() returning null * Some popup blocker extensions appear to do this: * https://github.com/facebook/react/issues/6887 */ supportsEventPageXY: function () { if (!document.createEvent) { return false; } var ev = document.createEvent('MouseEvent'); return ev != null && 'pageX' in ev; }, /** * Listens to window scroll and resize events. We cache scroll values so that * application code can access them without triggering reflows. * * ViewportMetrics is only used by SyntheticMouse/TouchEvent and only when * pageX/pageY isn't supported (legacy browsers). * * NOTE: Scroll events do not bubble. * * @see http://www.quirksmode.org/dom/events/scroll.html */ ensureScrollValueMonitoring: function () { if (hasEventPageXY === undefined) { hasEventPageXY = ReactBrowserEventEmitter.supportsEventPageXY(); } if (!hasEventPageXY && !isMonitoringScrollValue) { var refresh = ViewportMetrics.refreshScrollValues; ReactBrowserEventEmitter.ReactEventListener.monitorScrollValue(refresh); isMonitoringScrollValue = true; } } }); module.exports = ReactBrowserEventEmitter; /***/ }, /* 98 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var EventPluginHub = __webpack_require__(39); function runEventQueueInBatch(events) { EventPluginHub.enqueueEvents(events); EventPluginHub.processEventQueue(false); } var ReactEventEmitterMixin = { /** * Streams a fired top-level event to `EventPluginHub` where plugins have the * opportunity to create `ReactEvent`s to be dispatched. */ handleTopLevel: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { var events = EventPluginHub.extractEvents(topLevelType, targetInst, nativeEvent, nativeEventTarget); runEventQueueInBatch(events); } }; module.exports = ReactEventEmitterMixin; /***/ }, /* 99 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var ExecutionEnvironment = __webpack_require__(45); /** * Generate a mapping of standard vendor prefixes using the defined style property and event name. * * @param {string} styleProp * @param {string} eventName * @returns {object} */ function makePrefixMap(styleProp, eventName) { var prefixes = {}; prefixes[styleProp.toLowerCase()] = eventName.toLowerCase(); prefixes['Webkit' + styleProp] = 'webkit' + eventName; prefixes['Moz' + styleProp] = 'moz' + eventName; prefixes['ms' + styleProp] = 'MS' + eventName; prefixes['O' + styleProp] = 'o' + eventName.toLowerCase(); return prefixes; } /** * A list of event names to a configurable list of vendor prefixes. */ var vendorPrefixes = { animationend: makePrefixMap('Animation', 'AnimationEnd'), animationiteration: makePrefixMap('Animation', 'AnimationIteration'), animationstart: makePrefixMap('Animation', 'AnimationStart'), transitionend: makePrefixMap('Transition', 'TransitionEnd') }; /** * Event names that have already been detected and prefixed (if applicable). */ var prefixedEventNames = {}; /** * Element to check for prefixes on. */ var style = {}; /** * Bootstrap if a DOM exists. */ if (ExecutionEnvironment.canUseDOM) { style = document.createElement('div').style; // On some platforms, in particular some releases of Android 4.x, // the un-prefixed "animation" and "transition" properties are defined on the // style object but the events that fire will still be prefixed, so we need // to check if the un-prefixed events are usable, and if not remove them from the map. if (!('AnimationEvent' in window)) { delete vendorPrefixes.animationend.animation; delete vendorPrefixes.animationiteration.animation; delete vendorPrefixes.animationstart.animation; } // Same as above if (!('TransitionEvent' in window)) { delete vendorPrefixes.transitionend.transition; } } /** * Attempts to determine the correct vendor prefixed event name. * * @param {string} eventName * @returns {string} */ function getVendorPrefixedEventName(eventName) { if (prefixedEventNames[eventName]) { return prefixedEventNames[eventName]; } else if (!vendorPrefixes[eventName]) { return eventName; } var prefixMap = vendorPrefixes[eventName]; for (var styleProp in prefixMap) { if (prefixMap.hasOwnProperty(styleProp) && styleProp in style) { return prefixedEventNames[eventName] = prefixMap[styleProp]; } } return ''; } module.exports = getVendorPrefixedEventName; /***/ }, /* 100 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var DOMPropertyOperations = __webpack_require__(95); var LinkedValueUtils = __webpack_require__(101); var ReactDOMComponentTree = __webpack_require__(31); var ReactUpdates = __webpack_require__(53); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); var didWarnValueLink = false; var didWarnCheckedLink = false; var didWarnValueDefaultValue = false; var didWarnCheckedDefaultChecked = false; var didWarnControlledToUncontrolled = false; var didWarnUncontrolledToControlled = false; function forceUpdateIfMounted() { if (this._rootNodeID) { // DOM component is still mounted; update ReactDOMInput.updateWrapper(this); } } function isControlled(props) { var usesChecked = props.type === 'checkbox' || props.type === 'radio'; return usesChecked ? props.checked != null : props.value != null; } /** * Implements an <input> host component that allows setting these optional * props: `checked`, `value`, `defaultChecked`, and `defaultValue`. * * If `checked` or `value` are not supplied (or null/undefined), user actions * that affect the checked state or value will trigger updates to the element. * * If they are supplied (and not null/undefined), the rendered element will not * trigger updates to the element. Instead, the props must change in order for * the rendered element to be updated. * * The rendered element will be initialized as unchecked (or `defaultChecked`) * with an empty value (or `defaultValue`). * * @see http://www.w3.org/TR/2012/WD-html5-20121025/the-input-element.html */ var ReactDOMInput = { getHostProps: function (inst, props) { var value = LinkedValueUtils.getValue(props); var checked = LinkedValueUtils.getChecked(props); var hostProps = _assign({ // Make sure we set .type before any other properties (setting .value // before .type means .value is lost in IE11 and below) type: undefined, // Make sure we set .step before .value (setting .value before .step // means .value is rounded on mount, based upon step precision) step: undefined, // Make sure we set .min & .max before .value (to ensure proper order // in corner cases such as min or max deriving from value, e.g. Issue #7170) min: undefined, max: undefined }, props, { defaultChecked: undefined, defaultValue: undefined, value: value != null ? value : inst._wrapperState.initialValue, checked: checked != null ? checked : inst._wrapperState.initialChecked, onChange: inst._wrapperState.onChange }); return hostProps; }, mountWrapper: function (inst, props) { if (false) { LinkedValueUtils.checkPropTypes('input', props, inst._currentElement._owner); var owner = inst._currentElement._owner; if (props.valueLink !== undefined && !didWarnValueLink) { process.env.NODE_ENV !== 'production' ? warning(false, '`valueLink` prop on `input` is deprecated; set `value` and `onChange` instead.') : void 0; didWarnValueLink = true; } if (props.checkedLink !== undefined && !didWarnCheckedLink) { process.env.NODE_ENV !== 'production' ? warning(false, '`checkedLink` prop on `input` is deprecated; set `value` and `onChange` instead.') : void 0; didWarnCheckedLink = true; } if (props.checked !== undefined && props.defaultChecked !== undefined && !didWarnCheckedDefaultChecked) { process.env.NODE_ENV !== 'production' ? warning(false, '%s contains an input of type %s with both checked and defaultChecked props. 
' + 'Input elements must be either controlled or uncontrolled ' + '(specify either the checked prop, or the defaultChecked prop, but not ' + 'both). Decide between using a controlled or uncontrolled input ' + 'element and remove one of these props. More info: ' + 'https://fb.me/react-controlled-components', owner && owner.getName() || 'A component', props.type) : void 0; didWarnCheckedDefaultChecked = true; } if (props.value !== undefined && props.defaultValue !== undefined && !didWarnValueDefaultValue) { process.env.NODE_ENV !== 'production' ? warning(false, '%s contains an input of type %s with both value and defaultValue props. ' + 'Input elements must be either controlled or uncontrolled ' + '(specify either the value prop, or the defaultValue prop, but not ' + 'both). Decide between using a controlled or uncontrolled input ' + 'element and remove one of these props. More info: ' + 'https://fb.me/react-controlled-components', owner && owner.getName() || 'A component', props.type) : void 0; didWarnValueDefaultValue = true; } } var defaultValue = props.defaultValue; inst._wrapperState = { initialChecked: props.checked != null ? props.checked : props.defaultChecked, initialValue: props.value != null ? props.value : defaultValue, listeners: null, onChange: _handleChange.bind(inst) }; if (false) { inst._wrapperState.controlled = isControlled(props); } }, updateWrapper: function (inst) { var props = inst._currentElement.props; if (false) { var controlled = isControlled(props); var owner = inst._currentElement._owner; if (!inst._wrapperState.controlled && controlled && !didWarnUncontrolledToControlled) { process.env.NODE_ENV !== 'production' ? warning(false, '%s is changing an uncontrolled input of type %s to be controlled. ' + 'Input elements should not switch from uncontrolled to controlled (or vice versa). ' + 'Decide between using a controlled or uncontrolled input ' + 'element for the lifetime of the component. More info: https://fb.me/react-controlled-components', owner && owner.getName() || 'A component', props.type) : void 0; didWarnUncontrolledToControlled = true; } if (inst._wrapperState.controlled && !controlled && !didWarnControlledToUncontrolled) { process.env.NODE_ENV !== 'production' ? warning(false, '%s is changing a controlled input of type %s to be uncontrolled. ' + 'Input elements should not switch from controlled to uncontrolled (or vice versa). ' + 'Decide between using a controlled or uncontrolled input ' + 'element for the lifetime of the component. More info: https://fb.me/react-controlled-components', owner && owner.getName() || 'A component', props.type) : void 0; didWarnControlledToUncontrolled = true; } } // TODO: Shouldn't this be getChecked(props)? var checked = props.checked; if (checked != null) { DOMPropertyOperations.setValueForProperty(ReactDOMComponentTree.getNodeFromInstance(inst), 'checked', checked || false); } var node = ReactDOMComponentTree.getNodeFromInstance(inst); var value = LinkedValueUtils.getValue(props); if (value != null) { // Cast `value` to a string to ensure the value is set correctly. While // browsers typically do this as necessary, jsdom doesn't. var newValue = '' + value; // To avoid side effects (such as losing text selection), only set value if changed if (newValue !== node.value) { node.value = newValue; } } else { if (props.value == null && props.defaultValue != null) { // In Chrome, assigning defaultValue to certain input types triggers input validation. // For number inputs, the display value loses trailing decimal points. 
For email inputs, // Chrome raises "The specified value <x> is not a valid email address". // // Here we check to see if the defaultValue has actually changed, avoiding these problems // when the user is inputting text // // https://github.com/facebook/react/issues/7253 if (node.defaultValue !== '' + props.defaultValue) { node.defaultValue = '' + props.defaultValue; } } if (props.checked == null && props.defaultChecked != null) { node.defaultChecked = !!props.defaultChecked; } } }, postMountWrapper: function (inst) { var props = inst._currentElement.props; // This is in postMount because we need access to the DOM node, which is not // available until after the component has mounted. var node = ReactDOMComponentTree.getNodeFromInstance(inst); // Detach value from defaultValue. We won't do anything if we're working on // submit or reset inputs as those values & defaultValues are linked. They // are not resetable nodes so this operation doesn't matter and actually // removes browser-default values (eg "Submit Query") when no value is // provided. switch (props.type) { case 'submit': case 'reset': break; case 'color': case 'date': case 'datetime': case 'datetime-local': case 'month': case 'time': case 'week': // This fixes the no-show issue on iOS Safari and Android Chrome: // https://github.com/facebook/react/issues/7233 node.value = ''; node.value = node.defaultValue; break; default: node.value = node.value; break; } // Normally, we'd just do `node.checked = node.checked` upon initial mount, less this bug // this is needed to work around a chrome bug where setting defaultChecked // will sometimes influence the value of checked (even after detachment). // Reference: https://bugs.chromium.org/p/chromium/issues/detail?id=608416 // We need to temporarily unset name to avoid disrupting radio button groups. var name = node.name; if (name !== '') { node.name = ''; } node.defaultChecked = !node.defaultChecked; node.defaultChecked = !node.defaultChecked; if (name !== '') { node.name = name; } } }; function _handleChange(event) { var props = this._currentElement.props; var returnValue = LinkedValueUtils.executeOnChange(props, event); // Here we use asap to wait until all updates have propagated, which // is important when using controlled components within layers: // https://github.com/facebook/react/issues/1698 ReactUpdates.asap(forceUpdateIfMounted, this); var name = props.name; if (props.type === 'radio' && name != null) { var rootNode = ReactDOMComponentTree.getNodeFromInstance(this); var queryRoot = rootNode; while (queryRoot.parentNode) { queryRoot = queryRoot.parentNode; } // If `rootNode.form` was non-null, then we could try `form.elements`, // but that sometimes behaves strangely in IE8. We could also try using // `form.getElementsByName`, but that will only return direct children // and won't include inputs that use the HTML5 `form=` attribute. Since // the input might not even be in a form, let's just use the global // `querySelectorAll` to ensure we don't miss anything. var group = queryRoot.querySelectorAll('input[name=' + JSON.stringify('' + name) + '][type="radio"]'); for (var i = 0; i < group.length; i++) { var otherNode = group[i]; if (otherNode === rootNode || otherNode.form !== rootNode.form) { continue; } // This will throw if radio buttons rendered by different copies of React // and the same name are rendered into the same form (same as #1939). // That's probably okay; we don't support it just as we don't support // mixing React radio buttons with non-React ones. 
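          // As a concrete (illustrative) example of the query built above: for a radio
          // group named "choice", JSON.stringify('' + name) produces "\"choice\"", so the
          // selector becomes 'input[name="choice"][type="radio"]' and `group` holds every
          // same-named radio input in the document. Each node is mapped back to its React
          // instance below; a null instance means the node was not rendered by React,
          // which trips the invariant.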
var otherInstance = ReactDOMComponentTree.getInstanceFromNode(otherNode); !otherInstance ? false ? invariant(false, 'ReactDOMInput: Mixing React and non-React radio inputs with the same `name` is not supported.') : _prodInvariant('90') : void 0; // If this is a controlled radio button group, forcing the input that // was previously checked to update will cause it to be come re-checked // as appropriate. ReactUpdates.asap(forceUpdateIfMounted, otherInstance); } } return returnValue; } module.exports = ReactDOMInput; /***/ }, /* 101 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var React = __webpack_require__(3); var ReactPropTypesSecret = __webpack_require__(102); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); var hasReadOnlyValue = { 'button': true, 'checkbox': true, 'image': true, 'hidden': true, 'radio': true, 'reset': true, 'submit': true }; function _assertSingleLink(inputProps) { !(inputProps.checkedLink == null || inputProps.valueLink == null) ? false ? invariant(false, 'Cannot provide a checkedLink and a valueLink. If you want to use checkedLink, you probably don\'t want to use valueLink and vice versa.') : _prodInvariant('87') : void 0; } function _assertValueLink(inputProps) { _assertSingleLink(inputProps); !(inputProps.value == null && inputProps.onChange == null) ? false ? invariant(false, 'Cannot provide a valueLink and a value or onChange event. If you want to use value or onChange, you probably don\'t want to use valueLink.') : _prodInvariant('88') : void 0; } function _assertCheckedLink(inputProps) { _assertSingleLink(inputProps); !(inputProps.checked == null && inputProps.onChange == null) ? false ? invariant(false, 'Cannot provide a checkedLink and a checked property or onChange event. If you want to use checked or onChange, you probably don\'t want to use checkedLink') : _prodInvariant('89') : void 0; } var propTypes = { value: function (props, propName, componentName) { if (!props[propName] || hasReadOnlyValue[props.type] || props.onChange || props.readOnly || props.disabled) { return null; } return new Error('You provided a `value` prop to a form field without an ' + '`onChange` handler. This will render a read-only field. If ' + 'the field should be mutable use `defaultValue`. Otherwise, ' + 'set either `onChange` or `readOnly`.'); }, checked: function (props, propName, componentName) { if (!props[propName] || props.onChange || props.readOnly || props.disabled) { return null; } return new Error('You provided a `checked` prop to a form field without an ' + '`onChange` handler. This will render a read-only field. If ' + 'the field should be mutable use `defaultChecked`. Otherwise, ' + 'set either `onChange` or `readOnly`.'); }, onChange: React.PropTypes.func }; var loggedTypeFailures = {}; function getDeclarationErrorAddendum(owner) { if (owner) { var name = owner.getName(); if (name) { return ' Check the render method of `' + name + '`.'; } } return ''; } /** * Provide a linked `value` attribute for controlled forms. You should not use * this outside of the ReactDOM controlled form components. 
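 *
 * For reference, a "link" here is a plain object exposing the current `value` and a
 * `requestChange(newValue)` callback (the only two properties read by the code below).
 * An illustrative shape, using made-up names:
 *
 *   var textLink = {
 *     value: component.state.text,
 *     requestChange: function (newText) { component.setState({ text: newText }); }
 *   };
 *   // rendered as e.g. <input type="text" valueLink={textLink} />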
*/ var LinkedValueUtils = { checkPropTypes: function (tagName, props, owner) { for (var propName in propTypes) { if (propTypes.hasOwnProperty(propName)) { var error = propTypes[propName](props, propName, tagName, 'prop', null, ReactPropTypesSecret); } if (error instanceof Error && !(error.message in loggedTypeFailures)) { // Only monitor this failure once because there tends to be a lot of the // same error. loggedTypeFailures[error.message] = true; var addendum = getDeclarationErrorAddendum(owner); false ? warning(false, 'Failed form propType: %s%s', error.message, addendum) : void 0; } } }, /** * @param {object} inputProps Props for form component * @return {*} current value of the input either from value prop or link. */ getValue: function (inputProps) { if (inputProps.valueLink) { _assertValueLink(inputProps); return inputProps.valueLink.value; } return inputProps.value; }, /** * @param {object} inputProps Props for form component * @return {*} current checked status of the input either from checked prop * or link. */ getChecked: function (inputProps) { if (inputProps.checkedLink) { _assertCheckedLink(inputProps); return inputProps.checkedLink.value; } return inputProps.checked; }, /** * @param {object} inputProps Props for form component * @param {SyntheticEvent} event change event to handle */ executeOnChange: function (inputProps, event) { if (inputProps.valueLink) { _assertValueLink(inputProps); return inputProps.valueLink.requestChange(event.target.value); } else if (inputProps.checkedLink) { _assertCheckedLink(inputProps); return inputProps.checkedLink.requestChange(event.target.checked); } else if (inputProps.onChange) { return inputProps.onChange.call(undefined, event); } } }; module.exports = LinkedValueUtils; /***/ }, /* 102 */ 26, /* 103 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var React = __webpack_require__(3); var ReactDOMComponentTree = __webpack_require__(31); var ReactDOMSelect = __webpack_require__(104); var warning = __webpack_require__(11); var didWarnInvalidOptionChildren = false; function flattenChildren(children) { var content = ''; // Flatten children and warn if they aren't strings or numbers; // invalid types are ignored. React.Children.forEach(children, function (child) { if (child == null) { return; } if (typeof child === 'string' || typeof child === 'number') { content += child; } else if (!didWarnInvalidOptionChildren) { didWarnInvalidOptionChildren = true; false ? warning(false, 'Only strings and numbers are supported as <option> children.') : void 0; } }); return content; } /** * Implements an <option> host component that warns when `selected` is set. */ var ReactDOMOption = { mountWrapper: function (inst, props, hostParent) { // TODO (yungsters): Remove support for `selected` in <option>. if (false) { process.env.NODE_ENV !== 'production' ? 
warning(props.selected == null, 'Use the `defaultValue` or `value` props on <select> instead of ' + 'setting `selected` on <option>.') : void 0; } // Look up whether this option is 'selected' var selectValue = null; if (hostParent != null) { var selectParent = hostParent; if (selectParent._tag === 'optgroup') { selectParent = selectParent._hostParent; } if (selectParent != null && selectParent._tag === 'select') { selectValue = ReactDOMSelect.getSelectValueContext(selectParent); } } // If the value is null (e.g., no specified value or after initial mount) // or missing (e.g., for <datalist>), we don't change props.selected var selected = null; if (selectValue != null) { var value; if (props.value != null) { value = props.value + ''; } else { value = flattenChildren(props.children); } selected = false; if (Array.isArray(selectValue)) { // multiple for (var i = 0; i < selectValue.length; i++) { if ('' + selectValue[i] === value) { selected = true; break; } } } else { selected = '' + selectValue === value; } } inst._wrapperState = { selected: selected }; }, postMountWrapper: function (inst) { // value="" should make a value attribute (#6219) var props = inst._currentElement.props; if (props.value != null) { var node = ReactDOMComponentTree.getNodeFromInstance(inst); node.setAttribute('value', props.value); } }, getHostProps: function (inst, props) { var hostProps = _assign({ selected: undefined, children: undefined }, props); // Read state only from initial mount because <select> updates value // manually; we need the initial state only for server rendering if (inst._wrapperState.selected != null) { hostProps.selected = inst._wrapperState.selected; } var content = flattenChildren(props.children); if (content) { hostProps.children = content; } return hostProps; } }; module.exports = ReactDOMOption; /***/ }, /* 104 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var LinkedValueUtils = __webpack_require__(101); var ReactDOMComponentTree = __webpack_require__(31); var ReactUpdates = __webpack_require__(53); var warning = __webpack_require__(11); var didWarnValueLink = false; var didWarnValueDefaultValue = false; function updateOptionsIfPendingUpdateAndMounted() { if (this._rootNodeID && this._wrapperState.pendingUpdate) { this._wrapperState.pendingUpdate = false; var props = this._currentElement.props; var value = LinkedValueUtils.getValue(props); if (value != null) { updateOptions(this, Boolean(props.multiple), value); } } } function getDeclarationErrorAddendum(owner) { if (owner) { var name = owner.getName(); if (name) { return ' Check the render method of `' + name + '`.'; } } return ''; } var valuePropNames = ['value', 'defaultValue']; /** * Validation function for `value` and `defaultValue`. * @private */ function checkSelectPropTypes(inst, props) { var owner = inst._currentElement._owner; LinkedValueUtils.checkPropTypes('select', props, owner); if (props.valueLink !== undefined && !didWarnValueLink) { false ? 
warning(false, '`valueLink` prop on `select` is deprecated; set `value` and `onChange` instead.') : void 0; didWarnValueLink = true; } for (var i = 0; i < valuePropNames.length; i++) { var propName = valuePropNames[i]; if (props[propName] == null) { continue; } var isArray = Array.isArray(props[propName]); if (props.multiple && !isArray) { false ? warning(false, 'The `%s` prop supplied to <select> must be an array if ' + '`multiple` is true.%s', propName, getDeclarationErrorAddendum(owner)) : void 0; } else if (!props.multiple && isArray) { false ? warning(false, 'The `%s` prop supplied to <select> must be a scalar ' + 'value if `multiple` is false.%s', propName, getDeclarationErrorAddendum(owner)) : void 0; } } } /** * @param {ReactDOMComponent} inst * @param {boolean} multiple * @param {*} propValue A stringable (with `multiple`, a list of stringables). * @private */ function updateOptions(inst, multiple, propValue) { var selectedValue, i; var options = ReactDOMComponentTree.getNodeFromInstance(inst).options; if (multiple) { selectedValue = {}; for (i = 0; i < propValue.length; i++) { selectedValue['' + propValue[i]] = true; } for (i = 0; i < options.length; i++) { var selected = selectedValue.hasOwnProperty(options[i].value); if (options[i].selected !== selected) { options[i].selected = selected; } } } else { // Do not set `select.value` as exact behavior isn't consistent across all // browsers for all cases. selectedValue = '' + propValue; for (i = 0; i < options.length; i++) { if (options[i].value === selectedValue) { options[i].selected = true; return; } } if (options.length) { options[0].selected = true; } } } /** * Implements a <select> host component that allows optionally setting the * props `value` and `defaultValue`. If `multiple` is false, the prop must be a * stringable. If `multiple` is true, the prop must be an array of stringables. * * If `value` is not supplied (or null/undefined), user actions that change the * selected option will trigger updates to the rendered options. * * If it is supplied (and not null/undefined), the rendered options will not * update in response to user actions. Instead, the `value` prop must change in * order for the rendered options to update. * * If `defaultValue` is provided, any options with the supplied values will be * selected. */ var ReactDOMSelect = { getHostProps: function (inst, props) { return _assign({}, props, { onChange: inst._wrapperState.onChange, value: undefined }); }, mountWrapper: function (inst, props) { if (false) { checkSelectPropTypes(inst, props); } var value = LinkedValueUtils.getValue(props); inst._wrapperState = { pendingUpdate: false, initialValue: value != null ? value : props.defaultValue, listeners: null, onChange: _handleChange.bind(inst), wasMultiple: Boolean(props.multiple) }; if (props.value !== undefined && props.defaultValue !== undefined && !didWarnValueDefaultValue) { false ? warning(false, 'Select elements must be either controlled or uncontrolled ' + '(specify either the value prop, or the defaultValue prop, but not ' + 'both). Decide between using a controlled or uncontrolled select ' + 'element and remove one of these props. 
More info: ' + 'https://fb.me/react-controlled-components') : void 0; didWarnValueDefaultValue = true; } }, getSelectValueContext: function (inst) { // ReactDOMOption looks at this initial value so the initial generated // markup has correct `selected` attributes return inst._wrapperState.initialValue; }, postUpdateWrapper: function (inst) { var props = inst._currentElement.props; // After the initial mount, we control selected-ness manually so don't pass // this value down inst._wrapperState.initialValue = undefined; var wasMultiple = inst._wrapperState.wasMultiple; inst._wrapperState.wasMultiple = Boolean(props.multiple); var value = LinkedValueUtils.getValue(props); if (value != null) { inst._wrapperState.pendingUpdate = false; updateOptions(inst, Boolean(props.multiple), value); } else if (wasMultiple !== Boolean(props.multiple)) { // For simplicity, reapply `defaultValue` if `multiple` is toggled. if (props.defaultValue != null) { updateOptions(inst, Boolean(props.multiple), props.defaultValue); } else { // Revert the select back to its default unselected state. updateOptions(inst, Boolean(props.multiple), props.multiple ? [] : ''); } } } }; function _handleChange(event) { var props = this._currentElement.props; var returnValue = LinkedValueUtils.executeOnChange(props, event); if (this._rootNodeID) { this._wrapperState.pendingUpdate = true; } ReactUpdates.asap(updateOptionsIfPendingUpdateAndMounted, this); return returnValue; } module.exports = ReactDOMSelect; /***/ }, /* 105 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var LinkedValueUtils = __webpack_require__(101); var ReactDOMComponentTree = __webpack_require__(31); var ReactUpdates = __webpack_require__(53); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); var didWarnValueLink = false; var didWarnValDefaultVal = false; function forceUpdateIfMounted() { if (this._rootNodeID) { // DOM component is still mounted; update ReactDOMTextarea.updateWrapper(this); } } /** * Implements a <textarea> host component that allows setting `value`, and * `defaultValue`. This differs from the traditional DOM API because value is * usually set as PCDATA children. * * If `value` is not supplied (or null/undefined), user actions that affect the * value will trigger updates to the element. * * If `value` is supplied (and not null/undefined), the rendered element will * not trigger updates to the element. Instead, the `value` prop must change in * order for the rendered element to be updated. * * The rendered element will be initialized with an empty value, the prop * `defaultValue` if specified, or the children content (deprecated). */ var ReactDOMTextarea = { getHostProps: function (inst, props) { !(props.dangerouslySetInnerHTML == null) ? false ? invariant(false, '`dangerouslySetInnerHTML` does not make sense on <textarea>.') : _prodInvariant('91') : void 0; // Always set children to the same thing. In IE9, the selection range will // get reset if `textContent` is mutated. 
We could add a check in setTextContent // to only set the value if/when the value differs from the node value (which would // completely solve this IE9 bug), but Sebastian+Ben seemed to like this solution. // The value can be a boolean or object so that's why it's forced to be a string. var hostProps = _assign({}, props, { value: undefined, defaultValue: undefined, children: '' + inst._wrapperState.initialValue, onChange: inst._wrapperState.onChange }); return hostProps; }, mountWrapper: function (inst, props) { if (false) { LinkedValueUtils.checkPropTypes('textarea', props, inst._currentElement._owner); if (props.valueLink !== undefined && !didWarnValueLink) { process.env.NODE_ENV !== 'production' ? warning(false, '`valueLink` prop on `textarea` is deprecated; set `value` and `onChange` instead.') : void 0; didWarnValueLink = true; } if (props.value !== undefined && props.defaultValue !== undefined && !didWarnValDefaultVal) { process.env.NODE_ENV !== 'production' ? warning(false, 'Textarea elements must be either controlled or uncontrolled ' + '(specify either the value prop, or the defaultValue prop, but not ' + 'both). Decide between using a controlled or uncontrolled textarea ' + 'and remove one of these props. More info: ' + 'https://fb.me/react-controlled-components') : void 0; didWarnValDefaultVal = true; } } var value = LinkedValueUtils.getValue(props); var initialValue = value; // Only bother fetching default value if we're going to use it if (value == null) { var defaultValue = props.defaultValue; // TODO (yungsters): Remove support for children content in <textarea>. var children = props.children; if (children != null) { if (false) { process.env.NODE_ENV !== 'production' ? warning(false, 'Use the `defaultValue` or `value` props instead of setting ' + 'children on <textarea>.') : void 0; } !(defaultValue == null) ? false ? invariant(false, 'If you supply `defaultValue` on a <textarea>, do not pass children.') : _prodInvariant('92') : void 0; if (Array.isArray(children)) { !(children.length <= 1) ? false ? invariant(false, '<textarea> can only have at most one child.') : _prodInvariant('93') : void 0; children = children[0]; } defaultValue = '' + children; } if (defaultValue == null) { defaultValue = ''; } initialValue = defaultValue; } inst._wrapperState = { initialValue: '' + initialValue, listeners: null, onChange: _handleChange.bind(inst) }; }, updateWrapper: function (inst) { var props = inst._currentElement.props; var node = ReactDOMComponentTree.getNodeFromInstance(inst); var value = LinkedValueUtils.getValue(props); if (value != null) { // Cast `value` to a string to ensure the value is set correctly. While // browsers typically do this as necessary, jsdom doesn't. var newValue = '' + value; // To avoid side effects (such as losing text selection), only set value if changed if (newValue !== node.value) { node.value = newValue; } if (props.defaultValue == null) { node.defaultValue = newValue; } } if (props.defaultValue != null) { node.defaultValue = props.defaultValue; } }, postMountWrapper: function (inst) { // This is in postMount because we need access to the DOM node, which is not // available until after the component has mounted. var node = ReactDOMComponentTree.getNodeFromInstance(inst); var textContent = node.textContent; // Only set node.value if textContent is equal to the expected // initial value. In IE10/IE11 there is a bug where the placeholder attribute // will populate textContent as well. 
// https://developer.microsoft.com/microsoft-edge/platform/issues/101525/ if (textContent === inst._wrapperState.initialValue) { node.value = textContent; } } }; function _handleChange(event) { var props = this._currentElement.props; var returnValue = LinkedValueUtils.executeOnChange(props, event); ReactUpdates.asap(forceUpdateIfMounted, this); return returnValue; } module.exports = ReactDOMTextarea; /***/ }, /* 106 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var ReactComponentEnvironment = __webpack_require__(107); var ReactInstanceMap = __webpack_require__(108); var ReactInstrumentation = __webpack_require__(59); var ReactCurrentOwner = __webpack_require__(10); var ReactReconciler = __webpack_require__(56); var ReactChildReconciler = __webpack_require__(109); var emptyFunction = __webpack_require__(12); var flattenChildren = __webpack_require__(124); var invariant = __webpack_require__(8); /** * Make an update for markup to be rendered and inserted at a supplied index. * * @param {string} markup Markup that renders into an element. * @param {number} toIndex Destination index. * @private */ function makeInsertMarkup(markup, afterNode, toIndex) { // NOTE: Null values reduce hidden classes. return { type: 'INSERT_MARKUP', content: markup, fromIndex: null, fromNode: null, toIndex: toIndex, afterNode: afterNode }; } /** * Make an update for moving an existing element to another index. * * @param {number} fromIndex Source index of the existing element. * @param {number} toIndex Destination index of the element. * @private */ function makeMove(child, afterNode, toIndex) { // NOTE: Null values reduce hidden classes. return { type: 'MOVE_EXISTING', content: null, fromIndex: child._mountIndex, fromNode: ReactReconciler.getHostNode(child), toIndex: toIndex, afterNode: afterNode }; } /** * Make an update for removing an element at an index. * * @param {number} fromIndex Index of the element to remove. * @private */ function makeRemove(child, node) { // NOTE: Null values reduce hidden classes. return { type: 'REMOVE_NODE', content: null, fromIndex: child._mountIndex, fromNode: node, toIndex: null, afterNode: null }; } /** * Make an update for setting the markup of a node. * * @param {string} markup Markup that renders into an element. * @private */ function makeSetMarkup(markup) { // NOTE: Null values reduce hidden classes. return { type: 'SET_MARKUP', content: markup, fromIndex: null, fromNode: null, toIndex: null, afterNode: null }; } /** * Make an update for setting the text content. * * @param {string} textContent Text content to set. * @private */ function makeTextContent(textContent) { // NOTE: Null values reduce hidden classes. return { type: 'TEXT_CONTENT', content: textContent, fromIndex: null, fromNode: null, toIndex: null, afterNode: null }; } /** * Push an update, if any, onto the queue. Creates a new queue if none is * passed and always returns the queue. Mutative. */ function enqueue(queue, update) { if (update) { queue = queue || []; queue.push(update); } return queue; } /** * Processes any enqueued updates. 
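 *
 * Each entry in `updateQueue` is one of the objects built by the `make*` helpers above;
 * for illustration, a move produced by `makeMove` looks like (field values are examples
 * only):
 *
 *   { type: 'MOVE_EXISTING', content: null, fromIndex: 2,
 *     fromNode: prevHostNode, toIndex: 0, afterNode: siblingHostNodeOrNull }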
* * @private */ function processQueue(inst, updateQueue) { ReactComponentEnvironment.processChildrenUpdates(inst, updateQueue); } var setChildrenForInstrumentation = emptyFunction; if (false) { var getDebugID = function (inst) { if (!inst._debugID) { // Check for ART-like instances. TODO: This is silly/gross. var internal; if (internal = ReactInstanceMap.get(inst)) { inst = internal; } } return inst._debugID; }; setChildrenForInstrumentation = function (children) { var debugID = getDebugID(this); // TODO: React Native empty components are also multichild. // This means they still get into this method but don't have _debugID. if (debugID !== 0) { ReactInstrumentation.debugTool.onSetChildren(debugID, children ? Object.keys(children).map(function (key) { return children[key]._debugID; }) : []); } }; } /** * ReactMultiChild are capable of reconciling multiple children. * * @class ReactMultiChild * @internal */ var ReactMultiChild = { /** * Provides common functionality for components that must reconcile multiple * children. This is used by `ReactDOMComponent` to mount, update, and * unmount child components. * * @lends {ReactMultiChild.prototype} */ Mixin: { _reconcilerInstantiateChildren: function (nestedChildren, transaction, context) { if (false) { var selfDebugID = getDebugID(this); if (this._currentElement) { try { ReactCurrentOwner.current = this._currentElement._owner; return ReactChildReconciler.instantiateChildren(nestedChildren, transaction, context, selfDebugID); } finally { ReactCurrentOwner.current = null; } } } return ReactChildReconciler.instantiateChildren(nestedChildren, transaction, context); }, _reconcilerUpdateChildren: function (prevChildren, nextNestedChildrenElements, mountImages, removedNodes, transaction, context) { var nextChildren; var selfDebugID = 0; if (false) { selfDebugID = getDebugID(this); if (this._currentElement) { try { ReactCurrentOwner.current = this._currentElement._owner; nextChildren = flattenChildren(nextNestedChildrenElements, selfDebugID); } finally { ReactCurrentOwner.current = null; } ReactChildReconciler.updateChildren(prevChildren, nextChildren, mountImages, removedNodes, transaction, this, this._hostContainerInfo, context, selfDebugID); return nextChildren; } } nextChildren = flattenChildren(nextNestedChildrenElements, selfDebugID); ReactChildReconciler.updateChildren(prevChildren, nextChildren, mountImages, removedNodes, transaction, this, this._hostContainerInfo, context, selfDebugID); return nextChildren; }, /** * Generates a "mount image" for each of the supplied children. In the case * of `ReactDOMComponent`, a mount image is a string of markup. * * @param {?object} nestedChildren Nested child maps. * @return {array} An array of mounted representations. * @internal */ mountChildren: function (nestedChildren, transaction, context) { var children = this._reconcilerInstantiateChildren(nestedChildren, transaction, context); this._renderedChildren = children; var mountImages = []; var index = 0; for (var name in children) { if (children.hasOwnProperty(name)) { var child = children[name]; var selfDebugID = 0; if (false) { selfDebugID = getDebugID(this); } var mountImage = ReactReconciler.mountComponent(child, transaction, this, this._hostContainerInfo, context, selfDebugID); child._mountIndex = index++; mountImages.push(mountImage); } } if (false) { setChildrenForInstrumentation.call(this, children); } return mountImages; }, /** * Replaces any rendered children with a text content string. * * @param {string} nextContent String of content. 
* @internal */ updateTextContent: function (nextContent) { var prevChildren = this._renderedChildren; // Remove any rendered children. ReactChildReconciler.unmountChildren(prevChildren, false); for (var name in prevChildren) { if (prevChildren.hasOwnProperty(name)) { true ? false ? invariant(false, 'updateTextContent called on non-empty component.') : _prodInvariant('118') : void 0; } } // Set new text content. var updates = [makeTextContent(nextContent)]; processQueue(this, updates); }, /** * Replaces any rendered children with a markup string. * * @param {string} nextMarkup String of markup. * @internal */ updateMarkup: function (nextMarkup) { var prevChildren = this._renderedChildren; // Remove any rendered children. ReactChildReconciler.unmountChildren(prevChildren, false); for (var name in prevChildren) { if (prevChildren.hasOwnProperty(name)) { true ? false ? invariant(false, 'updateTextContent called on non-empty component.') : _prodInvariant('118') : void 0; } } var updates = [makeSetMarkup(nextMarkup)]; processQueue(this, updates); }, /** * Updates the rendered children with new children. * * @param {?object} nextNestedChildrenElements Nested child element maps. * @param {ReactReconcileTransaction} transaction * @internal */ updateChildren: function (nextNestedChildrenElements, transaction, context) { // Hook used by React ART this._updateChildren(nextNestedChildrenElements, transaction, context); }, /** * @param {?object} nextNestedChildrenElements Nested child element maps. * @param {ReactReconcileTransaction} transaction * @final * @protected */ _updateChildren: function (nextNestedChildrenElements, transaction, context) { var prevChildren = this._renderedChildren; var removedNodes = {}; var mountImages = []; var nextChildren = this._reconcilerUpdateChildren(prevChildren, nextNestedChildrenElements, mountImages, removedNodes, transaction, context); if (!nextChildren && !prevChildren) { return; } var updates = null; var name; // `nextIndex` will increment for each child in `nextChildren`, but // `lastIndex` will be the last index visited in `prevChildren`. var nextIndex = 0; var lastIndex = 0; // `nextMountIndex` will increment for each newly mounted child. var nextMountIndex = 0; var lastPlacedNode = null; for (name in nextChildren) { if (!nextChildren.hasOwnProperty(name)) { continue; } var prevChild = prevChildren && prevChildren[name]; var nextChild = nextChildren[name]; if (prevChild === nextChild) { updates = enqueue(updates, this.moveChild(prevChild, lastPlacedNode, nextIndex, lastIndex)); lastIndex = Math.max(prevChild._mountIndex, lastIndex); prevChild._mountIndex = nextIndex; } else { if (prevChild) { // Update `lastIndex` before `_mountIndex` gets unset by unmounting. lastIndex = Math.max(prevChild._mountIndex, lastIndex); // The `removedNodes` loop below will actually remove the child. } // The child must be instantiated before it's mounted. updates = enqueue(updates, this._mountChildAtIndex(nextChild, mountImages[nextMountIndex], lastPlacedNode, nextIndex, transaction, context)); nextMountIndex++; } nextIndex++; lastPlacedNode = ReactReconciler.getHostNode(nextChild); } // Remove children that are no longer present. 
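      // (For context: `removedNodes` was populated by ReactChildReconciler.updateChildren
      // via _reconcilerUpdateChildren above; it maps each dropped child's name to its host
      // node, captured before that child instance was unmounted, so the REMOVE_NODE update
      // emitted here can still reference the node.)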
for (name in removedNodes) { if (removedNodes.hasOwnProperty(name)) { updates = enqueue(updates, this._unmountChild(prevChildren[name], removedNodes[name])); } } if (updates) { processQueue(this, updates); } this._renderedChildren = nextChildren; if (false) { setChildrenForInstrumentation.call(this, nextChildren); } }, /** * Unmounts all rendered children. This should be used to clean up children * when this component is unmounted. It does not actually perform any * backend operations. * * @internal */ unmountChildren: function (safely) { var renderedChildren = this._renderedChildren; ReactChildReconciler.unmountChildren(renderedChildren, safely); this._renderedChildren = null; }, /** * Moves a child component to the supplied index. * * @param {ReactComponent} child Component to move. * @param {number} toIndex Destination index of the element. * @param {number} lastIndex Last index visited of the siblings of `child`. * @protected */ moveChild: function (child, afterNode, toIndex, lastIndex) { // If the index of `child` is less than `lastIndex`, then it needs to // be moved. Otherwise, we do not need to move it because a child will be // inserted or moved before `child`. if (child._mountIndex < lastIndex) { return makeMove(child, afterNode, toIndex); } }, /** * Creates a child component. * * @param {ReactComponent} child Component to create. * @param {string} mountImage Markup to insert. * @protected */ createChild: function (child, afterNode, mountImage) { return makeInsertMarkup(mountImage, afterNode, child._mountIndex); }, /** * Removes a child component. * * @param {ReactComponent} child Child to remove. * @protected */ removeChild: function (child, node) { return makeRemove(child, node); }, /** * Mounts a child with the supplied name. * * NOTE: This is part of `updateChildren` and is here for readability. * * @param {ReactComponent} child Component to mount. * @param {string} name Name of the child. * @param {number} index Index at which to insert the child. * @param {ReactReconcileTransaction} transaction * @private */ _mountChildAtIndex: function (child, mountImage, afterNode, index, transaction, context) { child._mountIndex = index; return this.createChild(child, afterNode, mountImage); }, /** * Unmounts a rendered child. * * NOTE: This is part of `updateChildren` and is here for readability. * * @param {ReactComponent} child Component to unmount. * @private */ _unmountChild: function (child, node) { var update = this.removeChild(child, node); child._mountIndex = null; return update; } } }; module.exports = ReactMultiChild; /***/ }, /* 107 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); var injected = false; var ReactComponentEnvironment = { /** * Optionally injectable hook for swapping out mount images in the middle of * the tree. */ replaceNodeWithMarkup: null, /** * Optionally injectable hook for processing a queue of child updates. Will * later move into MultiChildComponents. */ processChildrenUpdates: null, injection: { injectEnvironment: function (environment) { !!injected ? false ? 
invariant(false, 'ReactCompositeComponent: injectEnvironment() can only be called once.') : _prodInvariant('104') : void 0; ReactComponentEnvironment.replaceNodeWithMarkup = environment.replaceNodeWithMarkup; ReactComponentEnvironment.processChildrenUpdates = environment.processChildrenUpdates; injected = true; } } }; module.exports = ReactComponentEnvironment; /***/ }, /* 108 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * `ReactInstanceMap` maintains a mapping from a public facing stateful * instance (key) and the internal representation (value). This allows public * methods to accept the user facing instance as an argument and map them back * to internal methods. */ // TODO: Replace this with ES6: var ReactInstanceMap = new Map(); var ReactInstanceMap = { /** * This API should be called `delete` but we'd have to make sure to always * transform these to strings for IE support. When this transform is fully * supported we can rename it. */ remove: function (key) { key._reactInternalInstance = undefined; }, get: function (key) { return key._reactInternalInstance; }, has: function (key) { return key._reactInternalInstance !== undefined; }, set: function (key, value) { key._reactInternalInstance = value; } }; module.exports = ReactInstanceMap; /***/ }, /* 109 */ /***/ function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(process) {/** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactReconciler = __webpack_require__(56); var instantiateReactComponent = __webpack_require__(111); var KeyEscapeUtils = __webpack_require__(119); var shouldUpdateReactComponent = __webpack_require__(115); var traverseAllChildren = __webpack_require__(120); var warning = __webpack_require__(11); var ReactComponentTreeHook; if (typeof process !== 'undefined' && process.env && ("production") === 'test') { // Temporary hack. // Inline requires don't work well with Jest: // https://github.com/facebook/react/issues/7240 // Remove the inline requires when we don't need them anymore: // https://github.com/facebook/react/pull/7178 ReactComponentTreeHook = __webpack_require__(123); } function instantiateChild(childInstances, child, name, selfDebugID) { // We found a component instance. var keyUnique = childInstances[name] === undefined; if (false) { if (!ReactComponentTreeHook) { ReactComponentTreeHook = require('react/lib/ReactComponentTreeHook'); } if (!keyUnique) { process.env.NODE_ENV !== 'production' ? warning(false, 'flattenChildren(...): Encountered two children with the same key, ' + '`%s`. Child keys must be unique; when two children share a key, only ' + 'the first child will be used.%s', KeyEscapeUtils.unescape(name), ReactComponentTreeHook.getStackAddendumByID(selfDebugID)) : void 0; } } if (child != null && keyUnique) { childInstances[name] = instantiateReactComponent(child, true); } } /** * ReactChildReconciler provides helpers for initializing or updating a set of * children. 
Its output is suitable for passing it onto ReactMultiChild which * does diffed reordering and insertion. */ var ReactChildReconciler = { /** * Generates a "mount image" for each of the supplied children. In the case * of `ReactDOMComponent`, a mount image is a string of markup. * * @param {?object} nestedChildNodes Nested child maps. * @return {?object} A set of child instances. * @internal */ instantiateChildren: function (nestedChildNodes, transaction, context, selfDebugID // 0 in production and for roots ) { if (nestedChildNodes == null) { return null; } var childInstances = {}; if (false) { traverseAllChildren(nestedChildNodes, function (childInsts, child, name) { return instantiateChild(childInsts, child, name, selfDebugID); }, childInstances); } else { traverseAllChildren(nestedChildNodes, instantiateChild, childInstances); } return childInstances; }, /** * Updates the rendered children and returns a new set of children. * * @param {?object} prevChildren Previously initialized set of children. * @param {?object} nextChildren Flat child element maps. * @param {ReactReconcileTransaction} transaction * @param {object} context * @return {?object} A new set of child instances. * @internal */ updateChildren: function (prevChildren, nextChildren, mountImages, removedNodes, transaction, hostParent, hostContainerInfo, context, selfDebugID // 0 in production and for roots ) { // We currently don't have a way to track moves here but if we use iterators // instead of for..in we can zip the iterators and check if an item has // moved. // TODO: If nothing has changed, return the prevChildren object so that we // can quickly bailout if nothing has changed. if (!nextChildren && !prevChildren) { return; } var name; var prevChild; for (name in nextChildren) { if (!nextChildren.hasOwnProperty(name)) { continue; } prevChild = prevChildren && prevChildren[name]; var prevElement = prevChild && prevChild._currentElement; var nextElement = nextChildren[name]; if (prevChild != null && shouldUpdateReactComponent(prevElement, nextElement)) { ReactReconciler.receiveComponent(prevChild, nextElement, transaction, context); nextChildren[name] = prevChild; } else { if (prevChild) { removedNodes[name] = ReactReconciler.getHostNode(prevChild); ReactReconciler.unmountComponent(prevChild, false); } // The child must be instantiated before it's mounted. var nextChildInstance = instantiateReactComponent(nextElement, true); nextChildren[name] = nextChildInstance; // Creating mount image now ensures refs are resolved in right order // (see https://github.com/facebook/react/pull/7101 for explanation). var nextChildMountImage = ReactReconciler.mountComponent(nextChildInstance, transaction, hostParent, hostContainerInfo, context, selfDebugID); mountImages.push(nextChildMountImage); } } // Unmount children that are no longer present. for (name in prevChildren) { if (prevChildren.hasOwnProperty(name) && !(nextChildren && nextChildren.hasOwnProperty(name))) { prevChild = prevChildren[name]; removedNodes[name] = ReactReconciler.getHostNode(prevChild); ReactReconciler.unmountComponent(prevChild, false); } } }, /** * Unmounts all rendered children. This should be used to clean up children * when this component is unmounted. * * @param {?object} renderedChildren Previously initialized set of children. 
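// Sketch of the keyed-diff decision made by updateChildren above (comment only). For each name
// present in nextChildren, shouldUpdateReactComponent compares the previous and next elements:
//   prev '.0' rendered createElement('div'), next '.0' is createElement('div', { className: 'x' })
//       -> ReactReconciler.receiveComponent(prevChild, nextElement, ...)   (update in place)
//   prev '.0' rendered createElement('div'), next '.0' is createElement('span')
//       -> prevChild is unmounted, its host node recorded in removedNodes, and a fresh instance
//          is mounted; its markup is pushed onto mountImages.
// Names only present in prevChildren are unmounted in the trailing for..in loop.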
* @internal */ unmountChildren: function (renderedChildren, safely) { for (var name in renderedChildren) { if (renderedChildren.hasOwnProperty(name)) { var renderedChild = renderedChildren[name]; ReactReconciler.unmountComponent(renderedChild, safely); } } } }; module.exports = ReactChildReconciler; /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(110))) /***/ }, /* 110 */ /***/ function(module, exports) { // shim for using process in browser var process = module.exports = {}; // cached from whatever global is present so that test runners that stub it // don't break things. But we need to wrap it in a try catch in case it is // wrapped in strict mode code which doesn't define any globals. It's inside a // function because try/catches deoptimize in certain engines. var cachedSetTimeout; var cachedClearTimeout; function defaultSetTimout() { throw new Error('setTimeout has not been defined'); } function defaultClearTimeout () { throw new Error('clearTimeout has not been defined'); } (function () { try { if (typeof setTimeout === 'function') { cachedSetTimeout = setTimeout; } else { cachedSetTimeout = defaultSetTimout; } } catch (e) { cachedSetTimeout = defaultSetTimout; } try { if (typeof clearTimeout === 'function') { cachedClearTimeout = clearTimeout; } else { cachedClearTimeout = defaultClearTimeout; } } catch (e) { cachedClearTimeout = defaultClearTimeout; } } ()) function runTimeout(fun) { if (cachedSetTimeout === setTimeout) { //normal enviroments in sane situations return setTimeout(fun, 0); } // if setTimeout wasn't available but was latter defined if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { cachedSetTimeout = setTimeout; return setTimeout(fun, 0); } try { // when when somebody has screwed with setTimeout but no I.E. maddness return cachedSetTimeout(fun, 0); } catch(e){ try { // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally return cachedSetTimeout.call(null, fun, 0); } catch(e){ // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error return cachedSetTimeout.call(this, fun, 0); } } } function runClearTimeout(marker) { if (cachedClearTimeout === clearTimeout) { //normal enviroments in sane situations return clearTimeout(marker); } // if clearTimeout wasn't available but was latter defined if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { cachedClearTimeout = clearTimeout; return clearTimeout(marker); } try { // when when somebody has screwed with setTimeout but no I.E. maddness return cachedClearTimeout(marker); } catch (e){ try { // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally return cachedClearTimeout.call(null, marker); } catch (e){ // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout return cachedClearTimeout.call(this, marker); } } } var queue = []; var draining = false; var currentQueue; var queueIndex = -1; function cleanUpNextTick() { if (!draining || !currentQueue) { return; } draining = false; if (currentQueue.length) { queue = currentQueue.concat(queue); } else { queueIndex = -1; } if (queue.length) { drainQueue(); } } function drainQueue() { if (draining) { return; } var timeout = runTimeout(cleanUpNextTick); draining = true; var len = queue.length; while(len) { currentQueue = queue; queue = []; while (++queueIndex < len) { if (currentQueue) { currentQueue[queueIndex].run(); } } queueIndex = -1; len = queue.length; } currentQueue = null; draining = false; runClearTimeout(timeout); } process.nextTick = function (fun) { var args = new Array(arguments.length - 1); if (arguments.length > 1) { for (var i = 1; i < arguments.length; i++) { args[i - 1] = arguments[i]; } } queue.push(new Item(fun, args)); if (queue.length === 1 && !draining) { runTimeout(drainQueue); } }; // v8 likes predictible objects function Item(fun, array) { this.fun = fun; this.array = array; } Item.prototype.run = function () { this.fun.apply(null, this.array); }; process.title = 'browser'; process.browser = true; process.env = {}; process.argv = []; process.version = ''; // empty string to avoid regexp issues process.versions = {}; function noop() {} process.on = noop; process.addListener = noop; process.once = noop; process.off = noop; process.removeListener = noop; process.removeAllListeners = noop; process.emit = noop; process.binding = function (name) { throw new Error('process.binding is not supported'); }; process.cwd = function () { return '/' }; process.chdir = function (dir) { throw new Error('process.chdir is not supported'); }; process.umask = function() { return 0; }; /***/ }, /* 111 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var ReactCompositeComponent = __webpack_require__(112); var ReactEmptyComponent = __webpack_require__(116); var ReactHostComponent = __webpack_require__(117); var getNextDebugID = __webpack_require__(118); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); // To avoid a cyclic dependency, we create the final class in this module var ReactCompositeComponentWrapper = function (element) { this.construct(element); }; _assign(ReactCompositeComponentWrapper.prototype, ReactCompositeComponent, { _instantiateReactComponent: instantiateReactComponent }); function getDeclarationErrorAddendum(owner) { if (owner) { var name = owner.getName(); if (name) { return ' Check the render method of `' + name + '`.'; } } return ''; } /** * Check if the type reference is a known internal type. I.e. not a user * provided composite type. * * @param {function} type * @return {boolean} Returns true if this is a valid internal type. 
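// Sketch of the browser `process` shim above (comment only): nextTick pushes work onto an
// internal queue and schedules drainQueue via the cached setTimeout, so callbacks always run
// asynchronously, after the current call stack unwinds:
//   process.nextTick(function () { console.log('second'); });
//   console.log('first');        // logs 'first', then 'second' on a later timer tick
// Extra arguments are forwarded: process.nextTick(fn, a, b) eventually invokes fn(a, b).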
*/ function isInternalComponentType(type) { return typeof type === 'function' && typeof type.prototype !== 'undefined' && typeof type.prototype.mountComponent === 'function' && typeof type.prototype.receiveComponent === 'function'; } /** * Given a ReactNode, create an instance that will actually be mounted. * * @param {ReactNode} node * @param {boolean} shouldHaveDebugID * @return {object} A new instance of the element's constructor. * @protected */ function instantiateReactComponent(node, shouldHaveDebugID) { var instance; if (node === null || node === false) { instance = ReactEmptyComponent.create(instantiateReactComponent); } else if (typeof node === 'object') { var element = node; var type = element.type; if (typeof type !== 'function' && typeof type !== 'string') { var info = ''; if (false) { if (type === undefined || typeof type === 'object' && type !== null && Object.keys(type).length === 0) { info += ' You likely forgot to export your component from the file ' + 'it\'s defined in.'; } } info += getDeclarationErrorAddendum(element._owner); true ? false ? invariant(false, 'Element type is invalid: expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s', type == null ? type : typeof type, info) : _prodInvariant('130', type == null ? type : typeof type, info) : void 0; } // Special case string values if (typeof element.type === 'string') { instance = ReactHostComponent.createInternalComponent(element); } else if (isInternalComponentType(element.type)) { // This is temporarily available for custom components that are not string // representations. I.e. ART. Once those are updated to use the string // representation, we can drop this code path. instance = new element.type(element); // We renamed this. Allow the old name for compat. :( if (!instance.getHostNode) { instance.getHostNode = instance.getNativeNode; } } else { instance = new ReactCompositeComponentWrapper(element); } } else if (typeof node === 'string' || typeof node === 'number') { instance = ReactHostComponent.createInstanceForText(node); } else { true ? false ? invariant(false, 'Encountered invalid React node of type %s', typeof node) : _prodInvariant('131', typeof node) : void 0; } if (false) { process.env.NODE_ENV !== 'production' ? warning(typeof instance.mountComponent === 'function' && typeof instance.receiveComponent === 'function' && typeof instance.getHostNode === 'function' && typeof instance.unmountComponent === 'function', 'Only React Components can be mounted.') : void 0; } // These two fields are used by the DOM and ART diffing algorithms // respectively. Instead of using expandos on components, we should be // storing the state needed by the diffing algorithms elsewhere. instance._mountIndex = 0; instance._mountImage = null; if (false) { instance._debugID = shouldHaveDebugID ? getNextDebugID() : 0; } // Internal instances should fully constructed at this point, so they should // not get any new fields added to them at this point. if (false) { if (Object.preventExtensions) { Object.preventExtensions(instance); } } return instance; } module.exports = instantiateReactComponent; /***/ }, /* 112 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
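// Sketch of the dispatch performed by instantiateReactComponent above (comment only;
// MyComponent stands for any user-defined class or function component):
//   instantiateReactComponent(null) or (false)              -> ReactEmptyComponent.create(...)
//   instantiateReactComponent(React.createElement('div'))   -> ReactHostComponent.createInternalComponent(...)
//   instantiateReactComponent('hello') or (42)              -> ReactHostComponent.createInstanceForText(...)
//   instantiateReactComponent(React.createElement(MyComponent))
//                                                           -> new ReactCompositeComponentWrapper(element)
// Anything else (e.g. a plain object child) trips the 'Encountered invalid React node' invariant.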
* */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var React = __webpack_require__(3); var ReactComponentEnvironment = __webpack_require__(107); var ReactCurrentOwner = __webpack_require__(10); var ReactErrorUtils = __webpack_require__(42); var ReactInstanceMap = __webpack_require__(108); var ReactInstrumentation = __webpack_require__(59); var ReactNodeTypes = __webpack_require__(113); var ReactReconciler = __webpack_require__(56); if (false) { var checkReactTypeSpec = require('./checkReactTypeSpec'); } var emptyObject = __webpack_require__(20); var invariant = __webpack_require__(8); var shallowEqual = __webpack_require__(114); var shouldUpdateReactComponent = __webpack_require__(115); var warning = __webpack_require__(11); var CompositeTypes = { ImpureClass: 0, PureClass: 1, StatelessFunctional: 2 }; function StatelessComponent(Component) {} StatelessComponent.prototype.render = function () { var Component = ReactInstanceMap.get(this)._currentElement.type; var element = Component(this.props, this.context, this.updater); warnIfInvalidElement(Component, element); return element; }; function warnIfInvalidElement(Component, element) { if (false) { process.env.NODE_ENV !== 'production' ? warning(element === null || element === false || React.isValidElement(element), '%s(...): A valid React element (or null) must be returned. You may have ' + 'returned undefined, an array or some other invalid object.', Component.displayName || Component.name || 'Component') : void 0; process.env.NODE_ENV !== 'production' ? warning(!Component.childContextTypes, '%s(...): childContextTypes cannot be defined on a functional component.', Component.displayName || Component.name || 'Component') : void 0; } } function shouldConstruct(Component) { return !!(Component.prototype && Component.prototype.isReactComponent); } function isPureComponent(Component) { return !!(Component.prototype && Component.prototype.isPureReactComponent); } // Separated into a function to contain deoptimizations caused by try/finally. function measureLifeCyclePerf(fn, debugID, timerType) { if (debugID === 0) { // Top-level wrappers (see ReactMount) and empty components (see // ReactDOMEmptyComponent) are invisible to hooks and devtools. // Both are implementation details that should go away in the future. return fn(); } ReactInstrumentation.debugTool.onBeginLifeCycleTimer(debugID, timerType); try { return fn(); } finally { ReactInstrumentation.debugTool.onEndLifeCycleTimer(debugID, timerType); } } /** * ------------------ The Life-Cycle of a Composite Component ------------------ * * - constructor: Initialization of state. The instance is now retained. * - componentWillMount * - render * - [children's constructors] * - [children's componentWillMount and render] * - [children's componentDidMount] * - componentDidMount * * Update Phases: * - componentWillReceiveProps (only called if parent updated) * - shouldComponentUpdate * - componentWillUpdate * - render * - [children's constructors or receive props phases] * - componentDidUpdate * * - componentWillUnmount * - [children's componentWillUnmount] * - [children destroyed] * - (destroyed): The instance is now blank, released by React and ready for GC. * * ----------------------------------------------------------------------------- */ /** * An incrementing ID assigned to each component when it is mounted. This is * used to enforce the order in which `ReactUpdates` updates dirty components. 
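// Sketch of how the helpers above classify component types (comment only); they key off
// prototype flags that React.Component / React.PureComponent set on subclasses:
//   class Impure extends React.Component { render() { return null; } }     // shouldConstruct -> true
//   class Pure extends React.PureComponent { render() { return null; } }   // isPureComponent -> true
//   function Stateless(props) { return null; }                             // both checks -> false
// mountComponent then picks CompositeTypes.ImpureClass, PureClass, or StatelessFunctional accordingly.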
* * @private */ var nextMountID = 1; /** * @lends {ReactCompositeComponent.prototype} */ var ReactCompositeComponent = { /** * Base constructor for all composite component. * * @param {ReactElement} element * @final * @internal */ construct: function (element) { this._currentElement = element; this._rootNodeID = 0; this._compositeType = null; this._instance = null; this._hostParent = null; this._hostContainerInfo = null; // See ReactUpdateQueue this._updateBatchNumber = null; this._pendingElement = null; this._pendingStateQueue = null; this._pendingReplaceState = false; this._pendingForceUpdate = false; this._renderedNodeType = null; this._renderedComponent = null; this._context = null; this._mountOrder = 0; this._topLevelWrapper = null; // See ReactUpdates and ReactUpdateQueue. this._pendingCallbacks = null; // ComponentWillUnmount shall only be called once this._calledComponentWillUnmount = false; if (false) { this._warnedAboutRefsInRender = false; } }, /** * Initializes the component, renders markup, and registers event listeners. * * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @param {?object} hostParent * @param {?object} hostContainerInfo * @param {?object} context * @return {?string} Rendered markup to be inserted into the DOM. * @final * @internal */ mountComponent: function (transaction, hostParent, hostContainerInfo, context) { var _this = this; this._context = context; this._mountOrder = nextMountID++; this._hostParent = hostParent; this._hostContainerInfo = hostContainerInfo; var publicProps = this._currentElement.props; var publicContext = this._processContext(context); var Component = this._currentElement.type; var updateQueue = transaction.getUpdateQueue(); // Initialize the public class var doConstruct = shouldConstruct(Component); var inst = this._constructComponent(doConstruct, publicProps, publicContext, updateQueue); var renderedElement; // Support functional components if (!doConstruct && (inst == null || inst.render == null)) { renderedElement = inst; warnIfInvalidElement(Component, renderedElement); !(inst === null || inst === false || React.isValidElement(inst)) ? false ? invariant(false, '%s(...): A valid React element (or null) must be returned. You may have returned undefined, an array or some other invalid object.', Component.displayName || Component.name || 'Component') : _prodInvariant('105', Component.displayName || Component.name || 'Component') : void 0; inst = new StatelessComponent(Component); this._compositeType = CompositeTypes.StatelessFunctional; } else { if (isPureComponent(Component)) { this._compositeType = CompositeTypes.PureClass; } else { this._compositeType = CompositeTypes.ImpureClass; } } if (false) { // This will throw later in _renderValidatedComponent, but add an early // warning now to help debugging if (inst.render == null) { process.env.NODE_ENV !== 'production' ? warning(false, '%s(...): No `render` method found on the returned component ' + 'instance: you may have forgotten to define `render`.', Component.displayName || Component.name || 'Component') : void 0; } var propsMutated = inst.props !== publicProps; var componentName = Component.displayName || Component.name || 'Component'; process.env.NODE_ENV !== 'production' ? 
warning(inst.props === undefined || !propsMutated, '%s(...): When calling super() in `%s`, make sure to pass ' + 'up the same props that your component\'s constructor was passed.', componentName, componentName) : void 0; } // These should be set up in the constructor, but as a convenience for // simpler class abstractions, we set them up after the fact. inst.props = publicProps; inst.context = publicContext; inst.refs = emptyObject; inst.updater = updateQueue; this._instance = inst; // Store a reference from the instance back to the internal representation ReactInstanceMap.set(inst, this); if (false) { // Since plain JS classes are defined without any special initialization // logic, we can not catch common errors early. Therefore, we have to // catch them here, at initialization time, instead. process.env.NODE_ENV !== 'production' ? warning(!inst.getInitialState || inst.getInitialState.isReactClassApproved || inst.state, 'getInitialState was defined on %s, a plain JavaScript class. ' + 'This is only supported for classes created using React.createClass. ' + 'Did you mean to define a state property instead?', this.getName() || 'a component') : void 0; process.env.NODE_ENV !== 'production' ? warning(!inst.getDefaultProps || inst.getDefaultProps.isReactClassApproved, 'getDefaultProps was defined on %s, a plain JavaScript class. ' + 'This is only supported for classes created using React.createClass. ' + 'Use a static property to define defaultProps instead.', this.getName() || 'a component') : void 0; process.env.NODE_ENV !== 'production' ? warning(!inst.propTypes, 'propTypes was defined as an instance property on %s. Use a static ' + 'property to define propTypes instead.', this.getName() || 'a component') : void 0; process.env.NODE_ENV !== 'production' ? warning(!inst.contextTypes, 'contextTypes was defined as an instance property on %s. Use a ' + 'static property to define contextTypes instead.', this.getName() || 'a component') : void 0; process.env.NODE_ENV !== 'production' ? warning(typeof inst.componentShouldUpdate !== 'function', '%s has a method called ' + 'componentShouldUpdate(). Did you mean shouldComponentUpdate()? ' + 'The name is phrased as a question because the function is ' + 'expected to return a value.', this.getName() || 'A component') : void 0; process.env.NODE_ENV !== 'production' ? warning(typeof inst.componentDidUnmount !== 'function', '%s has a method called ' + 'componentDidUnmount(). But there is no such lifecycle method. ' + 'Did you mean componentWillUnmount()?', this.getName() || 'A component') : void 0; process.env.NODE_ENV !== 'production' ? warning(typeof inst.componentWillRecieveProps !== 'function', '%s has a method called ' + 'componentWillRecieveProps(). Did you mean componentWillReceiveProps()?', this.getName() || 'A component') : void 0; } var initialState = inst.state; if (initialState === undefined) { inst.state = initialState = null; } !(typeof initialState === 'object' && !Array.isArray(initialState)) ? false ? 
invariant(false, '%s.state: must be set to an object or null', this.getName() || 'ReactCompositeComponent') : _prodInvariant('106', this.getName() || 'ReactCompositeComponent') : void 0; this._pendingStateQueue = null; this._pendingReplaceState = false; this._pendingForceUpdate = false; var markup; if (inst.unstable_handleError) { markup = this.performInitialMountWithErrorHandling(renderedElement, hostParent, hostContainerInfo, transaction, context); } else { markup = this.performInitialMount(renderedElement, hostParent, hostContainerInfo, transaction, context); } if (inst.componentDidMount) { if (false) { transaction.getReactMountReady().enqueue(function () { measureLifeCyclePerf(function () { return inst.componentDidMount(); }, _this._debugID, 'componentDidMount'); }); } else { transaction.getReactMountReady().enqueue(inst.componentDidMount, inst); } } return markup; }, _constructComponent: function (doConstruct, publicProps, publicContext, updateQueue) { if (false) { ReactCurrentOwner.current = this; try { return this._constructComponentWithoutOwner(doConstruct, publicProps, publicContext, updateQueue); } finally { ReactCurrentOwner.current = null; } } else { return this._constructComponentWithoutOwner(doConstruct, publicProps, publicContext, updateQueue); } }, _constructComponentWithoutOwner: function (doConstruct, publicProps, publicContext, updateQueue) { var Component = this._currentElement.type; if (doConstruct) { if (false) { return measureLifeCyclePerf(function () { return new Component(publicProps, publicContext, updateQueue); }, this._debugID, 'ctor'); } else { return new Component(publicProps, publicContext, updateQueue); } } // This can still be an instance in case of factory components // but we'll count this as time spent rendering as the more common case. if (false) { return measureLifeCyclePerf(function () { return Component(publicProps, publicContext, updateQueue); }, this._debugID, 'render'); } else { return Component(publicProps, publicContext, updateQueue); } }, performInitialMountWithErrorHandling: function (renderedElement, hostParent, hostContainerInfo, transaction, context) { var markup; var checkpoint = transaction.checkpoint(); try { markup = this.performInitialMount(renderedElement, hostParent, hostContainerInfo, transaction, context); } catch (e) { // Roll back to checkpoint, handle error (which may add items to the transaction), and take a new checkpoint transaction.rollback(checkpoint); this._instance.unstable_handleError(e); if (this._pendingStateQueue) { this._instance.state = this._processPendingState(this._instance.props, this._instance.context); } checkpoint = transaction.checkpoint(); this._renderedComponent.unmountComponent(true); transaction.rollback(checkpoint); // Try again - we've informed the component about the error, so they can render an error message this time. // If this throws again, the error will bubble up (and can be caught by a higher error boundary). 
markup = this.performInitialMount(renderedElement, hostParent, hostContainerInfo, transaction, context); } return markup; }, performInitialMount: function (renderedElement, hostParent, hostContainerInfo, transaction, context) { var inst = this._instance; var debugID = 0; if (false) { debugID = this._debugID; } if (inst.componentWillMount) { if (false) { measureLifeCyclePerf(function () { return inst.componentWillMount(); }, debugID, 'componentWillMount'); } else { inst.componentWillMount(); } // When mounting, calls to `setState` by `componentWillMount` will set // `this._pendingStateQueue` without triggering a re-render. if (this._pendingStateQueue) { inst.state = this._processPendingState(inst.props, inst.context); } } // If not a stateless component, we now render if (renderedElement === undefined) { renderedElement = this._renderValidatedComponent(); } var nodeType = ReactNodeTypes.getType(renderedElement); this._renderedNodeType = nodeType; var child = this._instantiateReactComponent(renderedElement, nodeType !== ReactNodeTypes.EMPTY /* shouldHaveDebugID */ ); this._renderedComponent = child; var markup = ReactReconciler.mountComponent(child, transaction, hostParent, hostContainerInfo, this._processChildContext(context), debugID); if (false) { if (debugID !== 0) { var childDebugIDs = child._debugID !== 0 ? [child._debugID] : []; ReactInstrumentation.debugTool.onSetChildren(debugID, childDebugIDs); } } return markup; }, getHostNode: function () { return ReactReconciler.getHostNode(this._renderedComponent); }, /** * Releases any resources allocated by `mountComponent`. * * @final * @internal */ unmountComponent: function (safely) { if (!this._renderedComponent) { return; } var inst = this._instance; if (inst.componentWillUnmount && !inst._calledComponentWillUnmount) { inst._calledComponentWillUnmount = true; if (safely) { var name = this.getName() + '.componentWillUnmount()'; ReactErrorUtils.invokeGuardedCallback(name, inst.componentWillUnmount.bind(inst)); } else { if (false) { measureLifeCyclePerf(function () { return inst.componentWillUnmount(); }, this._debugID, 'componentWillUnmount'); } else { inst.componentWillUnmount(); } } } if (this._renderedComponent) { ReactReconciler.unmountComponent(this._renderedComponent, safely); this._renderedNodeType = null; this._renderedComponent = null; this._instance = null; } // Reset pending fields // Even if this component is scheduled for another update in ReactUpdates, // it would still be ignored because these fields are reset. this._pendingStateQueue = null; this._pendingReplaceState = false; this._pendingForceUpdate = false; this._pendingCallbacks = null; this._pendingElement = null; // These fields do not really need to be reset since this object is no // longer accessible. this._context = null; this._rootNodeID = 0; this._topLevelWrapper = null; // Delete the reference from the instance to this internal representation // which allow the internals to be properly cleaned up even if the user // leaks a reference to the public instance. ReactInstanceMap.remove(inst); // Some existing components rely on inst.props even after they've been // destroyed (in event handlers). 
// TODO: inst.props = null; // TODO: inst.state = null; // TODO: inst.context = null; }, /** * Filters the context object to only contain keys specified in * `contextTypes` * * @param {object} context * @return {?object} * @private */ _maskContext: function (context) { var Component = this._currentElement.type; var contextTypes = Component.contextTypes; if (!contextTypes) { return emptyObject; } var maskedContext = {}; for (var contextName in contextTypes) { maskedContext[contextName] = context[contextName]; } return maskedContext; }, /** * Filters the context object to only contain keys specified in * `contextTypes`, and asserts that they are valid. * * @param {object} context * @return {?object} * @private */ _processContext: function (context) { var maskedContext = this._maskContext(context); if (false) { var Component = this._currentElement.type; if (Component.contextTypes) { this._checkContextTypes(Component.contextTypes, maskedContext, 'context'); } } return maskedContext; }, /** * @param {object} currentContext * @return {object} * @private */ _processChildContext: function (currentContext) { var Component = this._currentElement.type; var inst = this._instance; var childContext; if (inst.getChildContext) { if (false) { ReactInstrumentation.debugTool.onBeginProcessingChildContext(); try { childContext = inst.getChildContext(); } finally { ReactInstrumentation.debugTool.onEndProcessingChildContext(); } } else { childContext = inst.getChildContext(); } } if (childContext) { !(typeof Component.childContextTypes === 'object') ? false ? invariant(false, '%s.getChildContext(): childContextTypes must be defined in order to use getChildContext().', this.getName() || 'ReactCompositeComponent') : _prodInvariant('107', this.getName() || 'ReactCompositeComponent') : void 0; if (false) { this._checkContextTypes(Component.childContextTypes, childContext, 'childContext'); } for (var name in childContext) { !(name in Component.childContextTypes) ? false ? invariant(false, '%s.getChildContext(): key "%s" is not defined in childContextTypes.', this.getName() || 'ReactCompositeComponent', name) : _prodInvariant('108', this.getName() || 'ReactCompositeComponent', name) : void 0; } return _assign({}, currentContext, childContext); } return currentContext; }, /** * Assert that the context types are valid * * @param {object} typeSpecs Map of context field to a ReactPropType * @param {object} values Runtime values that need to be type-checked * @param {string} location e.g. "prop", "context", "child context" * @private */ _checkContextTypes: function (typeSpecs, values, location) { if (false) { checkReactTypeSpec(typeSpecs, values, location, this.getName(), null, this._debugID); } }, receiveComponent: function (nextElement, transaction, nextContext) { var prevElement = this._currentElement; var prevContext = this._context; this._pendingElement = null; this.updateComponent(transaction, prevElement, nextElement, prevContext, nextContext); }, /** * If any of `_pendingElement`, `_pendingStateQueue`, or `_pendingForceUpdate` * is set, update the component. 
* * @param {ReactReconcileTransaction} transaction * @internal */ performUpdateIfNecessary: function (transaction) { if (this._pendingElement != null) { ReactReconciler.receiveComponent(this, this._pendingElement, transaction, this._context); } else if (this._pendingStateQueue !== null || this._pendingForceUpdate) { this.updateComponent(transaction, this._currentElement, this._currentElement, this._context, this._context); } else { this._updateBatchNumber = null; } }, /** * Perform an update to a mounted component. The componentWillReceiveProps and * shouldComponentUpdate methods are called, then (assuming the update isn't * skipped) the remaining update lifecycle methods are called and the DOM * representation is updated. * * By default, this implements React's rendering and reconciliation algorithm. * Sophisticated clients may wish to override this. * * @param {ReactReconcileTransaction} transaction * @param {ReactElement} prevParentElement * @param {ReactElement} nextParentElement * @internal * @overridable */ updateComponent: function (transaction, prevParentElement, nextParentElement, prevUnmaskedContext, nextUnmaskedContext) { var inst = this._instance; !(inst != null) ? false ? invariant(false, 'Attempted to update component `%s` that has already been unmounted (or failed to mount).', this.getName() || 'ReactCompositeComponent') : _prodInvariant('136', this.getName() || 'ReactCompositeComponent') : void 0; var willReceive = false; var nextContext; // Determine if the context has changed or not if (this._context === nextUnmaskedContext) { nextContext = inst.context; } else { nextContext = this._processContext(nextUnmaskedContext); willReceive = true; } var prevProps = prevParentElement.props; var nextProps = nextParentElement.props; // Not a simple state update but a props update if (prevParentElement !== nextParentElement) { willReceive = true; } // An update here will schedule an update but immediately set // _pendingStateQueue which will ensure that any state updates gets // immediately reconciled instead of waiting for the next batch. if (willReceive && inst.componentWillReceiveProps) { if (false) { measureLifeCyclePerf(function () { return inst.componentWillReceiveProps(nextProps, nextContext); }, this._debugID, 'componentWillReceiveProps'); } else { inst.componentWillReceiveProps(nextProps, nextContext); } } var nextState = this._processPendingState(nextProps, nextContext); var shouldUpdate = true; if (!this._pendingForceUpdate) { if (inst.shouldComponentUpdate) { if (false) { shouldUpdate = measureLifeCyclePerf(function () { return inst.shouldComponentUpdate(nextProps, nextState, nextContext); }, this._debugID, 'shouldComponentUpdate'); } else { shouldUpdate = inst.shouldComponentUpdate(nextProps, nextState, nextContext); } } else { if (this._compositeType === CompositeTypes.PureClass) { shouldUpdate = !shallowEqual(prevProps, nextProps) || !shallowEqual(inst.state, nextState); } } } if (false) { process.env.NODE_ENV !== 'production' ? warning(shouldUpdate !== undefined, '%s.shouldComponentUpdate(): Returned undefined instead of a ' + 'boolean value. Make sure to return true or false.', this.getName() || 'ReactCompositeComponent') : void 0; } this._updateBatchNumber = null; if (shouldUpdate) { this._pendingForceUpdate = false; // Will set `this.props`, `this.state` and `this.context`. 
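// Sketch (comment only): for CompositeTypes.PureClass the shouldUpdate decision above falls back
// to shallow comparisons of props and state, e.g.
//   shallowEqual({ a: 1 }, { a: 1 })     -> true   (bail out, skip re-render)
//   shallowEqual({ a: {} }, { a: {} })   -> false  (different object identity, re-render)
// A user-defined shouldComponentUpdate always takes precedence over this shallow check.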
this._performComponentUpdate(nextParentElement, nextProps, nextState, nextContext, transaction, nextUnmaskedContext); } else { // If it's determined that a component should not update, we still want // to set props and state but we shortcut the rest of the update. this._currentElement = nextParentElement; this._context = nextUnmaskedContext; inst.props = nextProps; inst.state = nextState; inst.context = nextContext; } }, _processPendingState: function (props, context) { var inst = this._instance; var queue = this._pendingStateQueue; var replace = this._pendingReplaceState; this._pendingReplaceState = false; this._pendingStateQueue = null; if (!queue) { return inst.state; } if (replace && queue.length === 1) { return queue[0]; } var nextState = _assign({}, replace ? queue[0] : inst.state); for (var i = replace ? 1 : 0; i < queue.length; i++) { var partial = queue[i]; _assign(nextState, typeof partial === 'function' ? partial.call(inst, nextState, props, context) : partial); } return nextState; }, /** * Merges new props and state, notifies delegate methods of update and * performs update. * * @param {ReactElement} nextElement Next element * @param {object} nextProps Next public object to set as properties. * @param {?object} nextState Next object to set as state. * @param {?object} nextContext Next public object to set as context. * @param {ReactReconcileTransaction} transaction * @param {?object} unmaskedContext * @private */ _performComponentUpdate: function (nextElement, nextProps, nextState, nextContext, transaction, unmaskedContext) { var _this2 = this; var inst = this._instance; var hasComponentDidUpdate = Boolean(inst.componentDidUpdate); var prevProps; var prevState; var prevContext; if (hasComponentDidUpdate) { prevProps = inst.props; prevState = inst.state; prevContext = inst.context; } if (inst.componentWillUpdate) { if (false) { measureLifeCyclePerf(function () { return inst.componentWillUpdate(nextProps, nextState, nextContext); }, this._debugID, 'componentWillUpdate'); } else { inst.componentWillUpdate(nextProps, nextState, nextContext); } } this._currentElement = nextElement; this._context = unmaskedContext; inst.props = nextProps; inst.state = nextState; inst.context = nextContext; this._updateRenderedComponent(transaction, unmaskedContext); if (hasComponentDidUpdate) { if (false) { transaction.getReactMountReady().enqueue(function () { measureLifeCyclePerf(inst.componentDidUpdate.bind(inst, prevProps, prevState, prevContext), _this2._debugID, 'componentDidUpdate'); }); } else { transaction.getReactMountReady().enqueue(inst.componentDidUpdate.bind(inst, prevProps, prevState, prevContext), inst); } } }, /** * Call the component's `render` method and update the DOM accordingly. 
* * @param {ReactReconcileTransaction} transaction * @internal */ _updateRenderedComponent: function (transaction, context) { var prevComponentInstance = this._renderedComponent; var prevRenderedElement = prevComponentInstance._currentElement; var nextRenderedElement = this._renderValidatedComponent(); var debugID = 0; if (false) { debugID = this._debugID; } if (shouldUpdateReactComponent(prevRenderedElement, nextRenderedElement)) { ReactReconciler.receiveComponent(prevComponentInstance, nextRenderedElement, transaction, this._processChildContext(context)); } else { var oldHostNode = ReactReconciler.getHostNode(prevComponentInstance); ReactReconciler.unmountComponent(prevComponentInstance, false); var nodeType = ReactNodeTypes.getType(nextRenderedElement); this._renderedNodeType = nodeType; var child = this._instantiateReactComponent(nextRenderedElement, nodeType !== ReactNodeTypes.EMPTY /* shouldHaveDebugID */ ); this._renderedComponent = child; var nextMarkup = ReactReconciler.mountComponent(child, transaction, this._hostParent, this._hostContainerInfo, this._processChildContext(context), debugID); if (false) { if (debugID !== 0) { var childDebugIDs = child._debugID !== 0 ? [child._debugID] : []; ReactInstrumentation.debugTool.onSetChildren(debugID, childDebugIDs); } } this._replaceNodeWithMarkup(oldHostNode, nextMarkup, prevComponentInstance); } }, /** * Overridden in shallow rendering. * * @protected */ _replaceNodeWithMarkup: function (oldHostNode, nextMarkup, prevInstance) { ReactComponentEnvironment.replaceNodeWithMarkup(oldHostNode, nextMarkup, prevInstance); }, /** * @protected */ _renderValidatedComponentWithoutOwnerOrContext: function () { var inst = this._instance; var renderedElement; if (false) { renderedElement = measureLifeCyclePerf(function () { return inst.render(); }, this._debugID, 'render'); } else { renderedElement = inst.render(); } if (false) { // We allow auto-mocks to proceed as if they're returning null. if (renderedElement === undefined && inst.render._isMockFunction) { // This is probably bad practice. Consider warning here and // deprecating this convenience. renderedElement = null; } } return renderedElement; }, /** * @private */ _renderValidatedComponent: function () { var renderedElement; if (("production") !== 'production' || this._compositeType !== CompositeTypes.StatelessFunctional) { ReactCurrentOwner.current = this; try { renderedElement = this._renderValidatedComponentWithoutOwnerOrContext(); } finally { ReactCurrentOwner.current = null; } } else { renderedElement = this._renderValidatedComponentWithoutOwnerOrContext(); } !( // TODO: An `isValidNode` function would probably be more appropriate renderedElement === null || renderedElement === false || React.isValidElement(renderedElement)) ? false ? invariant(false, '%s.render(): A valid React element (or null) must be returned. You may have returned undefined, an array or some other invalid object.', this.getName() || 'ReactCompositeComponent') : _prodInvariant('109', this.getName() || 'ReactCompositeComponent') : void 0; return renderedElement; }, /** * Lazily allocates the refs object and stores `component` as `ref`. * * @param {string} ref Reference name. * @param {component} component Component to store as `ref`. * @final * @private */ attachRef: function (ref, component) { var inst = this.getPublicInstance(); !(inst != null) ? false ? 
invariant(false, 'Stateless function components cannot have refs.') : _prodInvariant('110') : void 0; var publicComponentInstance = component.getPublicInstance(); if (false) { var componentName = component && component.getName ? component.getName() : 'a component'; process.env.NODE_ENV !== 'production' ? warning(publicComponentInstance != null || component._compositeType !== CompositeTypes.StatelessFunctional, 'Stateless function components cannot be given refs ' + '(See ref "%s" in %s created by %s). ' + 'Attempts to access this ref will fail.', ref, componentName, this.getName()) : void 0; } var refs = inst.refs === emptyObject ? inst.refs = {} : inst.refs; refs[ref] = publicComponentInstance; }, /** * Detaches a reference name. * * @param {string} ref Name to dereference. * @final * @private */ detachRef: function (ref) { var refs = this.getPublicInstance().refs; delete refs[ref]; }, /** * Get a text description of the component that can be used to identify it * in error messages. * @return {string} The name or null. * @internal */ getName: function () { var type = this._currentElement.type; var constructor = this._instance && this._instance.constructor; return type.displayName || constructor && constructor.displayName || type.name || constructor && constructor.name || null; }, /** * Get the publicly accessible representation of this component - i.e. what * is exposed by refs and returned by render. Can be null for stateless * components. * * @return {ReactComponent} the public component instance. * @internal */ getPublicInstance: function () { var inst = this._instance; if (this._compositeType === CompositeTypes.StatelessFunctional) { return null; } return inst; }, // Stub _instantiateReactComponent: null }; module.exports = ReactCompositeComponent; /***/ }, /* 113 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var React = __webpack_require__(3); var invariant = __webpack_require__(8); var ReactNodeTypes = { HOST: 0, COMPOSITE: 1, EMPTY: 2, getType: function (node) { if (node === null || node === false) { return ReactNodeTypes.EMPTY; } else if (React.isValidElement(node)) { if (typeof node.type === 'function') { return ReactNodeTypes.COMPOSITE; } else { return ReactNodeTypes.HOST; } } true ? false ? invariant(false, 'Unexpected node: %s', node) : _prodInvariant('26', node) : void 0; } }; module.exports = ReactNodeTypes; /***/ }, /* 114 */ /***/ function(module, exports) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
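// Sketch of ReactNodeTypes.getType above (comment only; MyComponent is any function/class type):
//   getType(null) or getType(false)            -> ReactNodeTypes.EMPTY      (2)
//   getType(React.createElement('div'))        -> ReactNodeTypes.HOST       (0)
//   getType(React.createElement(MyComponent))  -> ReactNodeTypes.COMPOSITE  (1)
// Anything else (plain objects, bare text) hits the 'Unexpected node' invariant.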
* * @typechecks * */ /*eslint-disable no-self-compare */ 'use strict'; var hasOwnProperty = Object.prototype.hasOwnProperty; /** * inlined Object.is polyfill to avoid requiring consumers ship their own * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is */ function is(x, y) { // SameValue algorithm if (x === y) { // Steps 1-5, 7-10 // Steps 6.b-6.e: +0 != -0 // Added the nonzero y check to make Flow happy, but it is redundant return x !== 0 || y !== 0 || 1 / x === 1 / y; } else { // Step 6.a: NaN == NaN return x !== x && y !== y; } } /** * Performs equality by iterating through keys on an object and returning false * when any key has values which are not strictly equal between the arguments. * Returns true when the values of all keys are strictly equal. */ function shallowEqual(objA, objB) { if (is(objA, objB)) { return true; } if (typeof objA !== 'object' || objA === null || typeof objB !== 'object' || objB === null) { return false; } var keysA = Object.keys(objA); var keysB = Object.keys(objB); if (keysA.length !== keysB.length) { return false; } // Test for A's keys different from B. for (var i = 0; i < keysA.length; i++) { if (!hasOwnProperty.call(objB, keysA[i]) || !is(objA[keysA[i]], objB[keysA[i]])) { return false; } } return true; } module.exports = shallowEqual; /***/ }, /* 115 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * Given a `prevElement` and `nextElement`, determines if the existing * instance should be updated as opposed to being destroyed or replaced by a new * instance. Both arguments are elements. This ensures that this logic can * operate on stateless trees without any backing instance. * * @param {?object} prevElement * @param {?object} nextElement * @return {boolean} True if the existing instance should be updated. * @protected */ function shouldUpdateReactComponent(prevElement, nextElement) { var prevEmpty = prevElement === null || prevElement === false; var nextEmpty = nextElement === null || nextElement === false; if (prevEmpty || nextEmpty) { return prevEmpty === nextEmpty; } var prevType = typeof prevElement; var nextType = typeof nextElement; if (prevType === 'string' || prevType === 'number') { return nextType === 'string' || nextType === 'number'; } else { return nextType === 'object' && prevElement.type === nextElement.type && prevElement.key === nextElement.key; } } module.exports = shouldUpdateReactComponent; /***/ }, /* 116 */ /***/ function(module, exports) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
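// Sketch of shouldUpdateReactComponent above (comment only) — it drives the reconciler's
// "update in place vs. tear down and remount" choice (createElement = React.createElement):
//   shouldUpdateReactComponent(null, false)                                          -> true  (both empty)
//   shouldUpdateReactComponent('abc', 123)                                           -> true  (both text)
//   shouldUpdateReactComponent(createElement('div'), createElement('div', {id: 1}))  -> true  (same type/key)
//   shouldUpdateReactComponent(createElement('div'), createElement('span'))          -> false (remount)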
* */ 'use strict'; var emptyComponentFactory; var ReactEmptyComponentInjection = { injectEmptyComponentFactory: function (factory) { emptyComponentFactory = factory; } }; var ReactEmptyComponent = { create: function (instantiate) { return emptyComponentFactory(instantiate); } }; ReactEmptyComponent.injection = ReactEmptyComponentInjection; module.exports = ReactEmptyComponent; /***/ }, /* 117 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); var genericComponentClass = null; var textComponentClass = null; var ReactHostComponentInjection = { // This accepts a class that receives the tag string. This is a catch all // that can render any kind of tag. injectGenericComponentClass: function (componentClass) { genericComponentClass = componentClass; }, // This accepts a text component class that takes the text string to be // rendered as props. injectTextComponentClass: function (componentClass) { textComponentClass = componentClass; } }; /** * Get a host internal component class for a specific tag. * * @param {ReactElement} element The element to create. * @return {function} The internal class constructor function. */ function createInternalComponent(element) { !genericComponentClass ? false ? invariant(false, 'There is no registered component for the tag %s', element.type) : _prodInvariant('111', element.type) : void 0; return new genericComponentClass(element); } /** * @param {ReactText} text * @return {ReactComponent} */ function createInstanceForText(text) { return new textComponentClass(text); } /** * @param {ReactComponent} component * @return {boolean} */ function isTextComponent(component) { return component instanceof textComponentClass; } var ReactHostComponent = { createInternalComponent: createInternalComponent, createInstanceForText: createInstanceForText, isTextComponent: isTextComponent, injection: ReactHostComponentInjection }; module.exports = ReactHostComponent; /***/ }, /* 118 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var nextDebugID = 1; function getNextDebugID() { return nextDebugID++; } module.exports = getNextDebugID; /***/ }, /* 119 */ 17, /* 120 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
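// Sketch (comment only): ReactHostComponent above is renderer-agnostic; a concrete renderer is
// expected to inject its host classes at startup, roughly along these lines:
//   ReactHostComponent.injection.injectGenericComponentClass(ReactDOMComponent);
//   ReactHostComponent.injection.injectTextComponentClass(ReactDOMTextComponent);
// (ReactDOMComponent / ReactDOMTextComponent name the DOM renderer's classes for illustration;
// the actual call sites live in the renderer's default-injection module, not in this file.)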
* */ 'use strict'; var _prodInvariant = __webpack_require__(32); var ReactCurrentOwner = __webpack_require__(10); var REACT_ELEMENT_TYPE = __webpack_require__(121); var getIteratorFn = __webpack_require__(122); var invariant = __webpack_require__(8); var KeyEscapeUtils = __webpack_require__(119); var warning = __webpack_require__(11); var SEPARATOR = '.'; var SUBSEPARATOR = ':'; /** * This is inlined from ReactElement since this file is shared between * isomorphic and renderers. We could extract this to a * */ /** * TODO: Test that a single child and an array with one item have the same key * pattern. */ var didWarnAboutMaps = false; /** * Generate a key string that identifies a component within a set. * * @param {*} component A component that could contain a manual key. * @param {number} index Index that is used if a manual key is not provided. * @return {string} */ function getComponentKey(component, index) { // Do some typechecking here since we call this blindly. We want to ensure // that we don't block potential future ES APIs. if (component && typeof component === 'object' && component.key != null) { // Explicit key return KeyEscapeUtils.escape(component.key); } // Implicit key determined by the index in the set return index.toString(36); } /** * @param {?*} children Children tree container. * @param {!string} nameSoFar Name of the key path so far. * @param {!function} callback Callback to invoke with each child found. * @param {?*} traverseContext Used to pass information throughout the traversal * process. * @return {!number} The number of children in this subtree. */ function traverseAllChildrenImpl(children, nameSoFar, callback, traverseContext) { var type = typeof children; if (type === 'undefined' || type === 'boolean') { // All of the above are perceived as null. children = null; } if (children === null || type === 'string' || type === 'number' || // The following is inlined from ReactElement. This means we can optimize // some checks. React Fiber also inlines this logic for similar purposes. type === 'object' && children.$$typeof === REACT_ELEMENT_TYPE) { callback(traverseContext, children, // If it's the only child, treat the name as if it was wrapped in an array // so that it's consistent if the number of children grows. nameSoFar === '' ? SEPARATOR + getComponentKey(children, 0) : nameSoFar); return 1; } var child; var nextName; var subtreeCount = 0; // Count of children found in the current subtree. var nextNamePrefix = nameSoFar === '' ? SEPARATOR : nameSoFar + SUBSEPARATOR; if (Array.isArray(children)) { for (var i = 0; i < children.length; i++) { child = children[i]; nextName = nextNamePrefix + getComponentKey(child, i); subtreeCount += traverseAllChildrenImpl(child, nextName, callback, traverseContext); } } else { var iteratorFn = getIteratorFn(children); if (iteratorFn) { var iterator = iteratorFn.call(children); var step; if (iteratorFn !== children.entries) { var ii = 0; while (!(step = iterator.next()).done) { child = step.value; nextName = nextNamePrefix + getComponentKey(child, ii++); subtreeCount += traverseAllChildrenImpl(child, nextName, callback, traverseContext); } } else { if (false) { var mapsAsChildrenAddendum = ''; if (ReactCurrentOwner.current) { var mapsAsChildrenOwnerName = ReactCurrentOwner.current.getName(); if (mapsAsChildrenOwnerName) { mapsAsChildrenAddendum = ' Check the render method of `' + mapsAsChildrenOwnerName + '`.'; } } process.env.NODE_ENV !== 'production' ? warning(didWarnAboutMaps, 'Using Maps as children is not yet fully supported. 
It is an ' + 'experimental feature that might be removed. Convert it to a ' + 'sequence / iterable of keyed ReactElements instead.%s', mapsAsChildrenAddendum) : void 0; didWarnAboutMaps = true; } // Iterator will provide entry [k,v] tuples rather than values. while (!(step = iterator.next()).done) { var entry = step.value; if (entry) { child = entry[1]; nextName = nextNamePrefix + KeyEscapeUtils.escape(entry[0]) + SUBSEPARATOR + getComponentKey(child, 0); subtreeCount += traverseAllChildrenImpl(child, nextName, callback, traverseContext); } } } } else if (type === 'object') { var addendum = ''; if (false) { addendum = ' If you meant to render a collection of children, use an array ' + 'instead or wrap the object using createFragment(object) from the ' + 'React add-ons.'; if (children._isReactElement) { addendum = ' It looks like you\'re using an element created by a different ' + 'version of React. Make sure to use only one copy of React.'; } if (ReactCurrentOwner.current) { var name = ReactCurrentOwner.current.getName(); if (name) { addendum += ' Check the render method of `' + name + '`.'; } } } var childrenString = String(children); true ? false ? invariant(false, 'Objects are not valid as a React child (found: %s).%s', childrenString === '[object Object]' ? 'object with keys {' + Object.keys(children).join(', ') + '}' : childrenString, addendum) : _prodInvariant('31', childrenString === '[object Object]' ? 'object with keys {' + Object.keys(children).join(', ') + '}' : childrenString, addendum) : void 0; } } return subtreeCount; } /** * Traverses children that are typically specified as `props.children`, but * might also be specified through attributes: * * - `traverseAllChildren(this.props.children, ...)` * - `traverseAllChildren(this.props.leftPanelChildren, ...)` * * The `traverseContext` is an optional argument that is passed through the * entire traversal. It can be used to store accumulations or anything else that * the callback might find relevant. * * @param {?*} children Children tree object. * @param {!function} callback To invoke upon traversing each child. * @param {?*} traverseContext Context for traversal. * @return {!number} The number of children in this subtree. */ function traverseAllChildren(children, callback, traverseContext) { if (children == null) { return 0; } return traverseAllChildrenImpl(children, '', callback, traverseContext); } module.exports = traverseAllChildren; /***/ }, /* 121 */ 14, /* 122 */ 16, /* 123 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2016-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
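// Sketch of the child-name scheme used by traverseAllChildren above (comment only):
//   traverseAllChildren(['a', 'b'], cb, ctx)        -> cb sees names '.0' and '.1'
//   a child element created with key="x"            -> name '.$x' (explicit keys are '$'-escaped
//                                                      by KeyEscapeUtils.escape)
//   nested arrays extend the path with ':' (SUBSEPARATOR), e.g. '.0:0' for [['a']]
// ReactChildReconciler uses these names as the keys of its prevChildren/nextChildren maps.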
* * */ 'use strict'; var _prodInvariant = __webpack_require__(7); var ReactCurrentOwner = __webpack_require__(10); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); function isNative(fn) { // Based on isNative() from Lodash var funcToString = Function.prototype.toString; var hasOwnProperty = Object.prototype.hasOwnProperty; var reIsNative = RegExp('^' + funcToString // Take an example native function source for comparison .call(hasOwnProperty) // Strip regex characters so we can use it for regex .replace(/[\\^$.*+?()[\]{}|]/g, '\\$&') // Remove hasOwnProperty from the template to make it generic .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'); try { var source = funcToString.call(fn); return reIsNative.test(source); } catch (err) { return false; } } var canUseCollections = // Array.from typeof Array.from === 'function' && // Map typeof Map === 'function' && isNative(Map) && // Map.prototype.keys Map.prototype != null && typeof Map.prototype.keys === 'function' && isNative(Map.prototype.keys) && // Set typeof Set === 'function' && isNative(Set) && // Set.prototype.keys Set.prototype != null && typeof Set.prototype.keys === 'function' && isNative(Set.prototype.keys); var setItem; var getItem; var removeItem; var getItemIDs; var addRoot; var removeRoot; var getRootIDs; if (canUseCollections) { var itemMap = new Map(); var rootIDSet = new Set(); setItem = function (id, item) { itemMap.set(id, item); }; getItem = function (id) { return itemMap.get(id); }; removeItem = function (id) { itemMap['delete'](id); }; getItemIDs = function () { return Array.from(itemMap.keys()); }; addRoot = function (id) { rootIDSet.add(id); }; removeRoot = function (id) { rootIDSet['delete'](id); }; getRootIDs = function () { return Array.from(rootIDSet.keys()); }; } else { var itemByKey = {}; var rootByKey = {}; // Use non-numeric keys to prevent V8 performance issues: // https://github.com/facebook/react/pull/7232 var getKeyFromID = function (id) { return '.' + id; }; var getIDFromKey = function (key) { return parseInt(key.substr(1), 10); }; setItem = function (id, item) { var key = getKeyFromID(id); itemByKey[key] = item; }; getItem = function (id) { var key = getKeyFromID(id); return itemByKey[key]; }; removeItem = function (id) { var key = getKeyFromID(id); delete itemByKey[key]; }; getItemIDs = function () { return Object.keys(itemByKey).map(getIDFromKey); }; addRoot = function (id) { var key = getKeyFromID(id); rootByKey[key] = true; }; removeRoot = function (id) { var key = getKeyFromID(id); delete rootByKey[key]; }; getRootIDs = function () { return Object.keys(rootByKey).map(getIDFromKey); }; } var unmountedIDs = []; function purgeDeep(id) { var item = getItem(id); if (item) { var childIDs = item.childIDs; removeItem(id); childIDs.forEach(purgeDeep); } } function describeComponentFrame(name, source, ownerName) { return '\n in ' + (name || 'Unknown') + (source ? ' (at ' + source.fileName.replace(/^.*[\\\/]/, '') + ':' + source.lineNumber + ')' : ownerName ? 
' (created by ' + ownerName + ')' : ''); } function getDisplayName(element) { if (element == null) { return '#empty'; } else if (typeof element === 'string' || typeof element === 'number') { return '#text'; } else if (typeof element.type === 'string') { return element.type; } else { return element.type.displayName || element.type.name || 'Unknown'; } } function describeID(id) { var name = ReactComponentTreeHook.getDisplayName(id); var element = ReactComponentTreeHook.getElement(id); var ownerID = ReactComponentTreeHook.getOwnerID(id); var ownerName; if (ownerID) { ownerName = ReactComponentTreeHook.getDisplayName(ownerID); } false ? warning(element, 'ReactComponentTreeHook: Missing React element for debugID %s when ' + 'building stack', id) : void 0; return describeComponentFrame(name, element && element._source, ownerName); } var ReactComponentTreeHook = { onSetChildren: function (id, nextChildIDs) { var item = getItem(id); !item ? false ? invariant(false, 'Item must have been set') : _prodInvariant('144') : void 0; item.childIDs = nextChildIDs; for (var i = 0; i < nextChildIDs.length; i++) { var nextChildID = nextChildIDs[i]; var nextChild = getItem(nextChildID); !nextChild ? false ? invariant(false, 'Expected hook events to fire for the child before its parent includes it in onSetChildren().') : _prodInvariant('140') : void 0; !(nextChild.childIDs != null || typeof nextChild.element !== 'object' || nextChild.element == null) ? false ? invariant(false, 'Expected onSetChildren() to fire for a container child before its parent includes it in onSetChildren().') : _prodInvariant('141') : void 0; !nextChild.isMounted ? false ? invariant(false, 'Expected onMountComponent() to fire for the child before its parent includes it in onSetChildren().') : _prodInvariant('71') : void 0; if (nextChild.parentID == null) { nextChild.parentID = id; // TODO: This shouldn't be necessary but mounting a new root during in // componentWillMount currently causes not-yet-mounted components to // be purged from our tree data so their parent id is missing. } !(nextChild.parentID === id) ? false ? invariant(false, 'Expected onBeforeMountComponent() parent and onSetChildren() to be consistent (%s has parents %s and %s).', nextChildID, nextChild.parentID, id) : _prodInvariant('142', nextChildID, nextChild.parentID, id) : void 0; } }, onBeforeMountComponent: function (id, element, parentID) { var item = { element: element, parentID: parentID, text: null, childIDs: [], isMounted: false, updateCount: 0 }; setItem(id, item); }, onBeforeUpdateComponent: function (id, element) { var item = getItem(id); if (!item || !item.isMounted) { // We may end up here as a result of setState() in componentWillUnmount(). // In this case, ignore the element. return; } item.element = element; }, onMountComponent: function (id) { var item = getItem(id); !item ? false ? invariant(false, 'Item must have been set') : _prodInvariant('144') : void 0; item.isMounted = true; var isRoot = item.parentID === 0; if (isRoot) { addRoot(id); } }, onUpdateComponent: function (id) { var item = getItem(id); if (!item || !item.isMounted) { // We may end up here as a result of setState() in componentWillUnmount(). // In this case, ignore the element. return; } item.updateCount++; }, onUnmountComponent: function (id) { var item = getItem(id); if (item) { // We need to check if it exists. // `item` might not exist if it is inside an error boundary, and a sibling // error boundary child threw while mounting. 
Then this instance never // got a chance to mount, but it still gets an unmounting event during // the error boundary cleanup. item.isMounted = false; var isRoot = item.parentID === 0; if (isRoot) { removeRoot(id); } } unmountedIDs.push(id); }, purgeUnmountedComponents: function () { if (ReactComponentTreeHook._preventPurging) { // Should only be used for testing. return; } for (var i = 0; i < unmountedIDs.length; i++) { var id = unmountedIDs[i]; purgeDeep(id); } unmountedIDs.length = 0; }, isMounted: function (id) { var item = getItem(id); return item ? item.isMounted : false; }, getCurrentStackAddendum: function (topElement) { var info = ''; if (topElement) { var name = getDisplayName(topElement); var owner = topElement._owner; info += describeComponentFrame(name, topElement._source, owner && owner.getName()); } var currentOwner = ReactCurrentOwner.current; var id = currentOwner && currentOwner._debugID; info += ReactComponentTreeHook.getStackAddendumByID(id); return info; }, getStackAddendumByID: function (id) { var info = ''; while (id) { info += describeID(id); id = ReactComponentTreeHook.getParentID(id); } return info; }, getChildIDs: function (id) { var item = getItem(id); return item ? item.childIDs : []; }, getDisplayName: function (id) { var element = ReactComponentTreeHook.getElement(id); if (!element) { return null; } return getDisplayName(element); }, getElement: function (id) { var item = getItem(id); return item ? item.element : null; }, getOwnerID: function (id) { var element = ReactComponentTreeHook.getElement(id); if (!element || !element._owner) { return null; } return element._owner._debugID; }, getParentID: function (id) { var item = getItem(id); return item ? item.parentID : null; }, getSource: function (id) { var item = getItem(id); var element = item ? item.element : null; var source = element != null ? element._source : null; return source; }, getText: function (id) { var element = ReactComponentTreeHook.getElement(id); if (typeof element === 'string') { return element; } else if (typeof element === 'number') { return '' + element; } else { return null; } }, getUpdateCount: function (id) { var item = getItem(id); return item ? item.updateCount : 0; }, getRootIDs: getRootIDs, getRegisteredIDs: getItemIDs }; module.exports = ReactComponentTreeHook; /***/ }, /* 124 */ /***/ function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(process) {/** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var KeyEscapeUtils = __webpack_require__(119); var traverseAllChildren = __webpack_require__(120); var warning = __webpack_require__(11); var ReactComponentTreeHook; if (typeof process !== 'undefined' && process.env && ("production") === 'test') { // Temporary hack. // Inline requires don't work well with Jest: // https://github.com/facebook/react/issues/7240 // Remove the inline requires when we don't need them anymore: // https://github.com/facebook/react/pull/7178 ReactComponentTreeHook = __webpack_require__(123); } /** * @param {function} traverseContext Context passed through traversal. * @param {?ReactComponent} child React child component. * @param {!string} name String name of key path to child. * @param {number=} selfDebugID Optional debugID of the current internal instance. 
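 *
 * For illustration, the flattened shape this callback accumulates (a sketch with
 * plain strings standing in for children):
 *
 *   flattenChildren(['a', null, 'b'])
 *   // -> { '.0': 'a', '.2': 'b' }  (null children are dropped; keys keep their index)
 *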
*/ function flattenSingleChildIntoContext(traverseContext, child, name, selfDebugID) { // We found a component instance. if (traverseContext && typeof traverseContext === 'object') { var result = traverseContext; var keyUnique = result[name] === undefined; if (false) { if (!ReactComponentTreeHook) { ReactComponentTreeHook = require('react/lib/ReactComponentTreeHook'); } if (!keyUnique) { process.env.NODE_ENV !== 'production' ? warning(false, 'flattenChildren(...): Encountered two children with the same key, ' + '`%s`. Child keys must be unique; when two children share a key, only ' + 'the first child will be used.%s', KeyEscapeUtils.unescape(name), ReactComponentTreeHook.getStackAddendumByID(selfDebugID)) : void 0; } } if (keyUnique && child != null) { result[name] = child; } } } /** * Flattens children that are typically specified as `props.children`. Any null * children will not be included in the resulting object. * @return {!object} flattened children keyed by name. */ function flattenChildren(children, selfDebugID) { if (children == null) { return children; } var result = {}; if (false) { traverseAllChildren(children, function (traverseContext, child, name) { return flattenSingleChildIntoContext(traverseContext, child, name, selfDebugID); }, result); } else { traverseAllChildren(children, flattenSingleChildIntoContext, result); } return result; } module.exports = flattenChildren; /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(110))) /***/ }, /* 125 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var PooledClass = __webpack_require__(47); var Transaction = __webpack_require__(60); var ReactInstrumentation = __webpack_require__(59); var ReactServerUpdateQueue = __webpack_require__(126); /** * Executed within the scope of the `Transaction` instance. Consider these as * being member methods, but with an implied ordering while being isolated from * each other. */ var TRANSACTION_WRAPPERS = []; if (false) { TRANSACTION_WRAPPERS.push({ initialize: ReactInstrumentation.debugTool.onBeginFlush, close: ReactInstrumentation.debugTool.onEndFlush }); } var noopCallbackQueue = { enqueue: function () {} }; /** * @class ReactServerRenderingTransaction * @param {boolean} renderToStaticMarkup */ function ReactServerRenderingTransaction(renderToStaticMarkup) { this.reinitializeTransaction(); this.renderToStaticMarkup = renderToStaticMarkup; this.useCreateElement = false; this.updateQueue = new ReactServerUpdateQueue(this); } var Mixin = { /** * @see Transaction * @abstract * @final * @return {array} Empty list of operation wrap procedures. */ getTransactionWrappers: function () { return TRANSACTION_WRAPPERS; }, /** * @return {object} The queue to collect `onDOMReady` callbacks with. */ getReactMountReady: function () { return noopCallbackQueue; }, /** * @return {object} The queue to collect React async events. */ getUpdateQueue: function () { return this.updateQueue; }, /** * `PooledClass` looks for this, and will invoke this before allowing this * instance to be reused. 
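 *
 * A minimal sketch of the pooling lifecycle this supports, assuming the usual
 * PooledClass contract of getPooled()/release() and an arbitrary renderFn:
 *
 *   var transaction = ReactServerRenderingTransaction.getPooled(false);
 *   try {
 *     transaction.perform(renderFn);
 *   } finally {
 *     ReactServerRenderingTransaction.release(transaction); // invokes destructor()
 *   }
 *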
*/ destructor: function () {}, checkpoint: function () {}, rollback: function () {} }; _assign(ReactServerRenderingTransaction.prototype, Transaction, Mixin); PooledClass.addPoolingTo(ReactServerRenderingTransaction); module.exports = ReactServerRenderingTransaction; /***/ }, /* 126 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var ReactUpdateQueue = __webpack_require__(127); var warning = __webpack_require__(11); function warnNoop(publicInstance, callerName) { if (false) { var constructor = publicInstance.constructor; process.env.NODE_ENV !== 'production' ? warning(false, '%s(...): Can only update a mounting component. ' + 'This usually means you called %s() outside componentWillMount() on the server. ' + 'This is a no-op. Please check the code for the %s component.', callerName, callerName, constructor && (constructor.displayName || constructor.name) || 'ReactClass') : void 0; } } /** * This is the update queue used for server rendering. * It delegates to ReactUpdateQueue while server rendering is in progress and * switches to ReactNoopUpdateQueue after the transaction has completed. * @class ReactServerUpdateQueue * @param {Transaction} transaction */ var ReactServerUpdateQueue = function () { function ReactServerUpdateQueue(transaction) { _classCallCheck(this, ReactServerUpdateQueue); this.transaction = transaction; } /** * Checks whether or not this composite component is mounted. * @param {ReactClass} publicInstance The instance we want to test. * @return {boolean} True if mounted, false otherwise. * @protected * @final */ ReactServerUpdateQueue.prototype.isMounted = function isMounted(publicInstance) { return false; }; /** * Enqueue a callback that will be executed after all the pending updates * have processed. * * @param {ReactClass} publicInstance The instance to use as `this` context. * @param {?function} callback Called after state is updated. * @internal */ ReactServerUpdateQueue.prototype.enqueueCallback = function enqueueCallback(publicInstance, callback, callerName) { if (this.transaction.isInTransaction()) { ReactUpdateQueue.enqueueCallback(publicInstance, callback, callerName); } }; /** * Forces an update. This should only be invoked when it is known with * certainty that we are **not** in a DOM transaction. * * You may want to call this when you know that some deeper aspect of the * component's state has changed but `setState` was not called. * * This will not invoke `shouldComponentUpdate`, but it will invoke * `componentWillUpdate` and `componentDidUpdate`. * * @param {ReactClass} publicInstance The instance that should rerender. * @internal */ ReactServerUpdateQueue.prototype.enqueueForceUpdate = function enqueueForceUpdate(publicInstance) { if (this.transaction.isInTransaction()) { ReactUpdateQueue.enqueueForceUpdate(publicInstance); } else { warnNoop(publicInstance, 'forceUpdate'); } }; /** * Replaces all of the state. Always use this or `setState` to mutate state. * You should treat `this.state` as immutable. 
* * There is no guarantee that `this.state` will be immediately updated, so * accessing `this.state` after calling this method may return the old value. * * @param {ReactClass} publicInstance The instance that should rerender. * @param {object|function} completeState Next state. * @internal */ ReactServerUpdateQueue.prototype.enqueueReplaceState = function enqueueReplaceState(publicInstance, completeState) { if (this.transaction.isInTransaction()) { ReactUpdateQueue.enqueueReplaceState(publicInstance, completeState); } else { warnNoop(publicInstance, 'replaceState'); } }; /** * Sets a subset of the state. This only exists because _pendingState is * internal. This provides a merging strategy that is not available to deep * properties which is confusing. TODO: Expose pendingState or don't use it * during the merge. * * @param {ReactClass} publicInstance The instance that should rerender. * @param {object|function} partialState Next partial state to be merged with state. * @internal */ ReactServerUpdateQueue.prototype.enqueueSetState = function enqueueSetState(publicInstance, partialState) { if (this.transaction.isInTransaction()) { ReactUpdateQueue.enqueueSetState(publicInstance, partialState); } else { warnNoop(publicInstance, 'setState'); } }; return ReactServerUpdateQueue; }(); module.exports = ReactServerUpdateQueue; /***/ }, /* 127 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var ReactCurrentOwner = __webpack_require__(10); var ReactInstanceMap = __webpack_require__(108); var ReactInstrumentation = __webpack_require__(59); var ReactUpdates = __webpack_require__(53); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); function enqueueUpdate(internalInstance) { ReactUpdates.enqueueUpdate(internalInstance); } function formatUnexpectedArgument(arg) { var type = typeof arg; if (type !== 'object') { return type; } var displayName = arg.constructor && arg.constructor.name || type; var keys = Object.keys(arg); if (keys.length > 0 && keys.length < 20) { return displayName + ' (keys: ' + keys.join(', ') + ')'; } return displayName; } function getInternalInstanceReadyForUpdate(publicInstance, callerName) { var internalInstance = ReactInstanceMap.get(publicInstance); if (!internalInstance) { if (false) { var ctor = publicInstance.constructor; // Only warn when we have a callerName. Otherwise we should be silent. // We're probably calling from enqueueCallback. We don't want to warn // there because we already warned for the corresponding lifecycle method. process.env.NODE_ENV !== 'production' ? warning(!callerName, '%s(...): Can only update a mounted or mounting component. ' + 'This usually means you called %s() on an unmounted component. ' + 'This is a no-op. Please check the code for the %s component.', callerName, callerName, ctor && (ctor.displayName || ctor.name) || 'ReactClass') : void 0; } return null; } if (false) { process.env.NODE_ENV !== 'production' ? warning(ReactCurrentOwner.current == null, '%s(...): Cannot update during an existing state transition (such as ' + 'within `render` or another component\'s constructor). 
Render methods ' + 'should be a pure function of props and state; constructor ' + 'side-effects are an anti-pattern, but can be moved to ' + '`componentWillMount`.', callerName) : void 0; } return internalInstance; } /** * ReactUpdateQueue allows for state updates to be scheduled into a later * reconciliation step. */ var ReactUpdateQueue = { /** * Checks whether or not this composite component is mounted. * @param {ReactClass} publicInstance The instance we want to test. * @return {boolean} True if mounted, false otherwise. * @protected * @final */ isMounted: function (publicInstance) { if (false) { var owner = ReactCurrentOwner.current; if (owner !== null) { process.env.NODE_ENV !== 'production' ? warning(owner._warnedAboutRefsInRender, '%s is accessing isMounted inside its render() function. ' + 'render() should be a pure function of props and state. It should ' + 'never access something that requires stale data from the previous ' + 'render, such as refs. Move this logic to componentDidMount and ' + 'componentDidUpdate instead.', owner.getName() || 'A component') : void 0; owner._warnedAboutRefsInRender = true; } } var internalInstance = ReactInstanceMap.get(publicInstance); if (internalInstance) { // During componentWillMount and render this will still be null but after // that will always render to something. At least for now. So we can use // this hack. return !!internalInstance._renderedComponent; } else { return false; } }, /** * Enqueue a callback that will be executed after all the pending updates * have processed. * * @param {ReactClass} publicInstance The instance to use as `this` context. * @param {?function} callback Called after state is updated. * @param {string} callerName Name of the calling function in the public API. * @internal */ enqueueCallback: function (publicInstance, callback, callerName) { ReactUpdateQueue.validateCallback(callback, callerName); var internalInstance = getInternalInstanceReadyForUpdate(publicInstance); // Previously we would throw an error if we didn't have an internal // instance. Since we want to make it a no-op instead, we mirror the same // behavior we have in other enqueue* methods. // We also need to ignore callbacks in componentWillMount. See // enqueueUpdates. if (!internalInstance) { return null; } if (internalInstance._pendingCallbacks) { internalInstance._pendingCallbacks.push(callback); } else { internalInstance._pendingCallbacks = [callback]; } // TODO: The callback here is ignored when setState is called from // componentWillMount. Either fix it or disallow doing so completely in // favor of getInitialState. Alternatively, we can disallow // componentWillMount during server-side rendering. enqueueUpdate(internalInstance); }, enqueueCallbackInternal: function (internalInstance, callback) { if (internalInstance._pendingCallbacks) { internalInstance._pendingCallbacks.push(callback); } else { internalInstance._pendingCallbacks = [callback]; } enqueueUpdate(internalInstance); }, /** * Forces an update. This should only be invoked when it is known with * certainty that we are **not** in a DOM transaction. * * You may want to call this when you know that some deeper aspect of the * component's state has changed but `setState` was not called. * * This will not invoke `shouldComponentUpdate`, but it will invoke * `componentWillUpdate` and `componentDidUpdate`. * * @param {ReactClass} publicInstance The instance that should rerender. 
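 *
 * A rough sketch of how component code reaches this queue; the updater wired up
 * for mounted components is typically this module (stated as an assumption here):
 *
 *   // Inside a method of a mounted React 15 class component:
 *   this.forceUpdate();           // -> updater.enqueueForceUpdate(this)
 *   this.setState({ count: 1 });  // -> updater.enqueueSetState(this, { count: 1 })
 *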
* @internal */ enqueueForceUpdate: function (publicInstance) { var internalInstance = getInternalInstanceReadyForUpdate(publicInstance, 'forceUpdate'); if (!internalInstance) { return; } internalInstance._pendingForceUpdate = true; enqueueUpdate(internalInstance); }, /** * Replaces all of the state. Always use this or `setState` to mutate state. * You should treat `this.state` as immutable. * * There is no guarantee that `this.state` will be immediately updated, so * accessing `this.state` after calling this method may return the old value. * * @param {ReactClass} publicInstance The instance that should rerender. * @param {object} completeState Next state. * @internal */ enqueueReplaceState: function (publicInstance, completeState) { var internalInstance = getInternalInstanceReadyForUpdate(publicInstance, 'replaceState'); if (!internalInstance) { return; } internalInstance._pendingStateQueue = [completeState]; internalInstance._pendingReplaceState = true; enqueueUpdate(internalInstance); }, /** * Sets a subset of the state. This only exists because _pendingState is * internal. This provides a merging strategy that is not available to deep * properties which is confusing. TODO: Expose pendingState or don't use it * during the merge. * * @param {ReactClass} publicInstance The instance that should rerender. * @param {object} partialState Next partial state to be merged with state. * @internal */ enqueueSetState: function (publicInstance, partialState) { if (false) { ReactInstrumentation.debugTool.onSetState(); process.env.NODE_ENV !== 'production' ? warning(partialState != null, 'setState(...): You passed an undefined or null state object; ' + 'instead, use forceUpdate().') : void 0; } var internalInstance = getInternalInstanceReadyForUpdate(publicInstance, 'setState'); if (!internalInstance) { return; } var queue = internalInstance._pendingStateQueue || (internalInstance._pendingStateQueue = []); queue.push(partialState); enqueueUpdate(internalInstance); }, enqueueElementInternal: function (internalInstance, nextElement, nextContext) { internalInstance._pendingElement = nextElement; // TODO: introduce _pendingContext instead of setting it directly. internalInstance._context = nextContext; enqueueUpdate(internalInstance); }, validateCallback: function (callback, callerName) { !(!callback || typeof callback === 'function') ? false ? invariant(false, '%s(...): Expected the last optional `callback` argument to be a function. Instead received: %s.', callerName, formatUnexpectedArgument(callback)) : _prodInvariant('122', callerName, formatUnexpectedArgument(callback)) : void 0; } }; module.exports = ReactUpdateQueue; /***/ }, /* 128 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
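 *
 * The module below warns when markup would be rearranged by the browser's HTML
 * parser. A sketch using the public API (mountNode is an assumed DOM element):
 *
 *   // In development this logs a warning along the lines of
 *   // "<tr> cannot appear as a child of <table>. Add a <tbody> ...":
 *   ReactDOM.render(
 *     React.createElement('table', null,
 *       React.createElement('tr', null,
 *         React.createElement('td', null, 'cell'))),
 *     mountNode);
 *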
* */ 'use strict'; var _assign = __webpack_require__(4); var emptyFunction = __webpack_require__(12); var warning = __webpack_require__(11); var validateDOMNesting = emptyFunction; if (false) { // This validation code was written based on the HTML5 parsing spec: // https://html.spec.whatwg.org/multipage/syntax.html#has-an-element-in-scope // // Note: this does not catch all invalid nesting, nor does it try to (as it's // not clear what practical benefit doing so provides); instead, we warn only // for cases where the parser will give a parse tree differing from what React // intended. For example, <b><div></div></b> is invalid but we don't warn // because it still parses correctly; we do warn for other cases like nested // <p> tags where the beginning of the second element implicitly closes the // first, causing a confusing mess. // https://html.spec.whatwg.org/multipage/syntax.html#special var specialTags = ['address', 'applet', 'area', 'article', 'aside', 'base', 'basefont', 'bgsound', 'blockquote', 'body', 'br', 'button', 'caption', 'center', 'col', 'colgroup', 'dd', 'details', 'dir', 'div', 'dl', 'dt', 'embed', 'fieldset', 'figcaption', 'figure', 'footer', 'form', 'frame', 'frameset', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'head', 'header', 'hgroup', 'hr', 'html', 'iframe', 'img', 'input', 'isindex', 'li', 'link', 'listing', 'main', 'marquee', 'menu', 'menuitem', 'meta', 'nav', 'noembed', 'noframes', 'noscript', 'object', 'ol', 'p', 'param', 'plaintext', 'pre', 'script', 'section', 'select', 'source', 'style', 'summary', 'table', 'tbody', 'td', 'template', 'textarea', 'tfoot', 'th', 'thead', 'title', 'tr', 'track', 'ul', 'wbr', 'xmp']; // https://html.spec.whatwg.org/multipage/syntax.html#has-an-element-in-scope var inScopeTags = ['applet', 'caption', 'html', 'table', 'td', 'th', 'marquee', 'object', 'template', // https://html.spec.whatwg.org/multipage/syntax.html#html-integration-point // TODO: Distinguish by namespace here -- for <title>, including it here // errs on the side of fewer warnings 'foreignObject', 'desc', 'title']; // https://html.spec.whatwg.org/multipage/syntax.html#has-an-element-in-button-scope var buttonScopeTags = inScopeTags.concat(['button']); // https://html.spec.whatwg.org/multipage/syntax.html#generate-implied-end-tags var impliedEndTags = ['dd', 'dt', 'li', 'option', 'optgroup', 'p', 'rp', 'rt']; var emptyAncestorInfo = { current: null, formTag: null, aTagInScope: null, buttonTagInScope: null, nobrTagInScope: null, pTagInButtonScope: null, listItemTagAutoclosing: null, dlItemTagAutoclosing: null }; var updatedAncestorInfo = function (oldInfo, tag, instance) { var ancestorInfo = _assign({}, oldInfo || emptyAncestorInfo); var info = { tag: tag, instance: instance }; if (inScopeTags.indexOf(tag) !== -1) { ancestorInfo.aTagInScope = null; ancestorInfo.buttonTagInScope = null; ancestorInfo.nobrTagInScope = null; } if (buttonScopeTags.indexOf(tag) !== -1) { ancestorInfo.pTagInButtonScope = null; } // See rules for 'li', 'dd', 'dt' start tags in // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inbody if (specialTags.indexOf(tag) !== -1 && tag !== 'address' && tag !== 'div' && tag !== 'p') { ancestorInfo.listItemTagAutoclosing = null; ancestorInfo.dlItemTagAutoclosing = null; } ancestorInfo.current = info; if (tag === 'form') { ancestorInfo.formTag = info; } if (tag === 'a') { ancestorInfo.aTagInScope = info; } if (tag === 'button') { ancestorInfo.buttonTagInScope = info; } if (tag === 'nobr') { ancestorInfo.nobrTagInScope = info; } if (tag === 'p') { 
ancestorInfo.pTagInButtonScope = info; } if (tag === 'li') { ancestorInfo.listItemTagAutoclosing = info; } if (tag === 'dd' || tag === 'dt') { ancestorInfo.dlItemTagAutoclosing = info; } return ancestorInfo; }; /** * Returns whether */ var isTagValidWithParent = function (tag, parentTag) { // First, let's check if we're in an unusual parsing mode... switch (parentTag) { // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inselect case 'select': return tag === 'option' || tag === 'optgroup' || tag === '#text'; case 'optgroup': return tag === 'option' || tag === '#text'; // Strictly speaking, seeing an <option> doesn't mean we're in a <select> // but case 'option': return tag === '#text'; // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-intd // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-incaption // No special behavior since these rules fall back to "in body" mode for // all except special table nodes which cause bad parsing behavior anyway. // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-intr case 'tr': return tag === 'th' || tag === 'td' || tag === 'style' || tag === 'script' || tag === 'template'; // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-intbody case 'tbody': case 'thead': case 'tfoot': return tag === 'tr' || tag === 'style' || tag === 'script' || tag === 'template'; // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-incolgroup case 'colgroup': return tag === 'col' || tag === 'template'; // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-intable case 'table': return tag === 'caption' || tag === 'colgroup' || tag === 'tbody' || tag === 'tfoot' || tag === 'thead' || tag === 'style' || tag === 'script' || tag === 'template'; // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inhead case 'head': return tag === 'base' || tag === 'basefont' || tag === 'bgsound' || tag === 'link' || tag === 'meta' || tag === 'title' || tag === 'noscript' || tag === 'noframes' || tag === 'style' || tag === 'script' || tag === 'template'; // https://html.spec.whatwg.org/multipage/semantics.html#the-html-element case 'html': return tag === 'head' || tag === 'body'; case '#document': return tag === 'html'; } // Probably in the "in body" parsing mode, so we outlaw only tag combos // where the parsing rules cause implicit opens or closes to be added. // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inbody switch (tag) { case 'h1': case 'h2': case 'h3': case 'h4': case 'h5': case 'h6': return parentTag !== 'h1' && parentTag !== 'h2' && parentTag !== 'h3' && parentTag !== 'h4' && parentTag !== 'h5' && parentTag !== 'h6'; case 'rp': case 'rt': return impliedEndTags.indexOf(parentTag) === -1; case 'body': case 'caption': case 'col': case 'colgroup': case 'frame': case 'head': case 'html': case 'tbody': case 'td': case 'tfoot': case 'th': case 'thead': case 'tr': // These tags are only valid with a few parents that have special child // parsing rules -- if we're down here, then none of those matched and // so we allow it only if we don't know what the parent is, as all other // cases are invalid. 
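// Illustrative results for this branch (example values only):
//   isTagValidWithParent('td', 'div') -> false  (parent is known, so the combination is rejected)
//   isTagValidWithParent('td', null)  -> true   (parent unknown, so it is allowed)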
return parentTag == null; } return true; }; /** * Returns whether */ var findInvalidAncestorForTag = function (tag, ancestorInfo) { switch (tag) { case 'address': case 'article': case 'aside': case 'blockquote': case 'center': case 'details': case 'dialog': case 'dir': case 'div': case 'dl': case 'fieldset': case 'figcaption': case 'figure': case 'footer': case 'header': case 'hgroup': case 'main': case 'menu': case 'nav': case 'ol': case 'p': case 'section': case 'summary': case 'ul': case 'pre': case 'listing': case 'table': case 'hr': case 'xmp': case 'h1': case 'h2': case 'h3': case 'h4': case 'h5': case 'h6': return ancestorInfo.pTagInButtonScope; case 'form': return ancestorInfo.formTag || ancestorInfo.pTagInButtonScope; case 'li': return ancestorInfo.listItemTagAutoclosing; case 'dd': case 'dt': return ancestorInfo.dlItemTagAutoclosing; case 'button': return ancestorInfo.buttonTagInScope; case 'a': // Spec says something about storing a list of markers, but it sounds // equivalent to this check. return ancestorInfo.aTagInScope; case 'nobr': return ancestorInfo.nobrTagInScope; } return null; }; /** * Given a ReactCompositeComponent instance, return a list of its recursive * owners, starting at the root and ending with the instance itself. */ var findOwnerStack = function (instance) { if (!instance) { return []; } var stack = []; do { stack.push(instance); } while (instance = instance._currentElement._owner); stack.reverse(); return stack; }; var didWarn = {}; validateDOMNesting = function (childTag, childText, childInstance, ancestorInfo) { ancestorInfo = ancestorInfo || emptyAncestorInfo; var parentInfo = ancestorInfo.current; var parentTag = parentInfo && parentInfo.tag; if (childText != null) { process.env.NODE_ENV !== 'production' ? warning(childTag == null, 'validateDOMNesting: when childText is passed, childTag should be null') : void 0; childTag = '#text'; } var invalidParent = isTagValidWithParent(childTag, parentTag) ? null : parentInfo; var invalidAncestor = invalidParent ? null : findInvalidAncestorForTag(childTag, ancestorInfo); var problematic = invalidParent || invalidAncestor; if (problematic) { var ancestorTag = problematic.tag; var ancestorInstance = problematic.instance; var childOwner = childInstance && childInstance._currentElement._owner; var ancestorOwner = ancestorInstance && ancestorInstance._currentElement._owner; var childOwners = findOwnerStack(childOwner); var ancestorOwners = findOwnerStack(ancestorOwner); var minStackLen = Math.min(childOwners.length, ancestorOwners.length); var i; var deepestCommon = -1; for (i = 0; i < minStackLen; i++) { if (childOwners[i] === ancestorOwners[i]) { deepestCommon = i; } else { break; } } var UNKNOWN = '(unknown)'; var childOwnerNames = childOwners.slice(deepestCommon + 1).map(function (inst) { return inst.getName() || UNKNOWN; }); var ancestorOwnerNames = ancestorOwners.slice(deepestCommon + 1).map(function (inst) { return inst.getName() || UNKNOWN; }); var ownerInfo = [].concat( // If the parent and child instances have a common owner ancestor, start // with that -- otherwise we just start with the parent's owners. deepestCommon !== -1 ? childOwners[deepestCommon].getName() || UNKNOWN : [], ancestorOwnerNames, ancestorTag, // If we're warning about an invalid (non-parent) ancestry, add '...' invalidAncestor ? 
['...'] : [], childOwnerNames, childTag).join(' > '); var warnKey = !!invalidParent + '|' + childTag + '|' + ancestorTag + '|' + ownerInfo; if (didWarn[warnKey]) { return; } didWarn[warnKey] = true; var tagDisplayName = childTag; var whitespaceInfo = ''; if (childTag === '#text') { if (/\S/.test(childText)) { tagDisplayName = 'Text nodes'; } else { tagDisplayName = 'Whitespace text nodes'; whitespaceInfo = ' Make sure you don\'t have any extra whitespace between tags on ' + 'each line of your source code.'; } } else { tagDisplayName = '<' + childTag + '>'; } if (invalidParent) { var info = ''; if (ancestorTag === 'table' && childTag === 'tr') { info += ' Add a <tbody> to your code to match the DOM tree generated by ' + 'the browser.'; } process.env.NODE_ENV !== 'production' ? warning(false, 'validateDOMNesting(...): %s cannot appear as a child of <%s>.%s ' + 'See %s.%s', tagDisplayName, ancestorTag, whitespaceInfo, ownerInfo, info) : void 0; } else { process.env.NODE_ENV !== 'production' ? warning(false, 'validateDOMNesting(...): %s cannot appear as a descendant of ' + '<%s>. See %s.', tagDisplayName, ancestorTag, ownerInfo) : void 0; } } }; validateDOMNesting.updatedAncestorInfo = updatedAncestorInfo; // For testing validateDOMNesting.isTagValidInContext = function (tag, ancestorInfo) { ancestorInfo = ancestorInfo || emptyAncestorInfo; var parentInfo = ancestorInfo.current; var parentTag = parentInfo && parentInfo.tag; return isTagValidWithParent(tag, parentTag) && !findInvalidAncestorForTag(tag, ancestorInfo); }; } module.exports = validateDOMNesting; /***/ }, /* 129 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var DOMLazyTree = __webpack_require__(73); var ReactDOMComponentTree = __webpack_require__(31); var ReactDOMEmptyComponent = function (instantiate) { // ReactCompositeComponent uses this: this._currentElement = null; // ReactDOMComponentTree uses these: this._hostNode = null; this._hostParent = null; this._hostContainerInfo = null; this._domID = 0; }; _assign(ReactDOMEmptyComponent.prototype, { mountComponent: function (transaction, hostParent, hostContainerInfo, context) { var domID = hostContainerInfo._idCounter++; this._domID = domID; this._hostParent = hostParent; this._hostContainerInfo = hostContainerInfo; var nodeValue = ' react-empty: ' + this._domID + ' '; if (transaction.useCreateElement) { var ownerDocument = hostContainerInfo._ownerDocument; var node = ownerDocument.createComment(nodeValue); ReactDOMComponentTree.precacheNode(this, node); return DOMLazyTree(node); } else { if (transaction.renderToStaticMarkup) { // Normally we'd insert a comment node, but since this is a situation // where React won't take over (static pages), we can simply return // nothing. return ''; } return '<!--' + nodeValue + '-->'; } }, receiveComponent: function () {}, getHostNode: function () { return ReactDOMComponentTree.getNodeFromInstance(this); }, unmountComponent: function () { ReactDOMComponentTree.uncacheNode(this); } }); module.exports = ReactDOMEmptyComponent; /***/ }, /* 130 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2015-present, Facebook, Inc. * All rights reserved. 
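 *
 * The traversal helpers below drive two-phase event dispatch. For a mounted chain
 * of internal instances A > B > C (names illustrative), traverseTwoPhase(C, fn, ev)
 * invokes the callback in this order:
 *
 *   fn(A, 'captured', ev); fn(B, 'captured', ev); fn(C, 'captured', ev);
 *   fn(C, 'bubbled',  ev); fn(B, 'bubbled',  ev); fn(A, 'bubbled',  ev);
 *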
* * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var invariant = __webpack_require__(8); /** * Return the lowest common ancestor of A and B, or null if they are in * different trees. */ function getLowestCommonAncestor(instA, instB) { !('_hostNode' in instA) ? false ? invariant(false, 'getNodeFromInstance: Invalid argument.') : _prodInvariant('33') : void 0; !('_hostNode' in instB) ? false ? invariant(false, 'getNodeFromInstance: Invalid argument.') : _prodInvariant('33') : void 0; var depthA = 0; for (var tempA = instA; tempA; tempA = tempA._hostParent) { depthA++; } var depthB = 0; for (var tempB = instB; tempB; tempB = tempB._hostParent) { depthB++; } // If A is deeper, crawl up. while (depthA - depthB > 0) { instA = instA._hostParent; depthA--; } // If B is deeper, crawl up. while (depthB - depthA > 0) { instB = instB._hostParent; depthB--; } // Walk in lockstep until we find a match. var depth = depthA; while (depth--) { if (instA === instB) { return instA; } instA = instA._hostParent; instB = instB._hostParent; } return null; } /** * Return if A is an ancestor of B. */ function isAncestor(instA, instB) { !('_hostNode' in instA) ? false ? invariant(false, 'isAncestor: Invalid argument.') : _prodInvariant('35') : void 0; !('_hostNode' in instB) ? false ? invariant(false, 'isAncestor: Invalid argument.') : _prodInvariant('35') : void 0; while (instB) { if (instB === instA) { return true; } instB = instB._hostParent; } return false; } /** * Return the parent instance of the passed-in instance. */ function getParentInstance(inst) { !('_hostNode' in inst) ? false ? invariant(false, 'getParentInstance: Invalid argument.') : _prodInvariant('36') : void 0; return inst._hostParent; } /** * Simulates the traversal of a two-phase, capture/bubble event dispatch. */ function traverseTwoPhase(inst, fn, arg) { var path = []; while (inst) { path.push(inst); inst = inst._hostParent; } var i; for (i = path.length; i-- > 0;) { fn(path[i], 'captured', arg); } for (i = 0; i < path.length; i++) { fn(path[i], 'bubbled', arg); } } /** * Traverses the ID hierarchy and invokes the supplied `cb` on any IDs that * should would receive a `mouseEnter` or `mouseLeave` event. * * Does not invoke the callback on the nearest common ancestor because nothing * "entered" or "left" that element. */ function traverseEnterLeave(from, to, fn, argFrom, argTo) { var common = from && to ? getLowestCommonAncestor(from, to) : null; var pathFrom = []; while (from && from !== common) { pathFrom.push(from); from = from._hostParent; } var pathTo = []; while (to && to !== common) { pathTo.push(to); to = to._hostParent; } var i; for (i = 0; i < pathFrom.length; i++) { fn(pathFrom[i], 'bubbled', argFrom); } for (i = pathTo.length; i-- > 0;) { fn(pathTo[i], 'captured', argTo); } } module.exports = { isAncestor: isAncestor, getLowestCommonAncestor: getLowestCommonAncestor, getParentInstance: getParentInstance, traverseTwoPhase: traverseTwoPhase, traverseEnterLeave: traverseEnterLeave }; /***/ }, /* 131 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. 
An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32), _assign = __webpack_require__(4); var DOMChildrenOperations = __webpack_require__(72); var DOMLazyTree = __webpack_require__(73); var ReactDOMComponentTree = __webpack_require__(31); var escapeTextContentForBrowser = __webpack_require__(78); var invariant = __webpack_require__(8); var validateDOMNesting = __webpack_require__(128); /** * Text nodes violate a couple assumptions that React makes about components: * * - When mounting text into the DOM, adjacent text nodes are merged. * - Text nodes cannot be assigned a React root ID. * * This component is used to wrap strings between comment nodes so that they * can undergo the same reconciliation that is applied to elements. * * TODO: Investigate representing React components in the DOM with text nodes. * * @class ReactDOMTextComponent * @extends ReactComponent * @internal */ var ReactDOMTextComponent = function (text) { // TODO: This is really a ReactText (ReactNode), not a ReactElement this._currentElement = text; this._stringText = '' + text; // ReactDOMComponentTree uses these: this._hostNode = null; this._hostParent = null; // Properties this._domID = 0; this._mountIndex = 0; this._closingComment = null; this._commentNodes = null; }; _assign(ReactDOMTextComponent.prototype, { /** * Creates the markup for this text node. This node is not intended to have * any features besides containing text content. * * @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction * @return {string} Markup for this text node. * @internal */ mountComponent: function (transaction, hostParent, hostContainerInfo, context) { if (false) { var parentInfo; if (hostParent != null) { parentInfo = hostParent._ancestorInfo; } else if (hostContainerInfo != null) { parentInfo = hostContainerInfo._ancestorInfo; } if (parentInfo) { // parentInfo should always be present except for the top-level // component when server rendering validateDOMNesting(null, this._stringText, this, parentInfo); } } var domID = hostContainerInfo._idCounter++; var openingValue = ' react-text: ' + domID + ' '; var closingValue = ' /react-text '; this._domID = domID; this._hostParent = hostParent; if (transaction.useCreateElement) { var ownerDocument = hostContainerInfo._ownerDocument; var openingComment = ownerDocument.createComment(openingValue); var closingComment = ownerDocument.createComment(closingValue); var lazyTree = DOMLazyTree(ownerDocument.createDocumentFragment()); DOMLazyTree.queueChild(lazyTree, DOMLazyTree(openingComment)); if (this._stringText) { DOMLazyTree.queueChild(lazyTree, DOMLazyTree(ownerDocument.createTextNode(this._stringText))); } DOMLazyTree.queueChild(lazyTree, DOMLazyTree(closingComment)); ReactDOMComponentTree.precacheNode(this, openingComment); this._closingComment = closingComment; return lazyTree; } else { var escapedText = escapeTextContentForBrowser(this._stringText); if (transaction.renderToStaticMarkup) { // Normally we'd wrap this between comment nodes for the reasons stated // above, but since this is a situation where React won't take over // (static pages), we can simply return the text as it is. return escapedText; } return '<!--' + openingValue + '-->' + escapedText + '<!--' + closingValue + '-->'; } }, /** * Updates this component by updating the text content. 
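 *
 * For context, the delimited markup produced at mount time (just above) that this
 * update path later patches in place looks like the following, where the id 3 is
 * illustrative:
 *
 *   <!-- react-text: 3 -->hello<!-- /react-text -->
 *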
* * @param {ReactText} nextText The next text content * @param {ReactReconcileTransaction} transaction * @internal */ receiveComponent: function (nextText, transaction) { if (nextText !== this._currentElement) { this._currentElement = nextText; var nextStringText = '' + nextText; if (nextStringText !== this._stringText) { // TODO: Save this as pending props and use performUpdateIfNecessary // and/or updateComponent to do the actual update for consistency with // other component types? this._stringText = nextStringText; var commentNodes = this.getHostNode(); DOMChildrenOperations.replaceDelimitedText(commentNodes[0], commentNodes[1], nextStringText); } } }, getHostNode: function () { var hostNode = this._commentNodes; if (hostNode) { return hostNode; } if (!this._closingComment) { var openingComment = ReactDOMComponentTree.getNodeFromInstance(this); var node = openingComment.nextSibling; while (true) { !(node != null) ? false ? invariant(false, 'Missing closing comment for text component %s', this._domID) : _prodInvariant('67', this._domID) : void 0; if (node.nodeType === 8 && node.nodeValue === ' /react-text ') { this._closingComment = node; break; } node = node.nextSibling; } } hostNode = [this._hostNode, this._closingComment]; this._commentNodes = hostNode; return hostNode; }, unmountComponent: function () { this._closingComment = null; this._commentNodes = null; ReactDOMComponentTree.uncacheNode(this); } }); module.exports = ReactDOMTextComponent; /***/ }, /* 132 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var ReactUpdates = __webpack_require__(53); var Transaction = __webpack_require__(60); var emptyFunction = __webpack_require__(12); var RESET_BATCHED_UPDATES = { initialize: emptyFunction, close: function () { ReactDefaultBatchingStrategy.isBatchingUpdates = false; } }; var FLUSH_BATCHED_UPDATES = { initialize: emptyFunction, close: ReactUpdates.flushBatchedUpdates.bind(ReactUpdates) }; var TRANSACTION_WRAPPERS = [FLUSH_BATCHED_UPDATES, RESET_BATCHED_UPDATES]; function ReactDefaultBatchingStrategyTransaction() { this.reinitializeTransaction(); } _assign(ReactDefaultBatchingStrategyTransaction.prototype, Transaction, { getTransactionWrappers: function () { return TRANSACTION_WRAPPERS; } }); var transaction = new ReactDefaultBatchingStrategyTransaction(); var ReactDefaultBatchingStrategy = { isBatchingUpdates: false, /** * Call the provided function in a context within which calls to `setState` * and friends are batched such that components aren't updated unnecessarily. */ batchedUpdates: function (callback, a, b, c, d, e) { var alreadyBatchingUpdates = ReactDefaultBatchingStrategy.isBatchingUpdates; ReactDefaultBatchingStrategy.isBatchingUpdates = true; // The code is written this way to avoid extra allocations if (alreadyBatchingUpdates) { return callback(a, b, c, d, e); } else { return transaction.perform(callback, null, a, b, c, d, e); } } }; module.exports = ReactDefaultBatchingStrategy; /***/ }, /* 133 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. 
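 *
 * With the batching strategy defined just above, several updates queued inside one
 * batch flush together; `component` here is an assumed mounted instance:
 *
 *   ReactUpdates.batchedUpdates(function () {
 *     component.setState({ a: 1 });
 *     component.setState({ b: 2 });
 *   }); // a single re-render happens when the outermost transaction closes
 *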
* * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _assign = __webpack_require__(4); var EventListener = __webpack_require__(134); var ExecutionEnvironment = __webpack_require__(45); var PooledClass = __webpack_require__(47); var ReactDOMComponentTree = __webpack_require__(31); var ReactUpdates = __webpack_require__(53); var getEventTarget = __webpack_require__(61); var getUnboundedScrollPosition = __webpack_require__(135); /** * Find the deepest React component completely containing the root of the * passed-in instance (for use when entire React trees are nested within each * other). If React trees are not nested, returns null. */ function findParent(inst) { // TODO: It may be a good idea to cache this to prevent unnecessary DOM // traversal, but caching is difficult to do correctly without using a // mutation observer to listen for all DOM changes. while (inst._hostParent) { inst = inst._hostParent; } var rootNode = ReactDOMComponentTree.getNodeFromInstance(inst); var container = rootNode.parentNode; return ReactDOMComponentTree.getClosestInstanceFromNode(container); } // Used to store ancestor hierarchy in top level callback function TopLevelCallbackBookKeeping(topLevelType, nativeEvent) { this.topLevelType = topLevelType; this.nativeEvent = nativeEvent; this.ancestors = []; } _assign(TopLevelCallbackBookKeeping.prototype, { destructor: function () { this.topLevelType = null; this.nativeEvent = null; this.ancestors.length = 0; } }); PooledClass.addPoolingTo(TopLevelCallbackBookKeeping, PooledClass.twoArgumentPooler); function handleTopLevelImpl(bookKeeping) { var nativeEventTarget = getEventTarget(bookKeeping.nativeEvent); var targetInst = ReactDOMComponentTree.getClosestInstanceFromNode(nativeEventTarget); // Loop through the hierarchy, in case there's any nested components. // It's important that we build the array of ancestors before calling any // event handlers, because event handlers can modify the DOM, leading to // inconsistencies with ReactMount's node cache. See #1105. var ancestor = targetInst; do { bookKeeping.ancestors.push(ancestor); ancestor = ancestor && findParent(ancestor); } while (ancestor); for (var i = 0; i < bookKeeping.ancestors.length; i++) { targetInst = bookKeeping.ancestors[i]; ReactEventListener._handleTopLevel(bookKeeping.topLevelType, targetInst, bookKeeping.nativeEvent, getEventTarget(bookKeeping.nativeEvent)); } } function scrollValueMonitor(cb) { var scrollPosition = getUnboundedScrollPosition(window); cb(scrollPosition); } var ReactEventListener = { _enabled: true, _handleTopLevel: null, WINDOW_HANDLE: ExecutionEnvironment.canUseDOM ? window : null, setHandleTopLevel: function (handleTopLevel) { ReactEventListener._handleTopLevel = handleTopLevel; }, setEnabled: function (enabled) { ReactEventListener._enabled = !!enabled; }, isEnabled: function () { return ReactEventListener._enabled; }, /** * Traps top-level events by using event bubbling. * * @param {string} topLevelType Record from `EventConstants`. * @param {string} handlerBaseName Event name (e.g. "click"). * @param {object} element Element on which to attach listener. * @return {?object} An object with a remove function which will forcefully * remove the listener. 
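 *
 * A usage sketch; the 'topClick' record name follows the convention used by the
 * event plugin system and is stated here as an assumption:
 *
 *   var handle = ReactEventListener.trapBubbledEvent('topClick', 'click', document);
 *   // ... later, to detach:
 *   if (handle) { handle.remove(); }
 *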
* @internal */ trapBubbledEvent: function (topLevelType, handlerBaseName, element) { if (!element) { return null; } return EventListener.listen(element, handlerBaseName, ReactEventListener.dispatchEvent.bind(null, topLevelType)); }, /** * Traps a top-level event by using event capturing. * * @param {string} topLevelType Record from `EventConstants`. * @param {string} handlerBaseName Event name (e.g. "click"). * @param {object} element Element on which to attach listener. * @return {?object} An object with a remove function which will forcefully * remove the listener. * @internal */ trapCapturedEvent: function (topLevelType, handlerBaseName, element) { if (!element) { return null; } return EventListener.capture(element, handlerBaseName, ReactEventListener.dispatchEvent.bind(null, topLevelType)); }, monitorScrollValue: function (refresh) { var callback = scrollValueMonitor.bind(null, refresh); EventListener.listen(window, 'scroll', callback); }, dispatchEvent: function (topLevelType, nativeEvent) { if (!ReactEventListener._enabled) { return; } var bookKeeping = TopLevelCallbackBookKeeping.getPooled(topLevelType, nativeEvent); try { // Event queue being processed in the same cycle allows // `preventDefault`. ReactUpdates.batchedUpdates(handleTopLevelImpl, bookKeeping); } finally { TopLevelCallbackBookKeeping.release(bookKeeping); } } }; module.exports = ReactEventListener; /***/ }, /* 134 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @typechecks */ var emptyFunction = __webpack_require__(12); /** * Upstream version of event listener. Does not take into account specific * nature of platform. */ var EventListener = { /** * Listen to DOM events during the bubble phase. * * @param {DOMEventTarget} target DOM element to register listener on. * @param {string} eventType Event type, e.g. 'click' or 'mouseover'. * @param {function} callback Callback function. * @return {object} Object with a `remove` method. */ listen: function listen(target, eventType, callback) { if (target.addEventListener) { target.addEventListener(eventType, callback, false); return { remove: function remove() { target.removeEventListener(eventType, callback, false); } }; } else if (target.attachEvent) { target.attachEvent('on' + eventType, callback); return { remove: function remove() { target.detachEvent('on' + eventType, callback); } }; } }, /** * Listen to DOM events during the capture phase. * * @param {DOMEventTarget} target DOM element to register listener on. * @param {string} eventType Event type, e.g. 'click' or 'mouseover'. * @param {function} callback Callback function. * @return {object} Object with a `remove` method. 
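 *
 * For example (inputNode and onFocus are assumed to exist):
 *
 *   var subscription = EventListener.capture(inputNode, 'focus', onFocus);
 *   // later, to stop listening:
 *   subscription.remove();
 *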
*/ capture: function capture(target, eventType, callback) { if (target.addEventListener) { target.addEventListener(eventType, callback, true); return { remove: function remove() { target.removeEventListener(eventType, callback, true); } }; } else { if (false) { console.error('Attempted to listen to events during the capture phase on a ' + 'browser that does not support the capture phase. Your application ' + 'will not receive some events.'); } return { remove: emptyFunction }; } }, registerDefault: function registerDefault() {} }; module.exports = EventListener; /***/ }, /* 135 */ /***/ function(module, exports) { /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ 'use strict'; /** * Gets the scroll position of the supplied element or window. * * The return values are unbounded, unlike `getScrollPosition`. This means they * may be negative or exceed the element boundaries (which is possible using * inertial scrolling). * * @param {DOMWindow|DOMElement} scrollable * @return {object} Map with `x` and `y` keys. */ function getUnboundedScrollPosition(scrollable) { if (scrollable === window) { return { x: window.pageXOffset || document.documentElement.scrollLeft, y: window.pageYOffset || document.documentElement.scrollTop }; } return { x: scrollable.scrollLeft, y: scrollable.scrollTop }; } module.exports = getUnboundedScrollPosition; /***/ }, /* 136 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var DOMProperty = __webpack_require__(33); var EventPluginHub = __webpack_require__(39); var EventPluginUtils = __webpack_require__(41); var ReactComponentEnvironment = __webpack_require__(107); var ReactEmptyComponent = __webpack_require__(116); var ReactBrowserEventEmitter = __webpack_require__(97); var ReactHostComponent = __webpack_require__(117); var ReactUpdates = __webpack_require__(53); var ReactInjection = { Component: ReactComponentEnvironment.injection, DOMProperty: DOMProperty.injection, EmptyComponent: ReactEmptyComponent.injection, EventPluginHub: EventPluginHub.injection, EventPluginUtils: EventPluginUtils.injection, EventEmitter: ReactBrowserEventEmitter.injection, HostComponent: ReactHostComponent.injection, Updates: ReactUpdates.injection }; module.exports = ReactInjection; /***/ }, /* 137 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var _assign = __webpack_require__(4); var CallbackQueue = __webpack_require__(54); var PooledClass = __webpack_require__(47); var ReactBrowserEventEmitter = __webpack_require__(97); var ReactInputSelection = __webpack_require__(138); var ReactInstrumentation = __webpack_require__(59); var Transaction = __webpack_require__(60); var ReactUpdateQueue = __webpack_require__(127); /** * Ensures that, when possible, the selection range (currently selected text * input) is not disturbed by performing the transaction. */ var SELECTION_RESTORATION = { /** * @return {Selection} Selection information. */ initialize: ReactInputSelection.getSelectionInformation, /** * @param {Selection} sel Selection information returned from `initialize`. */ close: ReactInputSelection.restoreSelection }; /** * Suppresses events (blur/focus) that could be inadvertently dispatched due to * high level DOM manipulations (like temporarily removing a text input from the * DOM). */ var EVENT_SUPPRESSION = { /** * @return {boolean} The enabled status of `ReactBrowserEventEmitter` before * the reconciliation. */ initialize: function () { var currentlyEnabled = ReactBrowserEventEmitter.isEnabled(); ReactBrowserEventEmitter.setEnabled(false); return currentlyEnabled; }, /** * @param {boolean} previouslyEnabled Enabled status of * `ReactBrowserEventEmitter` before the reconciliation occurred. `close` * restores the previous value. */ close: function (previouslyEnabled) { ReactBrowserEventEmitter.setEnabled(previouslyEnabled); } }; /** * Provides a queue for collecting `componentDidMount` and * `componentDidUpdate` callbacks during the transaction. */ var ON_DOM_READY_QUEUEING = { /** * Initializes the internal `onDOMReady` queue. */ initialize: function () { this.reactMountReady.reset(); }, /** * After DOM is flushed, invoke all registered `onDOMReady` callbacks. */ close: function () { this.reactMountReady.notifyAll(); } }; /** * Executed within the scope of the `Transaction` instance. Consider these as * being member methods, but with an implied ordering while being isolated from * each other. */ var TRANSACTION_WRAPPERS = [SELECTION_RESTORATION, EVENT_SUPPRESSION, ON_DOM_READY_QUEUEING]; if (false) { TRANSACTION_WRAPPERS.push({ initialize: ReactInstrumentation.debugTool.onBeginFlush, close: ReactInstrumentation.debugTool.onEndFlush }); } /** * Currently: * - The order that these are listed in the transaction is critical: * - Suppresses events. * - Restores selection range. * * Future: * - Restore document/overflow scroll positions that were unintentionally * modified via DOM insertions above the top viewport boundary. * - Implement/integrate with customized constraint based layout system and keep * track of which dimensions must be remeasured. * * @class ReactReconcileTransaction */ function ReactReconcileTransaction(useCreateElement) { this.reinitializeTransaction(); // Only server-side rendering really needs this option (see // `ReactServerRendering`), but server-side uses // `ReactServerRenderingTransaction` instead. This option is here so that it's // accessible and defaults to false when `ReactDOMComponent` and // `ReactDOMTextComponent` checks it in `mountComponent`.` this.renderToStaticMarkup = false; this.reactMountReady = CallbackQueue.getPooled(null); this.useCreateElement = useCreateElement; } var Mixin = { /** * @see Transaction * @abstract * @final * @return {array<object>} List of operation wrap procedures. 
* TODO: convert to array<TransactionWrapper> */ getTransactionWrappers: function () { return TRANSACTION_WRAPPERS; }, /** * @return {object} The queue to collect `onDOMReady` callbacks with. */ getReactMountReady: function () { return this.reactMountReady; }, /** * @return {object} The queue to collect React async events. */ getUpdateQueue: function () { return ReactUpdateQueue; }, /** * Save current transaction state -- if the return value from this method is * passed to `rollback`, the transaction will be reset to that state. */ checkpoint: function () { // reactMountReady is the our only stateful wrapper return this.reactMountReady.checkpoint(); }, rollback: function (checkpoint) { this.reactMountReady.rollback(checkpoint); }, /** * `PooledClass` looks for this, and will invoke this before allowing this * instance to be reused. */ destructor: function () { CallbackQueue.release(this.reactMountReady); this.reactMountReady = null; } }; _assign(ReactReconcileTransaction.prototype, Transaction, Mixin); PooledClass.addPoolingTo(ReactReconcileTransaction); module.exports = ReactReconcileTransaction; /***/ }, /* 138 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactDOMSelection = __webpack_require__(139); var containsNode = __webpack_require__(141); var focusNode = __webpack_require__(86); var getActiveElement = __webpack_require__(144); function isInDocument(node) { return containsNode(document.documentElement, node); } /** * @ReactInputSelection: React input selection module. Based on Selection.js, * but modified to be suitable for react and has a couple of bug fixes (doesn't * assume buttons have range selections allowed). * Input selection module for React. */ var ReactInputSelection = { hasSelectionCapabilities: function (elem) { var nodeName = elem && elem.nodeName && elem.nodeName.toLowerCase(); return nodeName && (nodeName === 'input' && elem.type === 'text' || nodeName === 'textarea' || elem.contentEditable === 'true'); }, getSelectionInformation: function () { var focusedElem = getActiveElement(); return { focusedElem: focusedElem, selectionRange: ReactInputSelection.hasSelectionCapabilities(focusedElem) ? ReactInputSelection.getSelection(focusedElem) : null }; }, /** * @restoreSelection: If any selection information was potentially lost, * restore it. This is useful when performing operations that could remove dom * nodes and place them back in, resulting in focus being lost. */ restoreSelection: function (priorSelectionInformation) { var curFocusedElem = getActiveElement(); var priorFocusedElem = priorSelectionInformation.focusedElem; var priorSelectionRange = priorSelectionInformation.selectionRange; if (curFocusedElem !== priorFocusedElem && isInDocument(priorFocusedElem)) { if (ReactInputSelection.hasSelectionCapabilities(priorFocusedElem)) { ReactInputSelection.setSelection(priorFocusedElem, priorSelectionRange); } focusNode(priorFocusedElem); } }, /** * @getSelection: Gets the selection bounds of a focused textarea, input or * contentEditable node. 
* -@input: Look up selection bounds of this input * -@return {start: selectionStart, end: selectionEnd} */ getSelection: function (input) { var selection; if ('selectionStart' in input) { // Modern browser with input or textarea. selection = { start: input.selectionStart, end: input.selectionEnd }; } else if (document.selection && input.nodeName && input.nodeName.toLowerCase() === 'input') { // IE8 input. var range = document.selection.createRange(); // There can only be one selection per document in IE, so it must // be in our element. if (range.parentElement() === input) { selection = { start: -range.moveStart('character', -input.value.length), end: -range.moveEnd('character', -input.value.length) }; } } else { // Content editable or old IE textarea. selection = ReactDOMSelection.getOffsets(input); } return selection || { start: 0, end: 0 }; }, /** * @setSelection: Sets the selection bounds of a textarea or input and focuses * the input. * -@input Set selection bounds of this input or textarea * -@offsets Object of same form that is returned from get* */ setSelection: function (input, offsets) { var start = offsets.start; var end = offsets.end; if (end === undefined) { end = start; } if ('selectionStart' in input) { input.selectionStart = start; input.selectionEnd = Math.min(end, input.value.length); } else if (document.selection && input.nodeName && input.nodeName.toLowerCase() === 'input') { var range = input.createTextRange(); range.collapse(true); range.moveStart('character', start); range.moveEnd('character', end - start); range.select(); } else { ReactDOMSelection.setOffsets(input, offsets); } } }; module.exports = ReactInputSelection; /***/ }, /* 139 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ExecutionEnvironment = __webpack_require__(45); var getNodeForCharacterOffset = __webpack_require__(140); var getTextContentAccessor = __webpack_require__(48); /** * While `isCollapsed` is available on the Selection object and `collapsed` * is available on the Range object, IE11 sometimes gets them wrong. * If the anchor/focus nodes and offsets are the same, the range is collapsed. */ function isCollapsed(anchorNode, anchorOffset, focusNode, focusOffset) { return anchorNode === focusNode && anchorOffset === focusOffset; } /** * Get the appropriate anchor and focus node/offset pairs for IE. * * The catch here is that IE's selection API doesn't provide information * about whether the selection is forward or backward, so we have to * behave as though it's always forward. * * IE text differs from modern selection in that it behaves as though * block elements end with a new line. This means character offsets will * differ between the two APIs. * * @param {DOMElement} node * @return {object} */ function getIEOffsets(node) { var selection = document.selection; var selectedRange = selection.createRange(); var selectedLength = selectedRange.text.length; // Duplicate selection so we can move range without breaking user selection. 
var fromStart = selectedRange.duplicate(); fromStart.moveToElementText(node); fromStart.setEndPoint('EndToStart', selectedRange); var startOffset = fromStart.text.length; var endOffset = startOffset + selectedLength; return { start: startOffset, end: endOffset }; } /** * @param {DOMElement} node * @return {?object} */ function getModernOffsets(node) { var selection = window.getSelection && window.getSelection(); if (!selection || selection.rangeCount === 0) { return null; } var anchorNode = selection.anchorNode; var anchorOffset = selection.anchorOffset; var focusNode = selection.focusNode; var focusOffset = selection.focusOffset; var currentRange = selection.getRangeAt(0); // In Firefox, range.startContainer and range.endContainer can be "anonymous // divs", e.g. the up/down buttons on an <input type="number">. Anonymous // divs do not seem to expose properties, triggering a "Permission denied // error" if any of its properties are accessed. The only seemingly possible // way to avoid erroring is to access a property that typically works for // non-anonymous divs and catch any error that may otherwise arise. See // https://bugzilla.mozilla.org/show_bug.cgi?id=208427 try { /* eslint-disable no-unused-expressions */ currentRange.startContainer.nodeType; currentRange.endContainer.nodeType; /* eslint-enable no-unused-expressions */ } catch (e) { return null; } // If the node and offset values are the same, the selection is collapsed. // `Selection.isCollapsed` is available natively, but IE sometimes gets // this value wrong. var isSelectionCollapsed = isCollapsed(selection.anchorNode, selection.anchorOffset, selection.focusNode, selection.focusOffset); var rangeLength = isSelectionCollapsed ? 0 : currentRange.toString().length; var tempRange = currentRange.cloneRange(); tempRange.selectNodeContents(node); tempRange.setEnd(currentRange.startContainer, currentRange.startOffset); var isTempRangeCollapsed = isCollapsed(tempRange.startContainer, tempRange.startOffset, tempRange.endContainer, tempRange.endOffset); var start = isTempRangeCollapsed ? 0 : tempRange.toString().length; var end = start + rangeLength; // Detect whether the selection is backward. var detectionRange = document.createRange(); detectionRange.setStart(anchorNode, anchorOffset); detectionRange.setEnd(focusNode, focusOffset); var isBackward = detectionRange.collapsed; return { start: isBackward ? end : start, end: isBackward ? start : end }; } /** * @param {DOMElement|DOMTextNode} node * @param {object} offsets */ function setIEOffsets(node, offsets) { var range = document.selection.createRange().duplicate(); var start, end; if (offsets.end === undefined) { start = offsets.start; end = start; } else if (offsets.start > offsets.end) { start = offsets.end; end = offsets.start; } else { start = offsets.start; end = offsets.end; } range.moveToElementText(node); range.moveStart('character', start); range.setEndPoint('EndToStart', range); range.moveEnd('character', end - start); range.select(); } /** * In modern non-IE browsers, we can support both forward and backward * selections. * * Note: IE10+ supports the Selection object, but it does not support * the `extend` method, which means that even in modern IE, it's not possible * to programmatically create a backward selection. Thus, for all IE * versions, we use the old IE API to create our selections. 
* * @param {DOMElement|DOMTextNode} node * @param {object} offsets */ function setModernOffsets(node, offsets) { if (!window.getSelection) { return; } var selection = window.getSelection(); var length = node[getTextContentAccessor()].length; var start = Math.min(offsets.start, length); var end = offsets.end === undefined ? start : Math.min(offsets.end, length); // IE 11 uses modern selection, but doesn't support the extend method. // Flip backward selections, so we can set with a single range. if (!selection.extend && start > end) { var temp = end; end = start; start = temp; } var startMarker = getNodeForCharacterOffset(node, start); var endMarker = getNodeForCharacterOffset(node, end); if (startMarker && endMarker) { var range = document.createRange(); range.setStart(startMarker.node, startMarker.offset); selection.removeAllRanges(); if (start > end) { selection.addRange(range); selection.extend(endMarker.node, endMarker.offset); } else { range.setEnd(endMarker.node, endMarker.offset); selection.addRange(range); } } } var useIEOffsets = ExecutionEnvironment.canUseDOM && 'selection' in document && !('getSelection' in window); var ReactDOMSelection = { /** * @param {DOMElement} node */ getOffsets: useIEOffsets ? getIEOffsets : getModernOffsets, /** * @param {DOMElement|DOMTextNode} node * @param {object} offsets */ setOffsets: useIEOffsets ? setIEOffsets : setModernOffsets }; module.exports = ReactDOMSelection; /***/ }, /* 140 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * Given any node return the first leaf node without children. * * @param {DOMElement|DOMTextNode} node * @return {DOMElement|DOMTextNode} */ function getLeafNode(node) { while (node && node.firstChild) { node = node.firstChild; } return node; } /** * Get the next sibling within a container. This will walk up the * DOM if a node's siblings have been exhausted. * * @param {DOMElement|DOMTextNode} node * @return {?DOMElement|DOMTextNode} */ function getSiblingNode(node) { while (node) { if (node.nextSibling) { return node.nextSibling; } node = node.parentNode; } } /** * Get object describing the nodes which contain characters at offset. * * @param {DOMElement|DOMTextNode} root * @param {number} offset * @return {?object} */ function getNodeForCharacterOffset(root, offset) { var node = getLeafNode(root); var nodeStart = 0; var nodeEnd = 0; while (node) { if (node.nodeType === 3) { nodeEnd = nodeStart + node.textContent.length; if (nodeStart <= offset && nodeEnd >= offset) { return { node: node, offset: offset - nodeStart }; } nodeStart = nodeEnd; } node = getLeafNode(getSiblingNode(node)); } } module.exports = getNodeForCharacterOffset; /***/ }, /* 141 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ var isTextNode = __webpack_require__(142); /*eslint-disable no-bitwise */ /** * Checks if a given DOM node contains or is another DOM node. 
*/ function containsNode(outerNode, innerNode) { if (!outerNode || !innerNode) { return false; } else if (outerNode === innerNode) { return true; } else if (isTextNode(outerNode)) { return false; } else if (isTextNode(innerNode)) { return containsNode(outerNode, innerNode.parentNode); } else if ('contains' in outerNode) { return outerNode.contains(innerNode); } else if (outerNode.compareDocumentPosition) { return !!(outerNode.compareDocumentPosition(innerNode) & 16); } else { return false; } } module.exports = containsNode; /***/ }, /* 142 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ var isNode = __webpack_require__(143); /** * @param {*} object The object to check. * @return {boolean} Whether or not the object is a DOM text node. */ function isTextNode(object) { return isNode(object) && object.nodeType == 3; } module.exports = isTextNode; /***/ }, /* 143 */ /***/ function(module, exports) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ /** * @param {*} object The object to check. * @return {boolean} Whether or not the object is a DOM node. */ function isNode(object) { return !!(object && (typeof Node === 'function' ? object instanceof Node : typeof object === 'object' && typeof object.nodeType === 'number' && typeof object.nodeName === 'string')); } module.exports = isNode; /***/ }, /* 144 */ /***/ function(module, exports) { 'use strict'; /** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @typechecks */ /* eslint-disable fb-www/typeof-undefined */ /** * Same as document.activeElement but wraps in a try-catch block. In IE it is * not safe to call document.activeElement if there is nothing focused. * * The activeElement will be null only if the document or document body is not * yet defined. */ function getActiveElement() /*?DOMElement*/{ if (typeof document === 'undefined') { return null; } try { return document.activeElement || document.body; } catch (e) { return document.body; } } module.exports = getActiveElement; /***/ }, /* 145 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var NS = { xlink: 'http://www.w3.org/1999/xlink', xml: 'http://www.w3.org/XML/1998/namespace' }; // We use attributes for everything SVG so let's avoid some duplication and run // code instead. // The following are all specified in the HTML config already so we exclude here. 
// - class (as className) // - color // - height // - id // - lang // - max // - media // - method // - min // - name // - style // - target // - type // - width var ATTRS = { accentHeight: 'accent-height', accumulate: 0, additive: 0, alignmentBaseline: 'alignment-baseline', allowReorder: 'allowReorder', alphabetic: 0, amplitude: 0, arabicForm: 'arabic-form', ascent: 0, attributeName: 'attributeName', attributeType: 'attributeType', autoReverse: 'autoReverse', azimuth: 0, baseFrequency: 'baseFrequency', baseProfile: 'baseProfile', baselineShift: 'baseline-shift', bbox: 0, begin: 0, bias: 0, by: 0, calcMode: 'calcMode', capHeight: 'cap-height', clip: 0, clipPath: 'clip-path', clipRule: 'clip-rule', clipPathUnits: 'clipPathUnits', colorInterpolation: 'color-interpolation', colorInterpolationFilters: 'color-interpolation-filters', colorProfile: 'color-profile', colorRendering: 'color-rendering', contentScriptType: 'contentScriptType', contentStyleType: 'contentStyleType', cursor: 0, cx: 0, cy: 0, d: 0, decelerate: 0, descent: 0, diffuseConstant: 'diffuseConstant', direction: 0, display: 0, divisor: 0, dominantBaseline: 'dominant-baseline', dur: 0, dx: 0, dy: 0, edgeMode: 'edgeMode', elevation: 0, enableBackground: 'enable-background', end: 0, exponent: 0, externalResourcesRequired: 'externalResourcesRequired', fill: 0, fillOpacity: 'fill-opacity', fillRule: 'fill-rule', filter: 0, filterRes: 'filterRes', filterUnits: 'filterUnits', floodColor: 'flood-color', floodOpacity: 'flood-opacity', focusable: 0, fontFamily: 'font-family', fontSize: 'font-size', fontSizeAdjust: 'font-size-adjust', fontStretch: 'font-stretch', fontStyle: 'font-style', fontVariant: 'font-variant', fontWeight: 'font-weight', format: 0, from: 0, fx: 0, fy: 0, g1: 0, g2: 0, glyphName: 'glyph-name', glyphOrientationHorizontal: 'glyph-orientation-horizontal', glyphOrientationVertical: 'glyph-orientation-vertical', glyphRef: 'glyphRef', gradientTransform: 'gradientTransform', gradientUnits: 'gradientUnits', hanging: 0, horizAdvX: 'horiz-adv-x', horizOriginX: 'horiz-origin-x', ideographic: 0, imageRendering: 'image-rendering', 'in': 0, in2: 0, intercept: 0, k: 0, k1: 0, k2: 0, k3: 0, k4: 0, kernelMatrix: 'kernelMatrix', kernelUnitLength: 'kernelUnitLength', kerning: 0, keyPoints: 'keyPoints', keySplines: 'keySplines', keyTimes: 'keyTimes', lengthAdjust: 'lengthAdjust', letterSpacing: 'letter-spacing', lightingColor: 'lighting-color', limitingConeAngle: 'limitingConeAngle', local: 0, markerEnd: 'marker-end', markerMid: 'marker-mid', markerStart: 'marker-start', markerHeight: 'markerHeight', markerUnits: 'markerUnits', markerWidth: 'markerWidth', mask: 0, maskContentUnits: 'maskContentUnits', maskUnits: 'maskUnits', mathematical: 0, mode: 0, numOctaves: 'numOctaves', offset: 0, opacity: 0, operator: 0, order: 0, orient: 0, orientation: 0, origin: 0, overflow: 0, overlinePosition: 'overline-position', overlineThickness: 'overline-thickness', paintOrder: 'paint-order', panose1: 'panose-1', pathLength: 'pathLength', patternContentUnits: 'patternContentUnits', patternTransform: 'patternTransform', patternUnits: 'patternUnits', pointerEvents: 'pointer-events', points: 0, pointsAtX: 'pointsAtX', pointsAtY: 'pointsAtY', pointsAtZ: 'pointsAtZ', preserveAlpha: 'preserveAlpha', preserveAspectRatio: 'preserveAspectRatio', primitiveUnits: 'primitiveUnits', r: 0, radius: 0, refX: 'refX', refY: 'refY', renderingIntent: 'rendering-intent', repeatCount: 'repeatCount', repeatDur: 'repeatDur', requiredExtensions: 'requiredExtensions', 
requiredFeatures: 'requiredFeatures', restart: 0, result: 0, rotate: 0, rx: 0, ry: 0, scale: 0, seed: 0, shapeRendering: 'shape-rendering', slope: 0, spacing: 0, specularConstant: 'specularConstant', specularExponent: 'specularExponent', speed: 0, spreadMethod: 'spreadMethod', startOffset: 'startOffset', stdDeviation: 'stdDeviation', stemh: 0, stemv: 0, stitchTiles: 'stitchTiles', stopColor: 'stop-color', stopOpacity: 'stop-opacity', strikethroughPosition: 'strikethrough-position', strikethroughThickness: 'strikethrough-thickness', string: 0, stroke: 0, strokeDasharray: 'stroke-dasharray', strokeDashoffset: 'stroke-dashoffset', strokeLinecap: 'stroke-linecap', strokeLinejoin: 'stroke-linejoin', strokeMiterlimit: 'stroke-miterlimit', strokeOpacity: 'stroke-opacity', strokeWidth: 'stroke-width', surfaceScale: 'surfaceScale', systemLanguage: 'systemLanguage', tableValues: 'tableValues', targetX: 'targetX', targetY: 'targetY', textAnchor: 'text-anchor', textDecoration: 'text-decoration', textRendering: 'text-rendering', textLength: 'textLength', to: 0, transform: 0, u1: 0, u2: 0, underlinePosition: 'underline-position', underlineThickness: 'underline-thickness', unicode: 0, unicodeBidi: 'unicode-bidi', unicodeRange: 'unicode-range', unitsPerEm: 'units-per-em', vAlphabetic: 'v-alphabetic', vHanging: 'v-hanging', vIdeographic: 'v-ideographic', vMathematical: 'v-mathematical', values: 0, vectorEffect: 'vector-effect', version: 0, vertAdvY: 'vert-adv-y', vertOriginX: 'vert-origin-x', vertOriginY: 'vert-origin-y', viewBox: 'viewBox', viewTarget: 'viewTarget', visibility: 0, widths: 0, wordSpacing: 'word-spacing', writingMode: 'writing-mode', x: 0, xHeight: 'x-height', x1: 0, x2: 0, xChannelSelector: 'xChannelSelector', xlinkActuate: 'xlink:actuate', xlinkArcrole: 'xlink:arcrole', xlinkHref: 'xlink:href', xlinkRole: 'xlink:role', xlinkShow: 'xlink:show', xlinkTitle: 'xlink:title', xlinkType: 'xlink:type', xmlBase: 'xml:base', xmlns: 0, xmlnsXlink: 'xmlns:xlink', xmlLang: 'xml:lang', xmlSpace: 'xml:space', y: 0, y1: 0, y2: 0, yChannelSelector: 'yChannelSelector', z: 0, zoomAndPan: 'zoomAndPan' }; var SVGDOMPropertyConfig = { Properties: {}, DOMAttributeNamespaces: { xlinkActuate: NS.xlink, xlinkArcrole: NS.xlink, xlinkHref: NS.xlink, xlinkRole: NS.xlink, xlinkShow: NS.xlink, xlinkTitle: NS.xlink, xlinkType: NS.xlink, xmlBase: NS.xml, xmlLang: NS.xml, xmlSpace: NS.xml }, DOMAttributeNames: {} }; Object.keys(ATTRS).forEach(function (key) { SVGDOMPropertyConfig.Properties[key] = 0; if (ATTRS[key]) { SVGDOMPropertyConfig.DOMAttributeNames[key] = ATTRS[key]; } }); module.exports = SVGDOMPropertyConfig; /***/ }, /* 146 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var EventPropagators = __webpack_require__(38); var ExecutionEnvironment = __webpack_require__(45); var ReactDOMComponentTree = __webpack_require__(31); var ReactInputSelection = __webpack_require__(138); var SyntheticEvent = __webpack_require__(50); var getActiveElement = __webpack_require__(144); var isTextInputElement = __webpack_require__(63); var shallowEqual = __webpack_require__(114); var skipSelectionChangeEvent = ExecutionEnvironment.canUseDOM && 'documentMode' in document && document.documentMode <= 11; var eventTypes = { select: { phasedRegistrationNames: { bubbled: 'onSelect', captured: 'onSelectCapture' }, dependencies: ['topBlur', 'topContextMenu', 'topFocus', 'topKeyDown', 'topKeyUp', 'topMouseDown', 'topMouseUp', 'topSelectionChange'] } }; var activeElement = null; var activeElementInst = null; var lastSelection = null; var mouseDown = false; // Track whether a listener exists for this plugin. If none exist, we do // not extract events. See #3639. var hasListener = false; /** * Get an object which is a unique representation of the current selection. * * The return value will not be consistent across nodes or browsers, but * two identical selections on the same node will return identical objects. * * @param {DOMElement} node * @return {object} */ function getSelection(node) { if ('selectionStart' in node && ReactInputSelection.hasSelectionCapabilities(node)) { return { start: node.selectionStart, end: node.selectionEnd }; } else if (window.getSelection) { var selection = window.getSelection(); return { anchorNode: selection.anchorNode, anchorOffset: selection.anchorOffset, focusNode: selection.focusNode, focusOffset: selection.focusOffset }; } else if (document.selection) { var range = document.selection.createRange(); return { parentElement: range.parentElement(), text: range.text, top: range.boundingTop, left: range.boundingLeft }; } } /** * Poll selection to see whether it's changed. * * @param {object} nativeEvent * @return {?SyntheticEvent} */ function constructSelectEvent(nativeEvent, nativeEventTarget) { // Ensure we have the right element, and that the user is not dragging a // selection (this matches native `select` event behavior). In HTML5, select // fires only on input and textarea thus if there's no focused element we // won't dispatch. if (mouseDown || activeElement == null || activeElement !== getActiveElement()) { return null; } // Only fire when selection has actually changed. var currentSelection = getSelection(activeElement); if (!lastSelection || !shallowEqual(lastSelection, currentSelection)) { lastSelection = currentSelection; var syntheticEvent = SyntheticEvent.getPooled(eventTypes.select, activeElementInst, nativeEvent, nativeEventTarget); syntheticEvent.type = 'select'; syntheticEvent.target = activeElement; EventPropagators.accumulateTwoPhaseDispatches(syntheticEvent); return syntheticEvent; } return null; } /** * This plugin creates an `onSelect` event that normalizes select events * across form elements. * * Supported elements are: * - input (see `isTextInputElement`) * - textarea * - contentEditable * * This differs from native browser implementations in the following ways: * - Fires on contentEditable fields as well as inputs. * - Fires for collapsed selection. * - Fires after user input. */ var SelectEventPlugin = { eventTypes: eventTypes, extractEvents: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { if (!hasListener) { return null; } var targetNode = targetInst ? 
ReactDOMComponentTree.getNodeFromInstance(targetInst) : window; switch (topLevelType) { // Track the input node that has focus. case 'topFocus': if (isTextInputElement(targetNode) || targetNode.contentEditable === 'true') { activeElement = targetNode; activeElementInst = targetInst; lastSelection = null; } break; case 'topBlur': activeElement = null; activeElementInst = null; lastSelection = null; break; // Don't fire the event while the user is dragging. This matches the // semantics of the native select event. case 'topMouseDown': mouseDown = true; break; case 'topContextMenu': case 'topMouseUp': mouseDown = false; return constructSelectEvent(nativeEvent, nativeEventTarget); // Chrome and IE fire non-standard event when selection is changed (and // sometimes when it hasn't). IE's event fires out of order with respect // to key and input events on deletion, so we discard it. // // Firefox doesn't support selectionchange, so check selection status // after each key entry. The selection changes after keydown and before // keyup, but we check on keydown as well in the case of holding down a // key, when multiple keydown events are fired but only one keyup is. // This is also our approach for IE handling, for the reason above. case 'topSelectionChange': if (skipSelectionChangeEvent) { break; } // falls through case 'topKeyDown': case 'topKeyUp': return constructSelectEvent(nativeEvent, nativeEventTarget); } return null; }, didPutListener: function (inst, registrationName, listener) { if (registrationName === 'onSelect') { hasListener = true; } } }; module.exports = SelectEventPlugin; /***/ }, /* 147 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var EventListener = __webpack_require__(134); var EventPropagators = __webpack_require__(38); var ReactDOMComponentTree = __webpack_require__(31); var SyntheticAnimationEvent = __webpack_require__(148); var SyntheticClipboardEvent = __webpack_require__(149); var SyntheticEvent = __webpack_require__(50); var SyntheticFocusEvent = __webpack_require__(150); var SyntheticKeyboardEvent = __webpack_require__(151); var SyntheticMouseEvent = __webpack_require__(66); var SyntheticDragEvent = __webpack_require__(154); var SyntheticTouchEvent = __webpack_require__(155); var SyntheticTransitionEvent = __webpack_require__(156); var SyntheticUIEvent = __webpack_require__(67); var SyntheticWheelEvent = __webpack_require__(157); var emptyFunction = __webpack_require__(12); var getEventCharCode = __webpack_require__(152); var invariant = __webpack_require__(8); /** * Turns * ['abort', ...] * into * eventTypes = { * 'abort': { * phasedRegistrationNames: { * bubbled: 'onAbort', * captured: 'onAbortCapture', * }, * dependencies: ['topAbort'], * }, * ... 
* }; * topLevelEventsToDispatchConfig = { * 'topAbort': { sameConfig } * }; */ var eventTypes = {}; var topLevelEventsToDispatchConfig = {}; ['abort', 'animationEnd', 'animationIteration', 'animationStart', 'blur', 'canPlay', 'canPlayThrough', 'click', 'contextMenu', 'copy', 'cut', 'doubleClick', 'drag', 'dragEnd', 'dragEnter', 'dragExit', 'dragLeave', 'dragOver', 'dragStart', 'drop', 'durationChange', 'emptied', 'encrypted', 'ended', 'error', 'focus', 'input', 'invalid', 'keyDown', 'keyPress', 'keyUp', 'load', 'loadedData', 'loadedMetadata', 'loadStart', 'mouseDown', 'mouseMove', 'mouseOut', 'mouseOver', 'mouseUp', 'paste', 'pause', 'play', 'playing', 'progress', 'rateChange', 'reset', 'scroll', 'seeked', 'seeking', 'stalled', 'submit', 'suspend', 'timeUpdate', 'touchCancel', 'touchEnd', 'touchMove', 'touchStart', 'transitionEnd', 'volumeChange', 'waiting', 'wheel'].forEach(function (event) { var capitalizedEvent = event[0].toUpperCase() + event.slice(1); var onEvent = 'on' + capitalizedEvent; var topEvent = 'top' + capitalizedEvent; var type = { phasedRegistrationNames: { bubbled: onEvent, captured: onEvent + 'Capture' }, dependencies: [topEvent] }; eventTypes[event] = type; topLevelEventsToDispatchConfig[topEvent] = type; }); var onClickListeners = {}; function getDictionaryKey(inst) { // Prevents V8 performance issue: // https://github.com/facebook/react/pull/7232 return '.' + inst._rootNodeID; } function isInteractive(tag) { return tag === 'button' || tag === 'input' || tag === 'select' || tag === 'textarea'; } var SimpleEventPlugin = { eventTypes: eventTypes, extractEvents: function (topLevelType, targetInst, nativeEvent, nativeEventTarget) { var dispatchConfig = topLevelEventsToDispatchConfig[topLevelType]; if (!dispatchConfig) { return null; } var EventConstructor; switch (topLevelType) { case 'topAbort': case 'topCanPlay': case 'topCanPlayThrough': case 'topDurationChange': case 'topEmptied': case 'topEncrypted': case 'topEnded': case 'topError': case 'topInput': case 'topInvalid': case 'topLoad': case 'topLoadedData': case 'topLoadedMetadata': case 'topLoadStart': case 'topPause': case 'topPlay': case 'topPlaying': case 'topProgress': case 'topRateChange': case 'topReset': case 'topSeeked': case 'topSeeking': case 'topStalled': case 'topSubmit': case 'topSuspend': case 'topTimeUpdate': case 'topVolumeChange': case 'topWaiting': // HTML Events // @see http://www.w3.org/TR/html5/index.html#events-0 EventConstructor = SyntheticEvent; break; case 'topKeyPress': // Firefox creates a keypress event for function keys too. This removes // the unwanted keypress events. Enter is however both printable and // non-printable. One would expect Tab to be as well (but it isn't). if (getEventCharCode(nativeEvent) === 0) { return null; } /* falls through */ case 'topKeyDown': case 'topKeyUp': EventConstructor = SyntheticKeyboardEvent; break; case 'topBlur': case 'topFocus': EventConstructor = SyntheticFocusEvent; break; case 'topClick': // Firefox creates a click event on right mouse clicks. This removes the // unwanted click events. 
if (nativeEvent.button === 2) { return null; } /* falls through */ case 'topDoubleClick': case 'topMouseDown': case 'topMouseMove': case 'topMouseUp': // TODO: Disabled elements should not respond to mouse events /* falls through */ case 'topMouseOut': case 'topMouseOver': case 'topContextMenu': EventConstructor = SyntheticMouseEvent; break; case 'topDrag': case 'topDragEnd': case 'topDragEnter': case 'topDragExit': case 'topDragLeave': case 'topDragOver': case 'topDragStart': case 'topDrop': EventConstructor = SyntheticDragEvent; break; case 'topTouchCancel': case 'topTouchEnd': case 'topTouchMove': case 'topTouchStart': EventConstructor = SyntheticTouchEvent; break; case 'topAnimationEnd': case 'topAnimationIteration': case 'topAnimationStart': EventConstructor = SyntheticAnimationEvent; break; case 'topTransitionEnd': EventConstructor = SyntheticTransitionEvent; break; case 'topScroll': EventConstructor = SyntheticUIEvent; break; case 'topWheel': EventConstructor = SyntheticWheelEvent; break; case 'topCopy': case 'topCut': case 'topPaste': EventConstructor = SyntheticClipboardEvent; break; } !EventConstructor ? false ? invariant(false, 'SimpleEventPlugin: Unhandled event type, `%s`.', topLevelType) : _prodInvariant('86', topLevelType) : void 0; var event = EventConstructor.getPooled(dispatchConfig, targetInst, nativeEvent, nativeEventTarget); EventPropagators.accumulateTwoPhaseDispatches(event); return event; }, didPutListener: function (inst, registrationName, listener) { // Mobile Safari does not fire properly bubble click events on // non-interactive elements, which means delegated click listeners do not // fire. The workaround for this bug involves attaching an empty click // listener on the target node. // http://www.quirksmode.org/blog/archives/2010/09/click_event_del.html if (registrationName === 'onClick' && !isInteractive(inst._tag)) { var key = getDictionaryKey(inst); var node = ReactDOMComponentTree.getNodeFromInstance(inst); if (!onClickListeners[key]) { onClickListeners[key] = EventListener.listen(node, 'click', emptyFunction); } } }, willDeleteListener: function (inst, registrationName) { if (registrationName === 'onClick' && !isInteractive(inst._tag)) { var key = getDictionaryKey(inst); onClickListeners[key].remove(); delete onClickListeners[key]; } } }; module.exports = SimpleEventPlugin; /***/ }, /* 148 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticEvent = __webpack_require__(50); /** * @interface Event * @see http://www.w3.org/TR/css3-animations/#AnimationEvent-interface * @see https://developer.mozilla.org/en-US/docs/Web/API/AnimationEvent */ var AnimationEventInterface = { animationName: null, elapsedTime: null, pseudoElement: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. 
* @extends {SyntheticEvent} */ function SyntheticAnimationEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticEvent.augmentClass(SyntheticAnimationEvent, AnimationEventInterface); module.exports = SyntheticAnimationEvent; /***/ }, /* 149 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticEvent = __webpack_require__(50); /** * @interface Event * @see http://www.w3.org/TR/clipboard-apis/ */ var ClipboardEventInterface = { clipboardData: function (event) { return 'clipboardData' in event ? event.clipboardData : window.clipboardData; } }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticClipboardEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticEvent.augmentClass(SyntheticClipboardEvent, ClipboardEventInterface); module.exports = SyntheticClipboardEvent; /***/ }, /* 150 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticUIEvent = __webpack_require__(67); /** * @interface FocusEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var FocusEventInterface = { relatedTarget: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticFocusEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticUIEvent.augmentClass(SyntheticFocusEvent, FocusEventInterface); module.exports = SyntheticFocusEvent; /***/ }, /* 151 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var SyntheticUIEvent = __webpack_require__(67); var getEventCharCode = __webpack_require__(152); var getEventKey = __webpack_require__(153); var getEventModifierState = __webpack_require__(69); /** * @interface KeyboardEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var KeyboardEventInterface = { key: getEventKey, location: null, ctrlKey: null, shiftKey: null, altKey: null, metaKey: null, repeat: null, locale: null, getModifierState: getEventModifierState, // Legacy Interface charCode: function (event) { // `charCode` is the result of a KeyPress event and represents the value of // the actual printable character. // KeyPress is deprecated, but its replacement is not yet final and not // implemented in any major browser. Only KeyPress has charCode. if (event.type === 'keypress') { return getEventCharCode(event); } return 0; }, keyCode: function (event) { // `keyCode` is the result of a KeyDown/Up event and represents the value of // physical keyboard key. // The actual meaning of the value depends on the users' keyboard layout // which cannot be detected. Assuming that it is a US keyboard layout // provides a surprisingly accurate mapping for US and European users. // Due to this, it is left to the user to implement at this time. if (event.type === 'keydown' || event.type === 'keyup') { return event.keyCode; } return 0; }, which: function (event) { // `which` is an alias for either `keyCode` or `charCode` depending on the // type of the event. if (event.type === 'keypress') { return getEventCharCode(event); } if (event.type === 'keydown' || event.type === 'keyup') { return event.keyCode; } return 0; } }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticKeyboardEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticUIEvent.augmentClass(SyntheticKeyboardEvent, KeyboardEventInterface); module.exports = SyntheticKeyboardEvent; /***/ }, /* 152 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; /** * `charCode` represents the actual "character code" and is safe to use with * `String.fromCharCode`. As such, only keys that correspond to printable * characters produce a valid `charCode`, the only exception to this is Enter. * The Tab-key is considered non-printable and does not have a `charCode`, * presumably because it does not produce a tab-character in browsers. * * @param {object} nativeEvent Native browser event. * @return {number} Normalized `charCode` property. */ function getEventCharCode(nativeEvent) { var charCode; var keyCode = nativeEvent.keyCode; if ('charCode' in nativeEvent) { charCode = nativeEvent.charCode; // FF does not set `charCode` for the Enter-key, check against `keyCode`. if (charCode === 0 && keyCode === 13) { charCode = 13; } } else { // IE8 does not implement `charCode`, but `keyCode` has the correct value. charCode = keyCode; } // Some non-printable keys are reported in `charCode`/`keyCode`, discard them. 
// Must not discard the (non-)printable Enter-key. if (charCode >= 32 || charCode === 13) { return charCode; } return 0; } module.exports = getEventCharCode; /***/ }, /* 153 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var getEventCharCode = __webpack_require__(152); /** * Normalization of deprecated HTML5 `key` values * @see https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent#Key_names */ var normalizeKey = { 'Esc': 'Escape', 'Spacebar': ' ', 'Left': 'ArrowLeft', 'Up': 'ArrowUp', 'Right': 'ArrowRight', 'Down': 'ArrowDown', 'Del': 'Delete', 'Win': 'OS', 'Menu': 'ContextMenu', 'Apps': 'ContextMenu', 'Scroll': 'ScrollLock', 'MozPrintableKey': 'Unidentified' }; /** * Translation from legacy `keyCode` to HTML5 `key` * Only special keys supported, all others depend on keyboard layout or browser * @see https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent#Key_names */ var translateToKey = { 8: 'Backspace', 9: 'Tab', 12: 'Clear', 13: 'Enter', 16: 'Shift', 17: 'Control', 18: 'Alt', 19: 'Pause', 20: 'CapsLock', 27: 'Escape', 32: ' ', 33: 'PageUp', 34: 'PageDown', 35: 'End', 36: 'Home', 37: 'ArrowLeft', 38: 'ArrowUp', 39: 'ArrowRight', 40: 'ArrowDown', 45: 'Insert', 46: 'Delete', 112: 'F1', 113: 'F2', 114: 'F3', 115: 'F4', 116: 'F5', 117: 'F6', 118: 'F7', 119: 'F8', 120: 'F9', 121: 'F10', 122: 'F11', 123: 'F12', 144: 'NumLock', 145: 'ScrollLock', 224: 'Meta' }; /** * @param {object} nativeEvent Native browser event. * @return {string} Normalized `key` property. */ function getEventKey(nativeEvent) { if (nativeEvent.key) { // Normalize inconsistent values reported by browsers due to // implementations of a working draft specification. // FireFox implements `key` but returns `MozPrintableKey` for all // printable characters (normalized to `Unidentified`), ignore it. var key = normalizeKey[nativeEvent.key] || nativeEvent.key; if (key !== 'Unidentified') { return key; } } // Browser does not implement `key`, polyfill as much of it as we can. if (nativeEvent.type === 'keypress') { var charCode = getEventCharCode(nativeEvent); // The enter-key is technically both printable and non-printable and can // thus be captured by `keypress`, no other non-printable key should. return charCode === 13 ? 'Enter' : String.fromCharCode(charCode); } if (nativeEvent.type === 'keydown' || nativeEvent.type === 'keyup') { // While user keyboard layout determines the actual meaning of each // `keyCode` value, almost all function keys have a universal value. return translateToKey[nativeEvent.keyCode] || 'Unidentified'; } return ''; } module.exports = getEventKey; /***/ }, /* 154 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var SyntheticMouseEvent = __webpack_require__(66); /** * @interface DragEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var DragEventInterface = { dataTransfer: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticDragEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticMouseEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticMouseEvent.augmentClass(SyntheticDragEvent, DragEventInterface); module.exports = SyntheticDragEvent; /***/ }, /* 155 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticUIEvent = __webpack_require__(67); var getEventModifierState = __webpack_require__(69); /** * @interface TouchEvent * @see http://www.w3.org/TR/touch-events/ */ var TouchEventInterface = { touches: null, targetTouches: null, changedTouches: null, altKey: null, metaKey: null, ctrlKey: null, shiftKey: null, getModifierState: getEventModifierState }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticUIEvent} */ function SyntheticTouchEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticUIEvent.augmentClass(SyntheticTouchEvent, TouchEventInterface); module.exports = SyntheticTouchEvent; /***/ }, /* 156 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticEvent = __webpack_require__(50); /** * @interface Event * @see http://www.w3.org/TR/2009/WD-css3-transitions-20090320/#transition-events- * @see https://developer.mozilla.org/en-US/docs/Web/API/TransitionEvent */ var TransitionEventInterface = { propertyName: null, elapsedTime: null, pseudoElement: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticEvent} */ function SyntheticTransitionEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticEvent.augmentClass(SyntheticTransitionEvent, TransitionEventInterface); module.exports = SyntheticTransitionEvent; /***/ }, /* 157 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. 
* * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var SyntheticMouseEvent = __webpack_require__(66); /** * @interface WheelEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ var WheelEventInterface = { deltaX: function (event) { return 'deltaX' in event ? event.deltaX : // Fallback to `wheelDeltaX` for Webkit and normalize (right is positive). 'wheelDeltaX' in event ? -event.wheelDeltaX : 0; }, deltaY: function (event) { return 'deltaY' in event ? event.deltaY : // Fallback to `wheelDeltaY` for Webkit and normalize (down is positive). 'wheelDeltaY' in event ? -event.wheelDeltaY : // Fallback to `wheelDelta` for IE<9 and normalize (down is positive). 'wheelDelta' in event ? -event.wheelDelta : 0; }, deltaZ: null, // Browsers without "deltaMode" is reporting in raw wheel delta where one // notch on the scroll is always +/- 120, roughly equivalent to pixels. // A good approximation of DOM_DELTA_LINE (1) is 5% of viewport size or // ~40 pixels, for DOM_DELTA_SCREEN (2) it is 87.5% of viewport size. deltaMode: null }; /** * @param {object} dispatchConfig Configuration used to dispatch this event. * @param {string} dispatchMarker Marker identifying the event target. * @param {object} nativeEvent Native browser event. * @extends {SyntheticMouseEvent} */ function SyntheticWheelEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) { return SyntheticMouseEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget); } SyntheticMouseEvent.augmentClass(SyntheticWheelEvent, WheelEventInterface); module.exports = SyntheticWheelEvent; /***/ }, /* 158 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var DOMLazyTree = __webpack_require__(73); var DOMProperty = __webpack_require__(33); var React = __webpack_require__(3); var ReactBrowserEventEmitter = __webpack_require__(97); var ReactCurrentOwner = __webpack_require__(10); var ReactDOMComponentTree = __webpack_require__(31); var ReactDOMContainerInfo = __webpack_require__(159); var ReactDOMFeatureFlags = __webpack_require__(160); var ReactFeatureFlags = __webpack_require__(55); var ReactInstanceMap = __webpack_require__(108); var ReactInstrumentation = __webpack_require__(59); var ReactMarkupChecksum = __webpack_require__(161); var ReactReconciler = __webpack_require__(56); var ReactUpdateQueue = __webpack_require__(127); var ReactUpdates = __webpack_require__(53); var emptyObject = __webpack_require__(20); var instantiateReactComponent = __webpack_require__(111); var invariant = __webpack_require__(8); var setInnerHTML = __webpack_require__(75); var shouldUpdateReactComponent = __webpack_require__(115); var warning = __webpack_require__(11); var ATTR_NAME = DOMProperty.ID_ATTRIBUTE_NAME; var ROOT_ATTR_NAME = DOMProperty.ROOT_ATTRIBUTE_NAME; var ELEMENT_NODE_TYPE = 1; var DOC_NODE_TYPE = 9; var DOCUMENT_FRAGMENT_NODE_TYPE = 11; var instancesByReactRootID = {}; /** * Finds the index of the first character * that's not common between the two given strings. 
* * @return {number} the index of the character where the strings diverge */ function firstDifferenceIndex(string1, string2) { var minLen = Math.min(string1.length, string2.length); for (var i = 0; i < minLen; i++) { if (string1.charAt(i) !== string2.charAt(i)) { return i; } } return string1.length === string2.length ? -1 : minLen; } /** * @param {DOMElement|DOMDocument} container DOM element that may contain * a React component * @return {?*} DOM element that may have the reactRoot ID, or null. */ function getReactRootElementInContainer(container) { if (!container) { return null; } if (container.nodeType === DOC_NODE_TYPE) { return container.documentElement; } else { return container.firstChild; } } function internalGetID(node) { // If node is something like a window, document, or text node, none of // which support attributes or a .getAttribute method, gracefully return // the empty string, as if the attribute were missing. return node.getAttribute && node.getAttribute(ATTR_NAME) || ''; } /** * Mounts this component and inserts it into the DOM. * * @param {ReactComponent} componentInstance The instance to mount. * @param {DOMElement} container DOM element to mount into. * @param {ReactReconcileTransaction} transaction * @param {boolean} shouldReuseMarkup If true, do not insert markup */ function mountComponentIntoNode(wrapperInstance, container, transaction, shouldReuseMarkup, context) { var markerName; if (ReactFeatureFlags.logTopLevelRenders) { var wrappedElement = wrapperInstance._currentElement.props.child; var type = wrappedElement.type; markerName = 'React mount: ' + (typeof type === 'string' ? type : type.displayName || type.name); console.time(markerName); } var markup = ReactReconciler.mountComponent(wrapperInstance, transaction, null, ReactDOMContainerInfo(wrapperInstance, container), context, 0 /* parentDebugID */ ); if (markerName) { console.timeEnd(markerName); } wrapperInstance._renderedComponent._topLevelWrapper = wrapperInstance; ReactMount._mountImageIntoNode(markup, container, wrapperInstance, shouldReuseMarkup, transaction); } /** * Batched mount. * * @param {ReactComponent} componentInstance The instance to mount. * @param {DOMElement} container DOM element to mount into. * @param {boolean} shouldReuseMarkup If true, do not insert markup */ function batchedMountComponentIntoNode(componentInstance, container, shouldReuseMarkup, context) { var transaction = ReactUpdates.ReactReconcileTransaction.getPooled( /* useCreateElement */ !shouldReuseMarkup && ReactDOMFeatureFlags.useCreateElement); transaction.perform(mountComponentIntoNode, null, componentInstance, container, transaction, shouldReuseMarkup, context); ReactUpdates.ReactReconcileTransaction.release(transaction); } /** * Unmounts a component and removes it from the DOM. * * @param {ReactComponent} instance React component instance. * @param {DOMElement} container DOM element to unmount from. * @final * @internal * @see {ReactMount.unmountComponentAtNode} */ function unmountComponentFromNode(instance, container, safely) { if (false) { ReactInstrumentation.debugTool.onBeginFlush(); } ReactReconciler.unmountComponent(instance, safely); if (false) { ReactInstrumentation.debugTool.onEndFlush(); } if (container.nodeType === DOC_NODE_TYPE) { container = container.documentElement; } // http://jsperf.com/emptying-a-node while (container.lastChild) { container.removeChild(container.lastChild); } } /** * True if the supplied DOM node has a direct React-rendered child that is * not a React root element. 
Useful for warning in `render`, * `unmountComponentAtNode`, etc. * * @param {?DOMElement} node The candidate DOM node. * @return {boolean} True if the DOM element contains a direct child that was * rendered by React but is not a root element. * @internal */ function hasNonRootReactChild(container) { var rootEl = getReactRootElementInContainer(container); if (rootEl) { var inst = ReactDOMComponentTree.getInstanceFromNode(rootEl); return !!(inst && inst._hostParent); } } /** * True if the supplied DOM node is a React DOM element and * it has been rendered by another copy of React. * * @param {?DOMElement} node The candidate DOM node. * @return {boolean} True if the DOM has been rendered by another copy of React * @internal */ function nodeIsRenderedByOtherInstance(container) { var rootEl = getReactRootElementInContainer(container); return !!(rootEl && isReactNode(rootEl) && !ReactDOMComponentTree.getInstanceFromNode(rootEl)); } /** * True if the supplied DOM node is a valid node element. * * @param {?DOMElement} node The candidate DOM node. * @return {boolean} True if the DOM is a valid DOM node. * @internal */ function isValidContainer(node) { return !!(node && (node.nodeType === ELEMENT_NODE_TYPE || node.nodeType === DOC_NODE_TYPE || node.nodeType === DOCUMENT_FRAGMENT_NODE_TYPE)); } /** * True if the supplied DOM node is a valid React node element. * * @param {?DOMElement} node The candidate DOM node. * @return {boolean} True if the DOM is a valid React DOM node. * @internal */ function isReactNode(node) { return isValidContainer(node) && (node.hasAttribute(ROOT_ATTR_NAME) || node.hasAttribute(ATTR_NAME)); } function getHostRootInstanceInContainer(container) { var rootEl = getReactRootElementInContainer(container); var prevHostInstance = rootEl && ReactDOMComponentTree.getInstanceFromNode(rootEl); return prevHostInstance && !prevHostInstance._hostParent ? prevHostInstance : null; } function getTopLevelWrapperInContainer(container) { var root = getHostRootInstanceInContainer(container); return root ? root._hostContainerInfo._topLevelWrapper : null; } /** * Temporary (?) hack so that we can store all top-level pending updates on * composites instead of having to worry about different types of components * here. */ var topLevelRootCounter = 1; var TopLevelWrapper = function () { this.rootID = topLevelRootCounter++; }; TopLevelWrapper.prototype.isReactComponent = {}; if (false) { TopLevelWrapper.displayName = 'TopLevelWrapper'; } TopLevelWrapper.prototype.render = function () { return this.props.child; }; TopLevelWrapper.isReactTopLevelWrapper = true; /** * Mounting is the process of initializing a React component by creating its * representative DOM elements and inserting them into a supplied `container`. * Any prior content inside `container` is destroyed in the process. * * ReactMount.render( * component, * document.getElementById('container') * ); * * <div id="container"> <-- Supplied `container`. * <div data-reactid=".3"> <-- Rendered reactRoot of React * // ... component. * </div> * </div> * * Inside of `container`, the first element rendered is the "reactRoot". */ var ReactMount = { TopLevelWrapper: TopLevelWrapper, /** * Used by devtools. The keys are not important. */ _instancesByReactRootID: instancesByReactRootID, /** * This is a hook provided to support rendering React components while * ensuring that the apparent scroll position of its `container` does not * change. * * @param {DOMElement} container The `container` being rendered into. 
* @param {function} renderCallback This must be called once to do the render. */ scrollMonitor: function (container, renderCallback) { renderCallback(); }, /** * Take a component that's already mounted into the DOM and replace its props * @param {ReactComponent} prevComponent component instance already in the DOM * @param {ReactElement} nextElement component instance to render * @param {DOMElement} container container to render into * @param {?function} callback function triggered on completion */ _updateRootComponent: function (prevComponent, nextElement, nextContext, container, callback) { ReactMount.scrollMonitor(container, function () { ReactUpdateQueue.enqueueElementInternal(prevComponent, nextElement, nextContext); if (callback) { ReactUpdateQueue.enqueueCallbackInternal(prevComponent, callback); } }); return prevComponent; }, /** * Render a new component into the DOM. Hooked by hooks! * * @param {ReactElement} nextElement element to render * @param {DOMElement} container container to render into * @param {boolean} shouldReuseMarkup if we should skip the markup insertion * @return {ReactComponent} nextComponent */ _renderNewRootComponent: function (nextElement, container, shouldReuseMarkup, context) { // Various parts of our code (such as ReactCompositeComponent's // _renderValidatedComponent) assume that calls to render aren't nested; // verify that that's the case. false ? warning(ReactCurrentOwner.current == null, '_renderNewRootComponent(): Render methods should be a pure function ' + 'of props and state; triggering nested component updates from ' + 'render is not allowed. If necessary, trigger nested updates in ' + 'componentDidUpdate. Check the render method of %s.', ReactCurrentOwner.current && ReactCurrentOwner.current.getName() || 'ReactCompositeComponent') : void 0; !isValidContainer(container) ? false ? invariant(false, '_registerComponent(...): Target container is not a DOM element.') : _prodInvariant('37') : void 0; ReactBrowserEventEmitter.ensureScrollValueMonitoring(); var componentInstance = instantiateReactComponent(nextElement, false); // The initial render is synchronous but any updates that happen during // rendering, in componentWillMount or componentDidMount, will be batched // according to the current batching strategy. ReactUpdates.batchedUpdates(batchedMountComponentIntoNode, componentInstance, container, shouldReuseMarkup, context); var wrapperID = componentInstance._instance.rootID; instancesByReactRootID[wrapperID] = componentInstance; return componentInstance; }, /** * Renders a React component into the DOM in the supplied `container`. * * If the React component was previously rendered into `container`, this will * perform an update on it and only mutate the DOM as necessary to reflect the * latest React component. * * @param {ReactComponent} parentComponent The conceptual parent of this render tree. * @param {ReactElement} nextElement Component element to render. * @param {DOMElement} container DOM element to render into. * @param {?function} callback function triggered on completion * @return {ReactComponent} Component instance rendered in `container`. */ renderSubtreeIntoContainer: function (parentComponent, nextElement, container, callback) { !(parentComponent != null && ReactInstanceMap.has(parentComponent)) ? false ? 
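/*
 * A minimal usage sketch of the update path implemented by _updateRootComponent
 * above: rendering a second time into the same container patches the existing
 * tree instead of remounting it. Assumes a page that ships a <div id="root">
 * container.
 *
 *   var React = require('react');
 *   var ReactDOM = require('react-dom');
 *
 *   var root = document.getElementById('root');
 *   // First call mounts a brand new root (see _renderNewRootComponent).
 *   ReactDOM.render(React.createElement('span', null, 'first'), root);
 *   // Second call reuses the mounted root and only mutates what changed.
 *   ReactDOM.render(React.createElement('span', null, 'second'), root);
 */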
invariant(false, 'parentComponent must be a valid React Component') : _prodInvariant('38') : void 0; return ReactMount._renderSubtreeIntoContainer(parentComponent, nextElement, container, callback); }, _renderSubtreeIntoContainer: function (parentComponent, nextElement, container, callback) { ReactUpdateQueue.validateCallback(callback, 'ReactDOM.render'); !React.isValidElement(nextElement) ? false ? invariant(false, 'ReactDOM.render(): Invalid component element.%s', typeof nextElement === 'string' ? ' Instead of passing a string like \'div\', pass ' + 'React.createElement(\'div\') or <div />.' : typeof nextElement === 'function' ? ' Instead of passing a class like Foo, pass ' + 'React.createElement(Foo) or <Foo />.' : // Check if it quacks like an element nextElement != null && nextElement.props !== undefined ? ' This may be caused by unintentionally loading two independent ' + 'copies of React.' : '') : _prodInvariant('39', typeof nextElement === 'string' ? ' Instead of passing a string like \'div\', pass ' + 'React.createElement(\'div\') or <div />.' : typeof nextElement === 'function' ? ' Instead of passing a class like Foo, pass ' + 'React.createElement(Foo) or <Foo />.' : nextElement != null && nextElement.props !== undefined ? ' This may be caused by unintentionally loading two independent ' + 'copies of React.' : '') : void 0; false ? warning(!container || !container.tagName || container.tagName.toUpperCase() !== 'BODY', 'render(): Rendering components directly into document.body is ' + 'discouraged, since its children are often manipulated by third-party ' + 'scripts and browser extensions. This may lead to subtle ' + 'reconciliation issues. Try rendering into a container element created ' + 'for your app.') : void 0; var nextWrappedElement = React.createElement(TopLevelWrapper, { child: nextElement }); var nextContext; if (parentComponent) { var parentInst = ReactInstanceMap.get(parentComponent); nextContext = parentInst._processChildContext(parentInst._context); } else { nextContext = emptyObject; } var prevComponent = getTopLevelWrapperInContainer(container); if (prevComponent) { var prevWrappedElement = prevComponent._currentElement; var prevElement = prevWrappedElement.props.child; if (shouldUpdateReactComponent(prevElement, nextElement)) { var publicInst = prevComponent._renderedComponent.getPublicInstance(); var updatedCallback = callback && function () { callback.call(publicInst); }; ReactMount._updateRootComponent(prevComponent, nextWrappedElement, nextContext, container, updatedCallback); return publicInst; } else { ReactMount.unmountComponentAtNode(container); } } var reactRootElement = getReactRootElementInContainer(container); var containerHasReactMarkup = reactRootElement && !!internalGetID(reactRootElement); var containerHasNonRootReactChild = hasNonRootReactChild(container); if (false) { process.env.NODE_ENV !== 'production' ? warning(!containerHasNonRootReactChild, 'render(...): Replacing React-rendered children with a new root ' + 'component. If you intended to update the children of this node, ' + 'you should instead have the existing children update their state ' + 'and render the new components instead of calling ReactDOM.render.') : void 0; if (!containerHasReactMarkup || reactRootElement.nextSibling) { var rootElementSibling = reactRootElement; while (rootElementSibling) { if (internalGetID(rootElementSibling)) { process.env.NODE_ENV !== 'production' ? 
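/*
 * A small sketch of the container handling warned about above: render into a
 * dedicated element the application owns rather than document.body, so
 * third-party scripts and extensions cannot disturb reconciliation.
 *
 *   var React = require('react');
 *   var ReactDOM = require('react-dom');
 *
 *   // Discouraged - document.body children are often mutated by extensions:
 *   // ReactDOM.render(React.createElement('h1', null, 'hello'), document.body);
 *
 *   // Preferred - give React a container element it fully owns:
 *   var mountPoint = document.createElement('div');
 *   document.body.appendChild(mountPoint);
 *   ReactDOM.render(React.createElement('h1', null, 'hello'), mountPoint);
 */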
warning(false, 'render(): Target node has markup rendered by React, but there ' + 'are unrelated nodes as well. This is most commonly caused by ' + 'white-space inserted around server-rendered markup.') : void 0; break; } rootElementSibling = rootElementSibling.nextSibling; } } } var shouldReuseMarkup = containerHasReactMarkup && !prevComponent && !containerHasNonRootReactChild; var component = ReactMount._renderNewRootComponent(nextWrappedElement, container, shouldReuseMarkup, nextContext)._renderedComponent.getPublicInstance(); if (callback) { callback.call(component); } return component; }, /** * Renders a React component into the DOM in the supplied `container`. * See https://facebook.github.io/react/docs/top-level-api.html#reactdom.render * * If the React component was previously rendered into `container`, this will * perform an update on it and only mutate the DOM as necessary to reflect the * latest React component. * * @param {ReactElement} nextElement Component element to render. * @param {DOMElement} container DOM element to render into. * @param {?function} callback function triggered on completion * @return {ReactComponent} Component instance rendered in `container`. */ render: function (nextElement, container, callback) { return ReactMount._renderSubtreeIntoContainer(null, nextElement, container, callback); }, /** * Unmounts and destroys the React component rendered in the `container`. * See https://facebook.github.io/react/docs/top-level-api.html#reactdom.unmountcomponentatnode * * @param {DOMElement} container DOM element containing a React component. * @return {boolean} True if a component was found in and unmounted from * `container` */ unmountComponentAtNode: function (container) { // Various parts of our code (such as ReactCompositeComponent's // _renderValidatedComponent) assume that calls to render aren't nested; // verify that that's the case. (Strictly speaking, unmounting won't cause a // render but we still don't expect to be in a render call here.) false ? warning(ReactCurrentOwner.current == null, 'unmountComponentAtNode(): Render methods should be a pure function ' + 'of props and state; triggering nested component updates from render ' + 'is not allowed. If necessary, trigger nested updates in ' + 'componentDidUpdate. Check the render method of %s.', ReactCurrentOwner.current && ReactCurrentOwner.current.getName() || 'ReactCompositeComponent') : void 0; !isValidContainer(container) ? false ? invariant(false, 'unmountComponentAtNode(...): Target container is not a DOM element.') : _prodInvariant('40') : void 0; if (false) { process.env.NODE_ENV !== 'production' ? warning(!nodeIsRenderedByOtherInstance(container), 'unmountComponentAtNode(): The node you\'re attempting to unmount ' + 'was rendered by another copy of React.') : void 0; } var prevComponent = getTopLevelWrapperInContainer(container); if (!prevComponent) { // Check if the node being unmounted was rendered by React, but isn't a // root node. var containerHasNonRootReactChild = hasNonRootReactChild(container); // Check if the container itself is a React root node. var isContainerReactRoot = container.nodeType === 1 && container.hasAttribute(ROOT_ATTR_NAME); if (false) { process.env.NODE_ENV !== 'production' ? warning(!containerHasNonRootReactChild, 'unmountComponentAtNode(): The node you\'re attempting to unmount ' + 'was rendered by React and is not a top-level container. %s', isContainerReactRoot ? 'You may have accidentally passed in a React root node instead ' + 'of its container.' 
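/*
 * A rough sketch of the markup-reuse decision computed above
 * (shouldReuseMarkup): when the container already holds server-rendered
 * markup with a matching data-react-checksum, the client adopts the existing
 * DOM nodes instead of re-inserting markup. App here is a placeholder for
 * whatever component both sides render.
 *
 *   // server side (ReactDOMServer ships with react-dom):
 *   var ReactDOMServer = require('react-dom/server');
 *   var html = ReactDOMServer.renderToString(React.createElement(App));
 *   // ...embed `html` inside <div id="root"> in the page response
 *
 *   // client side: same element type and props, so the checksum matches and
 *   // the server-rendered nodes are reused rather than thrown away
 *   ReactDOM.render(React.createElement(App), document.getElementById('root'));
 */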
: 'Instead, have the parent component update its state and ' + 'rerender in order to remove this component.') : void 0; } return false; } delete instancesByReactRootID[prevComponent._instance.rootID]; ReactUpdates.batchedUpdates(unmountComponentFromNode, prevComponent, container, false); return true; }, _mountImageIntoNode: function (markup, container, instance, shouldReuseMarkup, transaction) { !isValidContainer(container) ? false ? invariant(false, 'mountComponentIntoNode(...): Target container is not valid.') : _prodInvariant('41') : void 0; if (shouldReuseMarkup) { var rootElement = getReactRootElementInContainer(container); if (ReactMarkupChecksum.canReuseMarkup(markup, rootElement)) { ReactDOMComponentTree.precacheNode(instance, rootElement); return; } else { var checksum = rootElement.getAttribute(ReactMarkupChecksum.CHECKSUM_ATTR_NAME); rootElement.removeAttribute(ReactMarkupChecksum.CHECKSUM_ATTR_NAME); var rootMarkup = rootElement.outerHTML; rootElement.setAttribute(ReactMarkupChecksum.CHECKSUM_ATTR_NAME, checksum); var normalizedMarkup = markup; if (false) { // because rootMarkup is retrieved from the DOM, various normalizations // will have occurred which will not be present in `markup`. Here, // insert markup into a <div> or <iframe> depending on the container // type to perform the same normalizations before comparing. var normalizer; if (container.nodeType === ELEMENT_NODE_TYPE) { normalizer = document.createElement('div'); normalizer.innerHTML = markup; normalizedMarkup = normalizer.innerHTML; } else { normalizer = document.createElement('iframe'); document.body.appendChild(normalizer); normalizer.contentDocument.write(markup); normalizedMarkup = normalizer.contentDocument.documentElement.outerHTML; document.body.removeChild(normalizer); } } var diffIndex = firstDifferenceIndex(normalizedMarkup, rootMarkup); var difference = ' (client) ' + normalizedMarkup.substring(diffIndex - 20, diffIndex + 20) + '\n (server) ' + rootMarkup.substring(diffIndex - 20, diffIndex + 20); !(container.nodeType !== DOC_NODE_TYPE) ? false ? invariant(false, 'You\'re trying to render a component to the document using server rendering but the checksum was invalid. This usually means you rendered a different component type or props on the client from the one on the server, or your render() methods are impure. React cannot handle this case due to cross-browser quirks by rendering at the document root. You should look for environment dependent code in your components and ensure the props are the same client and server side:\n%s', difference) : _prodInvariant('42', difference) : void 0; if (false) { process.env.NODE_ENV !== 'production' ? warning(false, 'React attempted to reuse markup in a container but the ' + 'checksum was invalid. This generally means that you are ' + 'using server rendering and the markup generated on the ' + 'server was not what the client was expecting. React injected ' + 'new markup to compensate which works but you have lost many ' + 'of the benefits of server rendering. Instead, figure out ' + 'why the markup being generated is different on the client ' + 'or server:\n%s', difference) : void 0; } } } !(container.nodeType !== DOC_NODE_TYPE) ? false ? invariant(false, 'You\'re trying to render a component to the document but you didn\'t use server rendering. We can\'t do this without using server rendering due to cross-browser quirks. 
See ReactDOMServer.renderToString() for server rendering.') : _prodInvariant('43') : void 0; if (transaction.useCreateElement) { while (container.lastChild) { container.removeChild(container.lastChild); } DOMLazyTree.insertTreeBefore(container, markup, null); } else { setInnerHTML(container, markup); ReactDOMComponentTree.precacheNode(instance, container.firstChild); } if (false) { var hostNode = ReactDOMComponentTree.getInstanceFromNode(container.firstChild); if (hostNode._debugID !== 0) { ReactInstrumentation.debugTool.onHostOperation({ instanceID: hostNode._debugID, type: 'mount', payload: markup.toString() }); } } } }; module.exports = ReactMount; /***/ }, /* 159 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var validateDOMNesting = __webpack_require__(128); var DOC_NODE_TYPE = 9; function ReactDOMContainerInfo(topLevelWrapper, node) { var info = { _topLevelWrapper: topLevelWrapper, _idCounter: 1, _ownerDocument: node ? node.nodeType === DOC_NODE_TYPE ? node : node.ownerDocument : null, _node: node, _tag: node ? node.nodeName.toLowerCase() : null, _namespaceURI: node ? node.namespaceURI : null }; if (false) { info._ancestorInfo = node ? validateDOMNesting.updatedAncestorInfo(null, info._tag, null) : null; } return info; } module.exports = ReactDOMContainerInfo; /***/ }, /* 160 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactDOMFeatureFlags = { useCreateElement: true, useFiber: false }; module.exports = ReactDOMFeatureFlags; /***/ }, /* 161 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ 'use strict'; var adler32 = __webpack_require__(162); var TAG_END = /\/?>/; var COMMENT_START = /^<\!\-\-/; var ReactMarkupChecksum = { CHECKSUM_ATTR_NAME: 'data-react-checksum', /** * @param {string} markup Markup string * @return {string} Markup string with checksum attribute attached */ addChecksumToMarkup: function (markup) { var checksum = adler32(markup); // Add checksum (handle both parent tags, comments and self-closing tags) if (COMMENT_START.test(markup)) { return markup; } else { return markup.replace(TAG_END, ' ' + ReactMarkupChecksum.CHECKSUM_ATTR_NAME + '="' + checksum + '"$&'); } }, /** * @param {string} markup to use * @param {DOMElement} element root React element * @returns {boolean} whether or not the markup is the same */ canReuseMarkup: function (markup, element) { var existingChecksum = element.getAttribute(ReactMarkupChecksum.CHECKSUM_ATTR_NAME); existingChecksum = existingChecksum && parseInt(existingChecksum, 10); var markupChecksum = adler32(markup); return markupChecksum === existingChecksum; } }; module.exports = ReactMarkupChecksum; /***/ }, /* 162 */ /***/ function(module, exports) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var MOD = 65521; // adler32 is not cryptographically strong, and is only used to sanity check that // markup generated on the server matches the markup generated on the client. // This implementation (a modified version of the SheetJS version) has been optimized // for our use case, at the expense of conforming to the adler32 specification // for non-ascii inputs. function adler32(data) { var a = 1; var b = 0; var i = 0; var l = data.length; var m = l & ~0x3; while (i < m) { var n = Math.min(i + 4096, m); for (; i < n; i += 4) { b += (a += data.charCodeAt(i)) + (a += data.charCodeAt(i + 1)) + (a += data.charCodeAt(i + 2)) + (a += data.charCodeAt(i + 3)); } a %= MOD; b %= MOD; } for (; i < l; i++) { b += a += data.charCodeAt(i); } a %= MOD; b %= MOD; return a | b << 16; } module.exports = adler32; /***/ }, /* 163 */ 27, /* 164 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var _prodInvariant = __webpack_require__(32); var ReactCurrentOwner = __webpack_require__(10); var ReactDOMComponentTree = __webpack_require__(31); var ReactInstanceMap = __webpack_require__(108); var getHostComponentFromComposite = __webpack_require__(165); var invariant = __webpack_require__(8); var warning = __webpack_require__(11); /** * Returns the DOM node rendered by this element. * * See https://facebook.github.io/react/docs/top-level-api.html#reactdom.finddomnode * * @param {ReactComponent|DOMElement} componentOrElement * @return {?DOMElement} The root node of this element. */ function findDOMNode(componentOrElement) { if (false) { var owner = ReactCurrentOwner.current; if (owner !== null) { process.env.NODE_ENV !== 'production' ? warning(owner._warnedAboutRefsInRender, '%s is accessing findDOMNode inside its render(). 
' + 'render() should be a pure function of props and state. It should ' + 'never access something that requires stale data from the previous ' + 'render, such as refs. Move this logic to componentDidMount and ' + 'componentDidUpdate instead.', owner.getName() || 'A component') : void 0; owner._warnedAboutRefsInRender = true; } } if (componentOrElement == null) { return null; } if (componentOrElement.nodeType === 1) { return componentOrElement; } var inst = ReactInstanceMap.get(componentOrElement); if (inst) { inst = getHostComponentFromComposite(inst); return inst ? ReactDOMComponentTree.getNodeFromInstance(inst) : null; } if (typeof componentOrElement.render === 'function') { true ? false ? invariant(false, 'findDOMNode was called on an unmounted component.') : _prodInvariant('44') : void 0; } else { true ? false ? invariant(false, 'Element appears to be neither ReactComponent nor DOMNode (keys: %s)', Object.keys(componentOrElement)) : _prodInvariant('45', Object.keys(componentOrElement)) : void 0; } } module.exports = findDOMNode; /***/ }, /* 165 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactNodeTypes = __webpack_require__(113); function getHostComponentFromComposite(inst) { var type; while ((type = inst._renderedNodeType) === ReactNodeTypes.COMPOSITE) { inst = inst._renderedComponent; } if (type === ReactNodeTypes.HOST) { return inst._renderedComponent; } else if (type === ReactNodeTypes.EMPTY) { return null; } } module.exports = getHostComponentFromComposite; /***/ }, /* 166 */ /***/ function(module, exports, __webpack_require__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ 'use strict'; var ReactMount = __webpack_require__(158); module.exports = ReactMount.renderSubtreeIntoContainer; /***/ }, /* 167 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _example_components = __webpack_require__(168); var _example_components2 = _interopRequireDefault(_example_components); var _hero_example = __webpack_require__(638); var _hero_example2 = _interopRequireDefault(_hero_example); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Root', render: function render() { return _react2.default.createElement( 'div', null, _react2.default.createElement( 'div', { className: 'hero' }, _react2.default.createElement( 'div', { className: 'hero__content' }, _react2.default.createElement( 'h1', { className: 'hero__title' }, 'ReactJS Datepicker' ), _react2.default.createElement( 'div', { className: 'hero__crafted-by' }, _react2.default.createElement( 'a', { href: 'https://hackerone.com', className: 'hero__crafted-by-link' }, 'Crafted by ', _react2.default.createElement('img', { src: 'images/logo.png', className: 'hero__image', alt: 'HackerOne', title: 'HackerOne' }) ) ), _react2.default.createElement( 'div', { className: 'hero__example' }, _react2.default.createElement(_hero_example2.default, null) ) ) ), _react2.default.createElement( 'div', { className: 'wrapper' }, _react2.default.createElement( 'h1', null, 'ReactJS Datepicker' ), _react2.default.createElement( 'p', null, _react2.default.createElement( 'a', { href: 'https://npmjs.org/package/react-datepicker' }, _react2.default.createElement('img', { src: 'https://badge.fury.io/js/react-datepicker.svg', className: 'badge' }) ), _react2.default.createElement( 'a', { href: 'https://travis-ci.org/Hacker0x01/react-datepicker' }, _react2.default.createElement('img', { src: 'https://travis-ci.org/Hacker0x01/react-datepicker.svg?branch=master', className: 'badge' }) ), _react2.default.createElement( 'a', { href: 'https://david-dm.org/Hacker0x01/react-datepicker' }, _react2.default.createElement('img', { src: 'https://david-dm.org/Hacker0x01/react-datepicker.svg', className: 'badge' }) ), _react2.default.createElement( 'a', { href: 'https://npmjs.org/package/react-datepicker' + '?__hstc=72727564.ca821b01b5b29b1831f0936a681f0483.1428679773810.1435582678273.1438354735499.5' + '&__hssc=72727564.1.1438354735499' + '&__hsfp=2497064007' }, _react2.default.createElement('img', { src: 'https://img.shields.io/npm/dm/react-datepicker.svg', className: 'badge' }) ) ), _react2.default.createElement( 'p', null, 'A simple and reusable datepicker component for React.' 
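/*
 * A minimal usage sketch for the component documented on this page; the prop
 * names follow the examples bundled below (the typical selected/onChange pair,
 * not an exhaustive API), and the stylesheet path is the usual dist location,
 * which may differ by version.
 *
 *   var React = require('react');
 *   var moment = require('moment');
 *   var DatePicker = require('react-datepicker');
 *   require('react-datepicker/dist/react-datepicker.css');
 *
 *   var Example = React.createClass({
 *     getInitialState: function () { return { startDate: moment() }; },
 *     render: function () {
 *       return React.createElement(DatePicker, {
 *         selected: this.state.startDate,
 *         onChange: function (date) { this.setState({ startDate: date }); }.bind(this)
 *       });
 *     }
 *   });
 */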
), _react2.default.createElement( 'h2', null, 'Installation' ), _react2.default.createElement( 'p', null, 'The package can be installed via NPM:' ), _react2.default.createElement( 'p', null, _react2.default.createElement( 'code', null, 'npm install react-datepicker --save' ) ) ), _react2.default.createElement( 'div', { className: 'wrapper' }, _react2.default.createElement(_example_components2.default, null) ), _react2.default.createElement( 'a', { href: 'https://github.com/Hacker0x01/react-datepicker/' }, _react2.default.createElement('img', { className: 'github-ribbon', src: 'images/ribbon.png', alt: 'Fork me on GitHub' }) ) ); } }); /***/ }, /* 168 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _highlight = __webpack_require__(169); var _highlight2 = _interopRequireDefault(_highlight); var _default = __webpack_require__(340); var _default2 = _interopRequireDefault(_default); var _code_example_component = __webpack_require__(603); var _code_example_component2 = _interopRequireDefault(_code_example_component); var _custom_date_format = __webpack_require__(604); var _custom_date_format2 = _interopRequireDefault(_custom_date_format); var _custom_class_name = __webpack_require__(605); var _custom_class_name2 = _interopRequireDefault(_custom_class_name); var _custom_calendar_class_name = __webpack_require__(606); var _custom_calendar_class_name2 = _interopRequireDefault(_custom_calendar_class_name); var _placeholder_text = __webpack_require__(607); var _placeholder_text2 = _interopRequireDefault(_placeholder_text); var _specific_date_range = __webpack_require__(608); var _specific_date_range2 = _interopRequireDefault(_specific_date_range); var _locale = __webpack_require__(609); var _locale2 = _interopRequireDefault(_locale); var _exclude_dates = __webpack_require__(610); var _exclude_dates2 = _interopRequireDefault(_exclude_dates); var _highlight_dates = __webpack_require__(611); var _highlight_dates2 = _interopRequireDefault(_highlight_dates); var _include_dates = __webpack_require__(612); var _include_dates2 = _interopRequireDefault(_include_dates); var _filter_dates = __webpack_require__(613); var _filter_dates2 = _interopRequireDefault(_filter_dates); var _disabled = __webpack_require__(614); var _disabled2 = _interopRequireDefault(_disabled); var _disabled_keyboard_navigation = __webpack_require__(615); var _disabled_keyboard_navigation2 = _interopRequireDefault(_disabled_keyboard_navigation); var _clear_input = __webpack_require__(616); var _clear_input2 = _interopRequireDefault(_clear_input); var _on_blur_callbacks = __webpack_require__(617); var _on_blur_callbacks2 = _interopRequireDefault(_on_blur_callbacks); var _placement = __webpack_require__(618); var _placement2 = _interopRequireDefault(_placement); var _date_range = __webpack_require__(619); var _date_range2 = _interopRequireDefault(_date_range); var _tab_index = __webpack_require__(620); var _tab_index2 = _interopRequireDefault(_tab_index); var _year_dropdown = __webpack_require__(621); var _year_dropdown2 = _interopRequireDefault(_year_dropdown); var _month_dropdown = __webpack_require__(622); var _month_dropdown2 = _interopRequireDefault(_month_dropdown); var _year_select_dropdown = __webpack_require__(623); var _year_select_dropdown2 = _interopRequireDefault(_year_select_dropdown); var _today = __webpack_require__(624); var _today2 = 
_interopRequireDefault(_today); var _timezone_date = __webpack_require__(625); var _timezone_date2 = _interopRequireDefault(_timezone_date); var _inline = __webpack_require__(626); var _inline2 = _interopRequireDefault(_inline); var _open_to_date = __webpack_require__(627); var _open_to_date2 = _interopRequireDefault(_open_to_date); var _fixed_calendar = __webpack_require__(628); var _fixed_calendar2 = _interopRequireDefault(_fixed_calendar); var _week_numbers = __webpack_require__(629); var _week_numbers2 = _interopRequireDefault(_week_numbers); var _custom_input = __webpack_require__(630); var _custom_input2 = _interopRequireDefault(_custom_input); var _multi_month = __webpack_require__(631); var _multi_month2 = _interopRequireDefault(_multi_month); var _multi_month_drp = __webpack_require__(632); var _multi_month_drp2 = _interopRequireDefault(_multi_month_drp); var _children = __webpack_require__(633); var _children2 = _interopRequireDefault(_children); var _portal = __webpack_require__(634); var _portal2 = _interopRequireDefault(_portal); var _raw_change = __webpack_require__(635); var _raw_change2 = _interopRequireDefault(_raw_change); __webpack_require__(636); __webpack_require__(637); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'exampleComponents', componentDidMount: function componentDidMount() { _highlight2.default.initHighlightingOnLoad(); }, examples: [{ title: 'Default', component: _react2.default.createElement(_default2.default, null) }, { title: 'Custom date format', component: _react2.default.createElement(_custom_date_format2.default, null) }, { title: 'Custom class name', component: _react2.default.createElement(_custom_class_name2.default, null) }, { title: 'Custom calendar class name', component: _react2.default.createElement(_custom_calendar_class_name2.default, null) }, { title: 'Today button', component: _react2.default.createElement(_today2.default, null) }, { title: 'TimeZone date', component: _react2.default.createElement(_timezone_date2.default, null) }, { title: 'Placeholder text', component: _react2.default.createElement(_placeholder_text2.default, null) }, { title: 'Specific date range', component: _react2.default.createElement(_specific_date_range2.default, null) }, { title: 'Locale', component: _react2.default.createElement(_locale2.default, null) }, { title: 'Exclude dates', component: _react2.default.createElement(_exclude_dates2.default, null) }, { title: 'Highlight dates', component: _react2.default.createElement(_highlight_dates2.default, null) }, { title: 'Include dates', component: _react2.default.createElement(_include_dates2.default, null) }, { title: 'Filter dates', component: _react2.default.createElement(_filter_dates2.default, null) }, { title: 'Date Range', component: _react2.default.createElement(_date_range2.default, null) }, { title: 'Disable datepicker', component: _react2.default.createElement(_disabled2.default, null) }, { title: 'Disable keyboard navigation', component: _react2.default.createElement(_disabled_keyboard_navigation2.default, null) }, { title: 'Clear datepicker input', component: _react2.default.createElement(_clear_input2.default, null) }, { title: 'onBlur callbacks in console', component: _react2.default.createElement(_on_blur_callbacks2.default, null) }, { title: 'Configure Popover Placement', component: _react2.default.createElement(_placement2.default, null) }, { title: 'Portal version', component: 
_react2.default.createElement(_portal2.default, null) }, { title: 'TabIndex', component: _react2.default.createElement(_tab_index2.default, null) }, { title: 'Year dropdown', component: _react2.default.createElement(_year_dropdown2.default, null) }, { title: 'Month dropdown', component: _react2.default.createElement(_month_dropdown2.default, null) }, { title: 'Year select dropdown', component: _react2.default.createElement(_year_select_dropdown2.default, null) }, { title: 'Inline version', component: _react2.default.createElement(_inline2.default, null) }, { title: 'Open to date', component: _react2.default.createElement(_open_to_date2.default, null) }, { title: 'Fixed height of Calendar', component: _react2.default.createElement(_fixed_calendar2.default, null) }, { title: 'Display Week Numbers', component: _react2.default.createElement(_week_numbers2.default, null) }, { title: 'Custom input', component: _react2.default.createElement(_custom_input2.default, null) }, { title: 'Multiple months', component: _react2.default.createElement(_multi_month2.default, null) }, { title: 'Multiple months with year dropdown', component: _react2.default.createElement(_multi_month_drp2.default, null) }, { title: 'Children', component: _react2.default.createElement(_children2.default, null) }, { title: 'Get raw input value on change', component: _react2.default.createElement(_raw_change2.default, null) }], renderExamples: function renderExamples() { return this.examples.map(function (example, index) { return _react2.default.createElement( _code_example_component2.default, { key: 'example-' + index, id: index, title: example.title }, example.component ); }); }, renderLeftColumn: function renderLeftColumn() { return this.examples.map(function (example, index) { return _react2.default.createElement( 'li', { className: 'examples__navigation-item', key: 'link-' + index }, _react2.default.createElement( 'a', { href: '#example-' + index }, example.title ) ); }); }, render: function render() { return _react2.default.createElement( 'div', null, _react2.default.createElement( 'h1', null, 'Examples' ), _react2.default.createElement( 'ul', { className: 'examples__navigation' }, this.renderLeftColumn() ), _react2.default.createElement( 'div', { className: 'examples' }, this.renderExamples() ) ); } }); /***/ }, /* 169 */ /***/ function(module, exports, __webpack_require__) { var hljs = __webpack_require__(170); hljs.registerLanguage('1c', __webpack_require__(171)); hljs.registerLanguage('abnf', __webpack_require__(172)); hljs.registerLanguage('accesslog', __webpack_require__(173)); hljs.registerLanguage('actionscript', __webpack_require__(174)); hljs.registerLanguage('ada', __webpack_require__(175)); hljs.registerLanguage('apache', __webpack_require__(176)); hljs.registerLanguage('applescript', __webpack_require__(177)); hljs.registerLanguage('cpp', __webpack_require__(178)); hljs.registerLanguage('arduino', __webpack_require__(179)); hljs.registerLanguage('armasm', __webpack_require__(180)); hljs.registerLanguage('xml', __webpack_require__(181)); hljs.registerLanguage('asciidoc', __webpack_require__(182)); hljs.registerLanguage('aspectj', __webpack_require__(183)); hljs.registerLanguage('autohotkey', __webpack_require__(184)); hljs.registerLanguage('autoit', __webpack_require__(185)); hljs.registerLanguage('avrasm', __webpack_require__(186)); hljs.registerLanguage('awk', __webpack_require__(187)); hljs.registerLanguage('axapta', __webpack_require__(188)); hljs.registerLanguage('bash', __webpack_require__(189)); 
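/*
 * A short sketch of how the highlight.js instance assembled here is used by
 * the docs page (see componentDidMount in the module above): every language
 * is registered up front, then highlighting is triggered once on load.
 *
 *   var hljs = require('highlight.js');
 *   // Highlight every <pre><code> block after the DOM is ready:
 *   hljs.initHighlightingOnLoad();
 *   // Or highlight one element on demand:
 *   hljs.highlightBlock(document.querySelector('pre code'));
 */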
hljs.registerLanguage('basic', __webpack_require__(190)); hljs.registerLanguage('bnf', __webpack_require__(191)); hljs.registerLanguage('brainfuck', __webpack_require__(192)); hljs.registerLanguage('cal', __webpack_require__(193)); hljs.registerLanguage('capnproto', __webpack_require__(194)); hljs.registerLanguage('ceylon', __webpack_require__(195)); hljs.registerLanguage('clean', __webpack_require__(196)); hljs.registerLanguage('clojure', __webpack_require__(197)); hljs.registerLanguage('clojure-repl', __webpack_require__(198)); hljs.registerLanguage('cmake', __webpack_require__(199)); hljs.registerLanguage('coffeescript', __webpack_require__(200)); hljs.registerLanguage('coq', __webpack_require__(201)); hljs.registerLanguage('cos', __webpack_require__(202)); hljs.registerLanguage('crmsh', __webpack_require__(203)); hljs.registerLanguage('crystal', __webpack_require__(204)); hljs.registerLanguage('cs', __webpack_require__(205)); hljs.registerLanguage('csp', __webpack_require__(206)); hljs.registerLanguage('css', __webpack_require__(207)); hljs.registerLanguage('d', __webpack_require__(208)); hljs.registerLanguage('markdown', __webpack_require__(209)); hljs.registerLanguage('dart', __webpack_require__(210)); hljs.registerLanguage('delphi', __webpack_require__(211)); hljs.registerLanguage('diff', __webpack_require__(212)); hljs.registerLanguage('django', __webpack_require__(213)); hljs.registerLanguage('dns', __webpack_require__(214)); hljs.registerLanguage('dockerfile', __webpack_require__(215)); hljs.registerLanguage('dos', __webpack_require__(216)); hljs.registerLanguage('dsconfig', __webpack_require__(217)); hljs.registerLanguage('dts', __webpack_require__(218)); hljs.registerLanguage('dust', __webpack_require__(219)); hljs.registerLanguage('ebnf', __webpack_require__(220)); hljs.registerLanguage('elixir', __webpack_require__(221)); hljs.registerLanguage('elm', __webpack_require__(222)); hljs.registerLanguage('ruby', __webpack_require__(223)); hljs.registerLanguage('erb', __webpack_require__(224)); hljs.registerLanguage('erlang-repl', __webpack_require__(225)); hljs.registerLanguage('erlang', __webpack_require__(226)); hljs.registerLanguage('excel', __webpack_require__(227)); hljs.registerLanguage('fix', __webpack_require__(228)); hljs.registerLanguage('flix', __webpack_require__(229)); hljs.registerLanguage('fortran', __webpack_require__(230)); hljs.registerLanguage('fsharp', __webpack_require__(231)); hljs.registerLanguage('gams', __webpack_require__(232)); hljs.registerLanguage('gauss', __webpack_require__(233)); hljs.registerLanguage('gcode', __webpack_require__(234)); hljs.registerLanguage('gherkin', __webpack_require__(235)); hljs.registerLanguage('glsl', __webpack_require__(236)); hljs.registerLanguage('go', __webpack_require__(237)); hljs.registerLanguage('golo', __webpack_require__(238)); hljs.registerLanguage('gradle', __webpack_require__(239)); hljs.registerLanguage('groovy', __webpack_require__(240)); hljs.registerLanguage('haml', __webpack_require__(241)); hljs.registerLanguage('handlebars', __webpack_require__(242)); hljs.registerLanguage('haskell', __webpack_require__(243)); hljs.registerLanguage('haxe', __webpack_require__(244)); hljs.registerLanguage('hsp', __webpack_require__(245)); hljs.registerLanguage('htmlbars', __webpack_require__(246)); hljs.registerLanguage('http', __webpack_require__(247)); hljs.registerLanguage('inform7', __webpack_require__(248)); hljs.registerLanguage('ini', __webpack_require__(249)); hljs.registerLanguage('irpf90', 
__webpack_require__(250)); hljs.registerLanguage('java', __webpack_require__(251)); hljs.registerLanguage('javascript', __webpack_require__(252)); hljs.registerLanguage('json', __webpack_require__(253)); hljs.registerLanguage('julia', __webpack_require__(254)); hljs.registerLanguage('kotlin', __webpack_require__(255)); hljs.registerLanguage('lasso', __webpack_require__(256)); hljs.registerLanguage('ldif', __webpack_require__(257)); hljs.registerLanguage('less', __webpack_require__(258)); hljs.registerLanguage('lisp', __webpack_require__(259)); hljs.registerLanguage('livecodeserver', __webpack_require__(260)); hljs.registerLanguage('livescript', __webpack_require__(261)); hljs.registerLanguage('llvm', __webpack_require__(262)); hljs.registerLanguage('lsl', __webpack_require__(263)); hljs.registerLanguage('lua', __webpack_require__(264)); hljs.registerLanguage('makefile', __webpack_require__(265)); hljs.registerLanguage('mathematica', __webpack_require__(266)); hljs.registerLanguage('matlab', __webpack_require__(267)); hljs.registerLanguage('maxima', __webpack_require__(268)); hljs.registerLanguage('mel', __webpack_require__(269)); hljs.registerLanguage('mercury', __webpack_require__(270)); hljs.registerLanguage('mipsasm', __webpack_require__(271)); hljs.registerLanguage('mizar', __webpack_require__(272)); hljs.registerLanguage('perl', __webpack_require__(273)); hljs.registerLanguage('mojolicious', __webpack_require__(274)); hljs.registerLanguage('monkey', __webpack_require__(275)); hljs.registerLanguage('moonscript', __webpack_require__(276)); hljs.registerLanguage('nginx', __webpack_require__(277)); hljs.registerLanguage('nimrod', __webpack_require__(278)); hljs.registerLanguage('nix', __webpack_require__(279)); hljs.registerLanguage('nsis', __webpack_require__(280)); hljs.registerLanguage('objectivec', __webpack_require__(281)); hljs.registerLanguage('ocaml', __webpack_require__(282)); hljs.registerLanguage('openscad', __webpack_require__(283)); hljs.registerLanguage('oxygene', __webpack_require__(284)); hljs.registerLanguage('parser3', __webpack_require__(285)); hljs.registerLanguage('pf', __webpack_require__(286)); hljs.registerLanguage('php', __webpack_require__(287)); hljs.registerLanguage('pony', __webpack_require__(288)); hljs.registerLanguage('powershell', __webpack_require__(289)); hljs.registerLanguage('processing', __webpack_require__(290)); hljs.registerLanguage('profile', __webpack_require__(291)); hljs.registerLanguage('prolog', __webpack_require__(292)); hljs.registerLanguage('protobuf', __webpack_require__(293)); hljs.registerLanguage('puppet', __webpack_require__(294)); hljs.registerLanguage('purebasic', __webpack_require__(295)); hljs.registerLanguage('python', __webpack_require__(296)); hljs.registerLanguage('q', __webpack_require__(297)); hljs.registerLanguage('qml', __webpack_require__(298)); hljs.registerLanguage('r', __webpack_require__(299)); hljs.registerLanguage('rib', __webpack_require__(300)); hljs.registerLanguage('roboconf', __webpack_require__(301)); hljs.registerLanguage('rsl', __webpack_require__(302)); hljs.registerLanguage('ruleslanguage', __webpack_require__(303)); hljs.registerLanguage('rust', __webpack_require__(304)); hljs.registerLanguage('scala', __webpack_require__(305)); hljs.registerLanguage('scheme', __webpack_require__(306)); hljs.registerLanguage('scilab', __webpack_require__(307)); hljs.registerLanguage('scss', __webpack_require__(308)); hljs.registerLanguage('smali', __webpack_require__(309)); hljs.registerLanguage('smalltalk', 
__webpack_require__(310)); hljs.registerLanguage('sml', __webpack_require__(311)); hljs.registerLanguage('sqf', __webpack_require__(312)); hljs.registerLanguage('sql', __webpack_require__(313)); hljs.registerLanguage('stan', __webpack_require__(314)); hljs.registerLanguage('stata', __webpack_require__(315)); hljs.registerLanguage('step21', __webpack_require__(316)); hljs.registerLanguage('stylus', __webpack_require__(317)); hljs.registerLanguage('subunit', __webpack_require__(318)); hljs.registerLanguage('swift', __webpack_require__(319)); hljs.registerLanguage('taggerscript', __webpack_require__(320)); hljs.registerLanguage('yaml', __webpack_require__(321)); hljs.registerLanguage('tap', __webpack_require__(322)); hljs.registerLanguage('tcl', __webpack_require__(323)); hljs.registerLanguage('tex', __webpack_require__(324)); hljs.registerLanguage('thrift', __webpack_require__(325)); hljs.registerLanguage('tp', __webpack_require__(326)); hljs.registerLanguage('twig', __webpack_require__(327)); hljs.registerLanguage('typescript', __webpack_require__(328)); hljs.registerLanguage('vala', __webpack_require__(329)); hljs.registerLanguage('vbnet', __webpack_require__(330)); hljs.registerLanguage('vbscript', __webpack_require__(331)); hljs.registerLanguage('vbscript-html', __webpack_require__(332)); hljs.registerLanguage('verilog', __webpack_require__(333)); hljs.registerLanguage('vhdl', __webpack_require__(334)); hljs.registerLanguage('vim', __webpack_require__(335)); hljs.registerLanguage('x86asm', __webpack_require__(336)); hljs.registerLanguage('xl', __webpack_require__(337)); hljs.registerLanguage('xquery', __webpack_require__(338)); hljs.registerLanguage('zephir', __webpack_require__(339)); module.exports = hljs; /***/ }, /* 170 */ /***/ function(module, exports, __webpack_require__) { /* Syntax highlighting with language autodetection. https://highlightjs.org/ */ (function(factory) { // Find the global object for export to both the browser and web workers. var globalObject = typeof window === 'object' && window || typeof self === 'object' && self; // Setup highlight.js for different environments. First is Node.js or // CommonJS. if(true) { factory(exports); } else if(globalObject) { // Export hljs globally even when using AMD for cases when this script // is loaded with others that may still expect a global hljs. globalObject.hljs = factory({}); // Finally register the global hljs with AMD. if(typeof define === 'function' && define.amd) { define([], function() { return globalObject.hljs; }); } } }(function(hljs) { // Convenience variables for build-in objects var ArrayProto = [], objectKeys = Object.keys; // Global internal variables used within the highlight.js library. var languages = {}, aliases = {}; // Regular expressions used throughout the highlight.js library. var noHighlightRe = /^(no-?highlight|plain|text)$/i, languagePrefixRe = /\blang(?:uage)?-([\w-]+)\b/i, fixMarkupRe = /((^(<[^>]+>|\t|)+|(?:\n)))/gm; var spanEndTag = '</span>'; // Global options used when within external APIs. This is modified when // calling the `hljs.configure` function. var options = { classPrefix: 'hljs-', tabReplace: null, useBR: false, languages: undefined }; // Object map that is used to escape some common HTML characters. 
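/*
 * A small sketch of the `options` object initialised above: callers typically
 * adjust it through hljs.configure rather than mutating it directly.
 *
 *   hljs.configure({
 *     classPrefix: 'hljs-',              // prefix applied to generated span classes
 *     tabReplace: '    ',                // expand tabs in highlighted output
 *     useBR: false,                      // emit <br> instead of newlines for non-<pre> hosts
 *     languages: ['javascript', 'xml']   // restrict automatic language detection
 *   });
 */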
var escapeRegexMap = { '&': '&amp;', '<': '&lt;', '>': '&gt;' }; /* Utility functions */ function escape(value) { return value.replace(/[&<>]/gm, function(character) { return escapeRegexMap[character]; }); } function tag(node) { return node.nodeName.toLowerCase(); } function testRe(re, lexeme) { var match = re && re.exec(lexeme); return match && match.index === 0; } function isNotHighlighted(language) { return noHighlightRe.test(language); } function blockLanguage(block) { var i, match, length, _class; var classes = block.className + ' '; classes += block.parentNode ? block.parentNode.className : ''; // language-* takes precedence over non-prefixed class names. match = languagePrefixRe.exec(classes); if (match) { return getLanguage(match[1]) ? match[1] : 'no-highlight'; } classes = classes.split(/\s+/); for (i = 0, length = classes.length; i < length; i++) { _class = classes[i] if (isNotHighlighted(_class) || getLanguage(_class)) { return _class; } } } function inherit(parent, obj) { var key; var result = {}; for (key in parent) result[key] = parent[key]; if (obj) for (key in obj) result[key] = obj[key]; return result; } /* Stream merging */ function nodeStream(node) { var result = []; (function _nodeStream(node, offset) { for (var child = node.firstChild; child; child = child.nextSibling) { if (child.nodeType === 3) offset += child.nodeValue.length; else if (child.nodeType === 1) { result.push({ event: 'start', offset: offset, node: child }); offset = _nodeStream(child, offset); // Prevent void elements from having an end tag that would actually // double them in the output. There are more void elements in HTML // but we list only those realistically expected in code display. if (!tag(child).match(/br|hr|img|input/)) { result.push({ event: 'stop', offset: offset, node: child }); } } } return offset; })(node, 0); return result; } function mergeStreams(original, highlighted, value) { var processed = 0; var result = ''; var nodeStack = []; function selectStream() { if (!original.length || !highlighted.length) { return original.length ? original : highlighted; } if (original[0].offset !== highlighted[0].offset) { return (original[0].offset < highlighted[0].offset) ? original : highlighted; } /* To avoid starting the stream just before it should stop the order is ensured that original always starts first and closes last: if (event1 == 'start' && event2 == 'start') return original; if (event1 == 'start' && event2 == 'stop') return highlighted; if (event1 == 'stop' && event2 == 'start') return original; if (event1 == 'stop' && event2 == 'stop') return highlighted; ... which is collapsed to: */ return highlighted[0].event === 'start' ? original : highlighted; } function open(node) { function attr_str(a) {return ' ' + a.nodeName + '="' + escape(a.value) + '"';} result += '<' + tag(node) + ArrayProto.map.call(node.attributes, attr_str).join('') + '>'; } function close(node) { result += '</' + tag(node) + '>'; } function render(event) { (event.event === 'start' ? open : close)(event.node); } while (original.length || highlighted.length) { var stream = selectStream(); result += escape(value.substring(processed, stream[0].offset)); processed = stream[0].offset; if (stream === original) { /* On any opening or closing tag of the original markup we first close the entire highlighted node stack, then render the original tag along with all the following original tags at the same offset and then reopen all the tags on the highlighted stack. 
*/ nodeStack.reverse().forEach(close); do { render(stream.splice(0, 1)[0]); stream = selectStream(); } while (stream === original && stream.length && stream[0].offset === processed); nodeStack.reverse().forEach(open); } else { if (stream[0].event === 'start') { nodeStack.push(stream[0].node); } else { nodeStack.pop(); } render(stream.splice(0, 1)[0]); } } return result + escape(value.substr(processed)); } /* Initialization */ function compileLanguage(language) { function reStr(re) { return (re && re.source) || re; } function langRe(value, global) { return new RegExp( reStr(value), 'm' + (language.case_insensitive ? 'i' : '') + (global ? 'g' : '') ); } function compileMode(mode, parent) { if (mode.compiled) return; mode.compiled = true; mode.keywords = mode.keywords || mode.beginKeywords; if (mode.keywords) { var compiled_keywords = {}; var flatten = function(className, str) { if (language.case_insensitive) { str = str.toLowerCase(); } str.split(' ').forEach(function(kw) { var pair = kw.split('|'); compiled_keywords[pair[0]] = [className, pair[1] ? Number(pair[1]) : 1]; }); }; if (typeof mode.keywords === 'string') { // string flatten('keyword', mode.keywords); } else { objectKeys(mode.keywords).forEach(function (className) { flatten(className, mode.keywords[className]); }); } mode.keywords = compiled_keywords; } mode.lexemesRe = langRe(mode.lexemes || /\w+/, true); if (parent) { if (mode.beginKeywords) { mode.begin = '\\b(' + mode.beginKeywords.split(' ').join('|') + ')\\b'; } if (!mode.begin) mode.begin = /\B|\b/; mode.beginRe = langRe(mode.begin); if (!mode.end && !mode.endsWithParent) mode.end = /\B|\b/; if (mode.end) mode.endRe = langRe(mode.end); mode.terminator_end = reStr(mode.end) || ''; if (mode.endsWithParent && parent.terminator_end) mode.terminator_end += (mode.end ? '|' : '') + parent.terminator_end; } if (mode.illegal) mode.illegalRe = langRe(mode.illegal); if (mode.relevance == null) mode.relevance = 1; if (!mode.contains) { mode.contains = []; } var expanded_contains = []; mode.contains.forEach(function(c) { if (c.variants) { c.variants.forEach(function(v) {expanded_contains.push(inherit(c, v));}); } else { expanded_contains.push(c === 'self' ? mode : c); } }); mode.contains = expanded_contains; mode.contains.forEach(function(c) {compileMode(c, mode);}); if (mode.starts) { compileMode(mode.starts, parent); } var terminators = mode.contains.map(function(c) { return c.beginKeywords ? '\\.?(' + c.begin + ')\\.?' : c.begin; }) .concat([mode.terminator_end, mode.illegal]) .map(reStr) .filter(Boolean); mode.terminators = terminators.length ? langRe(terminators.join('|'), true) : {exec: function(/*s*/) {return null;}}; } compileMode(language); } /* Core highlighting function. Accepts a language name, or an alias, and a string with the code to highlight. 
Returns an object with the following properties: - relevance (int) - value (an HTML string with highlighting markup) */ function highlight(name, value, ignore_illegals, continuation) { function subMode(lexeme, mode) { var i, length; for (i = 0, length = mode.contains.length; i < length; i++) { if (testRe(mode.contains[i].beginRe, lexeme)) { return mode.contains[i]; } } } function endOfMode(mode, lexeme) { if (testRe(mode.endRe, lexeme)) { while (mode.endsParent && mode.parent) { mode = mode.parent; } return mode; } if (mode.endsWithParent) { return endOfMode(mode.parent, lexeme); } } function isIllegal(lexeme, mode) { return !ignore_illegals && testRe(mode.illegalRe, lexeme); } function keywordMatch(mode, match) { var match_str = language.case_insensitive ? match[0].toLowerCase() : match[0]; return mode.keywords.hasOwnProperty(match_str) && mode.keywords[match_str]; } function buildSpan(classname, insideSpan, leaveOpen, noPrefix) { var classPrefix = noPrefix ? '' : options.classPrefix, openSpan = '<span class="' + classPrefix, closeSpan = leaveOpen ? '' : spanEndTag openSpan += classname + '">'; return openSpan + insideSpan + closeSpan; } function processKeywords() { var keyword_match, last_index, match, result; if (!top.keywords) return escape(mode_buffer); result = ''; last_index = 0; top.lexemesRe.lastIndex = 0; match = top.lexemesRe.exec(mode_buffer); while (match) { result += escape(mode_buffer.substring(last_index, match.index)); keyword_match = keywordMatch(top, match); if (keyword_match) { relevance += keyword_match[1]; result += buildSpan(keyword_match[0], escape(match[0])); } else { result += escape(match[0]); } last_index = top.lexemesRe.lastIndex; match = top.lexemesRe.exec(mode_buffer); } return result + escape(mode_buffer.substr(last_index)); } function processSubLanguage() { var explicit = typeof top.subLanguage === 'string'; if (explicit && !languages[top.subLanguage]) { return escape(mode_buffer); } var result = explicit ? highlight(top.subLanguage, mode_buffer, true, continuations[top.subLanguage]) : highlightAuto(mode_buffer, top.subLanguage.length ? top.subLanguage : undefined); // Counting embedded language score towards the host language may be disabled // with zeroing the containing mode relevance. Usecase in point is Markdown that // allows XML everywhere and makes every XML snippet to have a much larger Markdown // score. if (top.relevance > 0) { relevance += result.relevance; } if (explicit) { continuations[top.subLanguage] = result.top; } return buildSpan(result.language, result.value, false, true); } function processBuffer() { result += (top.subLanguage != null ? processSubLanguage() : processKeywords()); mode_buffer = ''; } function startNewMode(mode) { result += mode.className? buildSpan(mode.className, '', true): ''; top = Object.create(mode, {parent: {value: top}}); } function processLexeme(buffer, lexeme) { mode_buffer += buffer; if (lexeme == null) { processBuffer(); return 0; } var new_mode = subMode(lexeme, top); if (new_mode) { if (new_mode.skip) { mode_buffer += lexeme; } else { if (new_mode.excludeBegin) { mode_buffer += lexeme; } processBuffer(); if (!new_mode.returnBegin && !new_mode.excludeBegin) { mode_buffer = lexeme; } } startNewMode(new_mode, lexeme); return new_mode.returnBegin ? 
0 : lexeme.length; } var end_mode = endOfMode(top, lexeme); if (end_mode) { var origin = top; if (origin.skip) { mode_buffer += lexeme; } else { if (!(origin.returnEnd || origin.excludeEnd)) { mode_buffer += lexeme; } processBuffer(); if (origin.excludeEnd) { mode_buffer = lexeme; } } do { if (top.className) { result += spanEndTag; } if (!top.skip) { relevance += top.relevance; } top = top.parent; } while (top !== end_mode.parent); if (end_mode.starts) { startNewMode(end_mode.starts, ''); } return origin.returnEnd ? 0 : lexeme.length; } if (isIllegal(lexeme, top)) throw new Error('Illegal lexeme "' + lexeme + '" for mode "' + (top.className || '<unnamed>') + '"'); /* Parser should not reach this point as all types of lexemes should be caught earlier, but if it does due to some bug make sure it advances at least one character forward to prevent infinite looping. */ mode_buffer += lexeme; return lexeme.length || 1; } var language = getLanguage(name); if (!language) { throw new Error('Unknown language: "' + name + '"'); } compileLanguage(language); var top = continuation || language; var continuations = {}; // keep continuations for sub-languages var result = '', current; for(current = top; current !== language; current = current.parent) { if (current.className) { result = buildSpan(current.className, '', true) + result; } } var mode_buffer = ''; var relevance = 0; try { var match, count, index = 0; while (true) { top.terminators.lastIndex = index; match = top.terminators.exec(value); if (!match) break; count = processLexeme(value.substring(index, match.index), match[0]); index = match.index + count; } processLexeme(value.substr(index)); for(current = top; current.parent; current = current.parent) { // close dangling modes if (current.className) { result += spanEndTag; } } return { relevance: relevance, value: result, language: name, top: top }; } catch (e) { if (e.message && e.message.indexOf('Illegal') !== -1) { return { relevance: 0, value: escape(value) }; } else { throw e; } } } /* Highlighting with language detection. Accepts a string with the code to highlight. Returns an object with the following properties: - language (detected language) - relevance (int) - value (an HTML string with highlighting markup) - second_best (object with the same structure for second-best heuristically detected language, may be absent) */ function highlightAuto(text, languageSubset) { languageSubset = languageSubset || options.languages || objectKeys(languages); var result = { relevance: 0, value: escape(text) }; var second_best = result; languageSubset.filter(getLanguage).forEach(function(name) { var current = highlight(name, text, false); current.language = name; if (current.relevance > second_best.relevance) { second_best = current; } if (current.relevance > result.relevance) { second_best = result; result = current; } }); if (second_best.language) { result.second_best = second_best; } return result; } /* Post-processing of the highlighted markup: - replace TABs with something more useful - replace real line-breaks with '<br>' for non-pre containers */ function fixMarkup(value) { return !(options.tabReplace || options.useBR) ? value : value.replace(fixMarkupRe, function(match, p1) { if (options.useBR && match === '\n') { return '<br>'; } else if (options.tabReplace) { return p1.replace(/\t/g, options.tabReplace); } }); } function buildClassName(prevClassName, currentLang, resultLang) { var language = currentLang ? 
                        aliases[currentLang] : resultLang,
      result   = [prevClassName.trim()];

  if (!prevClassName.match(/\bhljs\b/)) {
    result.push('hljs');
  }

  if (prevClassName.indexOf(language) === -1) {
    result.push(language);
  }

  return result.join(' ').trim();
}

/*
Applies highlighting to a DOM node containing code.
Accepts a DOM node and two optional parameters for fixMarkup.
*/
function highlightBlock(block) {
  var node, originalStream, result, resultNode, text;
  var language = blockLanguage(block);

  if (isNotHighlighted(language))
    return;

  if (options.useBR) {
    node = document.createElementNS('http://www.w3.org/1999/xhtml', 'div');
    node.innerHTML = block.innerHTML.replace(/\n/g, '').replace(/<br[ \/]*>/g, '\n');
  } else {
    node = block;
  }
  text = node.textContent;
  result = language ? highlight(language, text, true) : highlightAuto(text);

  originalStream = nodeStream(node);
  if (originalStream.length) {
    resultNode = document.createElementNS('http://www.w3.org/1999/xhtml', 'div');
    resultNode.innerHTML = result.value;
    result.value = mergeStreams(originalStream, nodeStream(resultNode), text);
  }
  result.value = fixMarkup(result.value);

  block.innerHTML = result.value;
  block.className = buildClassName(block.className, language, result.language);
  block.result = {
    language: result.language,
    re: result.relevance
  };
  if (result.second_best) {
    block.second_best = {
      language: result.second_best.language,
      re: result.second_best.relevance
    };
  }
}

/*
Updates highlight.js global options with values passed in the form of an object.
*/
function configure(user_options) {
  options = inherit(options, user_options);
}

/*
Applies highlighting to all <pre><code>..</code></pre> blocks on a page.
*/
function initHighlighting() {
  if (initHighlighting.called)
    return;
  initHighlighting.called = true;

  var blocks = document.querySelectorAll('pre code');
  ArrayProto.forEach.call(blocks, highlightBlock);
}

/*
Attaches highlighting to the page load event.
*/
function initHighlightingOnLoad() {
  addEventListener('DOMContentLoaded', initHighlighting, false);
  addEventListener('load', initHighlighting, false);
}

function registerLanguage(name, language) {
  var lang = languages[name] = language(hljs);
  if (lang.aliases) {
    lang.aliases.forEach(function(alias) {aliases[alias] = name;});
  }
}

function listLanguages() {
  return objectKeys(languages);
}

function getLanguage(name) {
  name = (name || '').toLowerCase();
  return languages[name] || languages[aliases[name]];
}

/* Interface definition */

hljs.highlight = highlight;
hljs.highlightAuto = highlightAuto;
hljs.fixMarkup = fixMarkup;
hljs.highlightBlock = highlightBlock;
hljs.configure = configure;
hljs.initHighlighting = initHighlighting;
hljs.initHighlightingOnLoad = initHighlightingOnLoad;
hljs.registerLanguage = registerLanguage;
hljs.listLanguages = listLanguages;
hljs.getLanguage = getLanguage;
hljs.inherit = inherit;

// Common regexps
hljs.IDENT_RE = '[a-zA-Z]\\w*';
hljs.UNDERSCORE_IDENT_RE = '[a-zA-Z_]\\w*';
hljs.NUMBER_RE = '\\b\\d+(\\.\\d+)?';
hljs.C_NUMBER_RE = '(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)'; // 0x..., 0..., decimal, float
hljs.BINARY_NUMBER_RE = '\\b(0b[01]+)'; // 0b...
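/*
Usage sketch (added for illustration; not part of the original bundle). It only
exercises the public interface assigned just above -- highlight, highlightAuto,
configure and initHighlightingOnLoad -- and assumes it runs in a browser page
after this script has loaded and the language modules further down in the bundle
have been registered; the '#out' element is a hypothetical stand-in.

  // Highlight a string in a known language; highlight() returns
  // { language, relevance, value, top } where `value` is HTML markup.
  var result = hljs.highlight('xml', '<p class="note">hi</p>', true);
  document.getElementById('out').innerHTML = result.value;

  // Or let the detector choose among all registered languages.
  var auto = hljs.highlightAuto('SELECT id FROM users;');
  console.log(auto.language, auto.relevance,
              auto.second_best && auto.second_best.language);

  // Typical page setup: tweak global options, then highlight every
  // <pre><code> block once the DOM and window load events fire.
  hljs.configure({ classPrefix: 'hljs-', tabReplace: '    ' });
  hljs.initHighlightingOnLoad();
*/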
hljs.RE_STARTERS_RE = '!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~'; // Common modes hljs.BACKSLASH_ESCAPE = { begin: '\\\\[\\s\\S]', relevance: 0 }; hljs.APOS_STRING_MODE = { className: 'string', begin: '\'', end: '\'', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE] }; hljs.QUOTE_STRING_MODE = { className: 'string', begin: '"', end: '"', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE] }; hljs.PHRASAL_WORDS_MODE = { begin: /\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|like)\b/ }; hljs.COMMENT = function (begin, end, inherits) { var mode = hljs.inherit( { className: 'comment', begin: begin, end: end, contains: [] }, inherits || {} ); mode.contains.push(hljs.PHRASAL_WORDS_MODE); mode.contains.push({ className: 'doctag', begin: '(?:TODO|FIXME|NOTE|BUG|XXX):', relevance: 0 }); return mode; }; hljs.C_LINE_COMMENT_MODE = hljs.COMMENT('//', '$'); hljs.C_BLOCK_COMMENT_MODE = hljs.COMMENT('/\\*', '\\*/'); hljs.HASH_COMMENT_MODE = hljs.COMMENT('#', '$'); hljs.NUMBER_MODE = { className: 'number', begin: hljs.NUMBER_RE, relevance: 0 }; hljs.C_NUMBER_MODE = { className: 'number', begin: hljs.C_NUMBER_RE, relevance: 0 }; hljs.BINARY_NUMBER_MODE = { className: 'number', begin: hljs.BINARY_NUMBER_RE, relevance: 0 }; hljs.CSS_NUMBER_MODE = { className: 'number', begin: hljs.NUMBER_RE + '(' + '%|em|ex|ch|rem' + '|vw|vh|vmin|vmax' + '|cm|mm|in|pt|pc|px' + '|deg|grad|rad|turn' + '|s|ms' + '|Hz|kHz' + '|dpi|dpcm|dppx' + ')?', relevance: 0 }; hljs.REGEXP_MODE = { className: 'regexp', begin: /\//, end: /\/[gimuy]*/, illegal: /\n/, contains: [ hljs.BACKSLASH_ESCAPE, { begin: /\[/, end: /\]/, relevance: 0, contains: [hljs.BACKSLASH_ESCAPE] } ] }; hljs.TITLE_MODE = { className: 'title', begin: hljs.IDENT_RE, relevance: 0 }; hljs.UNDERSCORE_TITLE_MODE = { className: 'title', begin: hljs.UNDERSCORE_IDENT_RE, relevance: 0 }; hljs.METHOD_GUARD = { // excludes method names from keyword processing begin: '\\.\\s*' + hljs.UNDERSCORE_IDENT_RE, relevance: 0 }; return hljs; })); /***/ }, /* 171 */ /***/ function(module, exports) { module.exports = function(hljs){ var IDENT_RE_RU = '[a-zA-Zа-яА-Я][a-zA-Z0-9_а-яА-Я]*'; var OneS_KEYWORDS = 'возврат дата для если и или иначе иначеесли исключение конецесли ' + 'конецпопытки конецпроцедуры конецфункции конеццикла константа не перейти перем ' + 'перечисление по пока попытка прервать продолжить процедура строка тогда фс функция цикл ' + 'число экспорт'; var OneS_BUILT_IN = 'ansitooem oemtoansi ввестивидсубконто ввестидату ввестизначение ' + 'ввестиперечисление ввестипериод ввестиплансчетов ввестистроку ввестичисло вопрос ' + 'восстановитьзначение врег выбранныйплансчетов вызватьисключение датагод датамесяц ' + 'датачисло добавитьмесяц завершитьработусистемы заголовоксистемы записьжурналарегистрации ' + 'запуститьприложение зафиксироватьтранзакцию значениевстроку значениевстрокувнутр ' + 'значениевфайл значениеизстроки значениеизстрокивнутр значениеизфайла имякомпьютера ' + 'имяпользователя каталогвременныхфайлов каталогиб каталогпользователя каталогпрограммы ' + 'кодсимв командасистемы конгода конецпериодаби конецрассчитанногопериодаби ' + 'конецстандартногоинтервала конквартала конмесяца коннедели лев лог лог10 макс ' + 'максимальноеколичествосубконто мин монопольныйрежим названиеинтерфейса названиенабораправ ' + 'назначитьвид назначитьсчет найти найтипомеченныенаудаление найтиссылки началопериодаби ' + 
'началостандартногоинтервала начатьтранзакцию начгода начквартала начмесяца начнедели ' + 'номерднягода номерднянедели номернеделигода нрег обработкаожидания окр описаниеошибки ' + 'основнойжурналрасчетов основнойплансчетов основнойязык открытьформу открытьформумодально ' + 'отменитьтранзакцию очиститьокносообщений периодстр полноеимяпользователя получитьвремята ' + 'получитьдатута получитьдокументта получитьзначенияотбора получитьпозициюта ' + 'получитьпустоезначение получитьта прав праводоступа предупреждение префиксавтонумерации ' + 'пустаястрока пустоезначение рабочаядаттьпустоезначение рабочаядата разделительстраниц ' + 'разделительстрок разм разобратьпозициюдокумента рассчитатьрегистрына ' + 'рассчитатьрегистрыпо сигнал симв символтабуляции создатьобъект сокрл сокрлп сокрп ' + 'сообщить состояние сохранитьзначение сред статусвозврата стрдлина стрзаменить ' + 'стрколичествострок стрполучитьстроку стрчисловхождений сформироватьпозициюдокумента ' + 'счетпокоду текущаядата текущеевремя типзначения типзначениястр удалитьобъекты ' + 'установитьтана установитьтапо фиксшаблон формат цел шаблон'; var DQUOTE = {begin: '""'}; var STR_START = { className: 'string', begin: '"', end: '"|$', contains: [DQUOTE] }; var STR_CONT = { className: 'string', begin: '\\|', end: '"|$', contains: [DQUOTE] }; return { case_insensitive: true, lexemes: IDENT_RE_RU, keywords: {keyword: OneS_KEYWORDS, built_in: OneS_BUILT_IN}, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.NUMBER_MODE, STR_START, STR_CONT, { className: 'function', begin: '(процедура|функция)', end: '$', lexemes: IDENT_RE_RU, keywords: 'процедура функция', contains: [ { begin: 'экспорт', endsWithParent: true, lexemes: IDENT_RE_RU, keywords: 'экспорт', contains: [hljs.C_LINE_COMMENT_MODE] }, { className: 'params', begin: '\\(', end: '\\)', lexemes: IDENT_RE_RU, keywords: 'знач', contains: [STR_START, STR_CONT] }, hljs.C_LINE_COMMENT_MODE, hljs.inherit(hljs.TITLE_MODE, {begin: IDENT_RE_RU}) ] }, {className: 'meta', begin: '#', end: '$'}, {className: 'number', begin: '\'\\d{2}\\.\\d{2}\\.(\\d{2}|\\d{4})\''} // date ] }; }; /***/ }, /* 172 */ /***/ function(module, exports) { module.exports = function(hljs) { var regexes = { ruleDeclaration: "^[a-zA-Z][a-zA-Z0-9-]*", unexpectedChars: "[!@#$^&',?+~`|:]" }; var keywords = [ "ALPHA", "BIT", "CHAR", "CR", "CRLF", "CTL", "DIGIT", "DQUOTE", "HEXDIG", "HTAB", "LF", "LWSP", "OCTET", "SP", "VCHAR", "WSP" ]; var commentMode = hljs.COMMENT(";", "$"); var terminalBinaryMode = { className: "symbol", begin: /%b[0-1]+(-[0-1]+|(\.[0-1]+)+){0,1}/ }; var terminalDecimalMode = { className: "symbol", begin: /%d[0-9]+(-[0-9]+|(\.[0-9]+)+){0,1}/ }; var terminalHexadecimalMode = { className: "symbol", begin: /%x[0-9A-F]+(-[0-9A-F]+|(\.[0-9A-F]+)+){0,1}/, }; var caseSensitivityIndicatorMode = { className: "symbol", begin: /%[si]/ }; var ruleDeclarationMode = { begin: regexes.ruleDeclaration + '\\s*=', returnBegin: true, end: /=/, relevance: 0, contains: [{className: "attribute", begin: regexes.ruleDeclaration}] }; return { illegal: regexes.unexpectedChars, keywords: keywords.join(" "), contains: [ ruleDeclarationMode, commentMode, terminalBinaryMode, terminalDecimalMode, terminalHexadecimalMode, caseSensitivityIndicatorMode, hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE ] }; }; /***/ }, /* 173 */ /***/ function(module, exports) { module.exports = function(hljs) { return { contains: [ // IP { className: 'number', begin: '\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b' }, // Other numbers { className: 'number', begin: 
'\\b\\d+\\b', relevance: 0 }, // Requests { className: 'string', begin: '"(GET|POST|HEAD|PUT|DELETE|CONNECT|OPTIONS|PATCH|TRACE)', end: '"', keywords: 'GET POST HEAD PUT DELETE CONNECT OPTIONS PATCH TRACE', illegal: '\\n', relevance: 10 }, // Dates { className: 'string', begin: /\[/, end: /\]/, illegal: '\\n' }, // Strings { className: 'string', begin: '"', end: '"', illegal: '\\n' } ] }; }; /***/ }, /* 174 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '[a-zA-Z_$][a-zA-Z0-9_$]*'; var IDENT_FUNC_RETURN_TYPE_RE = '([*]|[a-zA-Z_$][a-zA-Z0-9_$]*)'; var AS3_REST_ARG_MODE = { className: 'rest_arg', begin: '[.]{3}', end: IDENT_RE, relevance: 10 }; return { aliases: ['as'], keywords: { keyword: 'as break case catch class const continue default delete do dynamic each ' + 'else extends final finally for function get if implements import in include ' + 'instanceof interface internal is namespace native new override package private ' + 'protected public return set static super switch this throw try typeof use var void ' + 'while with', literal: 'true false null undefined' }, contains: [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.C_NUMBER_MODE, { className: 'class', beginKeywords: 'package', end: '{', contains: [hljs.TITLE_MODE] }, { className: 'class', beginKeywords: 'class interface', end: '{', excludeEnd: true, contains: [ { beginKeywords: 'extends implements' }, hljs.TITLE_MODE ] }, { className: 'meta', beginKeywords: 'import include', end: ';', keywords: {'meta-keyword': 'import include'} }, { className: 'function', beginKeywords: 'function', end: '[{;]', excludeEnd: true, illegal: '\\S', contains: [ hljs.TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)', contains: [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, AS3_REST_ARG_MODE ] }, { begin: ':\\s*' + IDENT_FUNC_RETURN_TYPE_RE } ] }, hljs.METHOD_GUARD ], illegal: /#/ }; }; /***/ }, /* 175 */ /***/ function(module, exports) { module.exports = // We try to support full Ada2012 // // We highlight all appearances of types, keywords, literals (string, char, number, bool) // and titles (user defined function/procedure/package) // CSS classes are set accordingly // // Languages causing problems for language detection: // xml (broken by Foo : Bar type), elm (broken by Foo : Bar type), vbscript-html (broken by body keyword) // sql (ada default.txt has a lot of sql keywords) function(hljs) { // Regular expression for Ada numeric literals. // stolen form the VHDL highlighter // Decimal literal: var INTEGER_RE = '\\d(_|\\d)*'; var EXPONENT_RE = '[eE][-+]?' + INTEGER_RE; var DECIMAL_LITERAL_RE = INTEGER_RE + '(\\.' + INTEGER_RE + ')?' + '(' + EXPONENT_RE + ')?'; // Based literal: var BASED_INTEGER_RE = '\\w+'; var BASED_LITERAL_RE = INTEGER_RE + '#' + BASED_INTEGER_RE + '(\\.' + BASED_INTEGER_RE + ')?' 
+ '#' + '(' + EXPONENT_RE + ')?'; var NUMBER_RE = '\\b(' + BASED_LITERAL_RE + '|' + DECIMAL_LITERAL_RE + ')'; // Identifier regex var ID_REGEX = '[A-Za-z](_?[A-Za-z0-9.])*'; // bad chars, only allowed in literals var BAD_CHARS = '[]{}%#\'\"' // Ada doesn't have block comments, only line comments var COMMENTS = hljs.COMMENT('--', '$'); // variable declarations of the form // Foo : Bar := Baz; // where only Bar will be highlighted var VAR_DECLS = { // TODO: These spaces are not required by the Ada syntax // however, I have yet to see handwritten Ada code where // someone does not put spaces around : begin: '\\s+:\\s+', end: '\\s*(:=|;|\\)|=>|$)', // endsWithParent: true, // returnBegin: true, illegal: BAD_CHARS, contains: [ { // workaround to avoid highlighting // named loops and declare blocks beginKeywords: 'loop for declare others', endsParent: true, }, { // properly highlight all modifiers className: 'keyword', beginKeywords: 'not null constant access function procedure in out aliased exception' }, { className: 'type', begin: ID_REGEX, endsParent: true, relevance: 0, } ] }; return { case_insensitive: true, keywords: { keyword: 'abort else new return abs elsif not reverse abstract end ' + 'accept entry select access exception of separate aliased exit or some ' + 'all others subtype and for out synchronized array function overriding ' + 'at tagged generic package task begin goto pragma terminate ' + 'body private then if procedure type case in protected constant interface ' + 'is raise use declare range delay limited record when delta loop rem while ' + 'digits renames with do mod requeue xor', literal: 'True False', }, contains: [ COMMENTS, // strings "foobar" { className: 'string', begin: /"/, end: /"/, contains: [{begin: /""/, relevance: 0}] }, // characters '' { // character literals always contain one char className: 'string', begin: /'.'/ }, { // number literals className: 'number', begin: NUMBER_RE, relevance: 0 }, { // Attributes className: 'symbol', begin: "'" + ID_REGEX, }, { // package definition, maybe inside generic className: 'title', begin: '(\\bwith\\s+)?(\\bprivate\\s+)?\\bpackage\\s+(\\bbody\\s+)?', end: '(is|$)', keywords: 'package body', excludeBegin: true, excludeEnd: true, illegal: BAD_CHARS }, { // function/procedure declaration/definition // maybe inside generic begin: '(\\b(with|overriding)\\s+)?\\b(function|procedure)\\s+', end: '(\\bis|\\bwith|\\brenames|\\)\\s*;)', keywords: 'overriding function procedure with is renames return', // we need to re-match the 'function' keyword, so that // the title mode below matches only exactly once returnBegin: true, contains: [ COMMENTS, { // name of the function/procedure className: 'title', begin: '(\\bwith\\s+)?\\b(function|procedure)\\s+', end: '(\\(|\\s+|$)', excludeBegin: true, excludeEnd: true, illegal: BAD_CHARS }, // 'self' // // parameter types VAR_DECLS, { // return type className: 'type', begin: '\\breturn\\s+', end: '(\\s+|;|$)', keywords: 'return', excludeBegin: true, excludeEnd: true, // we are done with functions endsParent: true, illegal: BAD_CHARS }, ] }, { // new type declarations // maybe inside generic className: 'type', begin: '\\b(sub)?type\\s+', end: '\\s+', keywords: 'type', excludeBegin: true, illegal: BAD_CHARS }, // see comment above the definition VAR_DECLS, // no markup // relevance boosters for small snippets // {begin: '\\s*=>\\s*'}, // {begin: '\\s*:=\\s*'}, // {begin: '\\s+:=\\s+'}, ] }; }; /***/ }, /* 176 */ /***/ function(module, exports) { module.exports = function(hljs) { var NUMBER = 
{className: 'number', begin: '[\\$%]\\d+'}; return { aliases: ['apacheconf'], case_insensitive: true, contains: [ hljs.HASH_COMMENT_MODE, {className: 'section', begin: '</?', end: '>'}, { className: 'attribute', begin: /\w+/, relevance: 0, // keywords aren’t needed for highlighting per se, they only boost relevance // for a very generally defined mode (starts with a word, ends with line-end keywords: { nomarkup: 'order deny allow setenv rewriterule rewriteengine rewritecond documentroot ' + 'sethandler errordocument loadmodule options header listen serverroot ' + 'servername' }, starts: { end: /$/, relevance: 0, keywords: { literal: 'on off all' }, contains: [ { className: 'meta', begin: '\\s\\[', end: '\\]$' }, { className: 'variable', begin: '[\\$%]\\{', end: '\\}', contains: ['self', NUMBER] }, NUMBER, hljs.QUOTE_STRING_MODE ] } } ], illegal: /\S/ }; }; /***/ }, /* 177 */ /***/ function(module, exports) { module.exports = function(hljs) { var STRING = hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: ''}); var PARAMS = { className: 'params', begin: '\\(', end: '\\)', contains: ['self', hljs.C_NUMBER_MODE, STRING] }; var COMMENT_MODE_1 = hljs.COMMENT('--', '$'); var COMMENT_MODE_2 = hljs.COMMENT( '\\(\\*', '\\*\\)', { contains: ['self', COMMENT_MODE_1] //allow nesting } ); var COMMENTS = [ COMMENT_MODE_1, COMMENT_MODE_2, hljs.HASH_COMMENT_MODE ]; return { aliases: ['osascript'], keywords: { keyword: 'about above after against and around as at back before beginning ' + 'behind below beneath beside between but by considering ' + 'contain contains continue copy div does eighth else end equal ' + 'equals error every exit fifth first for fourth from front ' + 'get given global if ignoring in into is it its last local me ' + 'middle mod my ninth not of on onto or over prop property put ref ' + 'reference repeat returning script second set seventh since ' + 'sixth some tell tenth that the|0 then third through thru ' + 'timeout times to transaction try until where while whose with ' + 'without', literal: 'AppleScript false linefeed return pi quote result space tab true', built_in: 'alias application boolean class constant date file integer list ' + 'number real record string text ' + 'activate beep count delay launch log offset read round ' + 'run say summarize write ' + 'character characters contents day frontmost id item length ' + 'month name paragraph paragraphs rest reverse running time version ' + 'weekday word words year' }, contains: [ STRING, hljs.C_NUMBER_MODE, { className: 'built_in', begin: '\\b(clipboard info|the clipboard|info for|list (disks|folder)|' + 'mount volume|path to|(close|open for) access|(get|set) eof|' + 'current date|do shell script|get volume settings|random number|' + 'set volume|system attribute|system info|time to GMT|' + '(load|run|store) script|scripting components|' + 'ASCII (character|number)|localized string|' + 'choose (application|color|file|file name|' + 'folder|from list|remote application|URL)|' + 'display (alert|dialog))\\b|^\\s*return\\b' }, { className: 'literal', begin: '\\b(text item delimiters|current application|missing value)\\b' }, { className: 'keyword', begin: '\\b(apart from|aside from|instead of|out of|greater than|' + "isn't|(doesn't|does not) (equal|come before|come after|contain)|" + '(greater|less) than( or equal)?|(starts?|ends|begins?) 
with|' + 'contained by|comes (before|after)|a (ref|reference)|POSIX file|' + 'POSIX path|(date|time) string|quoted form)\\b' }, { beginKeywords: 'on', illegal: '[${=;\\n]', contains: [hljs.UNDERSCORE_TITLE_MODE, PARAMS] } ].concat(COMMENTS), illegal: '//|->|=>|\\[\\[' }; }; /***/ }, /* 178 */ /***/ function(module, exports) { module.exports = function(hljs) { var CPP_PRIMITIVE_TYPES = { className: 'keyword', begin: '\\b[a-z\\d_]*_t\\b' }; var STRINGS = { className: 'string', variants: [ { begin: '(u8?|U)?L?"', end: '"', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '(u8?|U)?R"', end: '"', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '\'\\\\?.', end: '\'', illegal: '.' } ] }; var NUMBERS = { className: 'number', variants: [ { begin: '\\b(0b[01\']+)' }, { begin: '\\b([\\d\']+(\\.[\\d\']*)?|\\.[\\d\']+)(u|U|l|L|ul|UL|f|F|b|B)' }, { begin: '(-?)(\\b0[xX][a-fA-F0-9\']+|(\\b[\\d\']+(\\.[\\d\']*)?|\\.[\\d\']+)([eE][-+]?[\\d\']+)?)' } ], relevance: 0 }; var PREPROCESSOR = { className: 'meta', begin: /#\s*[a-z]+\b/, end: /$/, keywords: { 'meta-keyword': 'if else elif endif define undef warning error line ' + 'pragma ifdef ifndef include' }, contains: [ { begin: /\\\n/, relevance: 0 }, hljs.inherit(STRINGS, {className: 'meta-string'}), { className: 'meta-string', begin: '<', end: '>', illegal: '\\n', }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }; var FUNCTION_TITLE = hljs.IDENT_RE + '\\s*\\('; var CPP_KEYWORDS = { keyword: 'int float while private char catch import module export virtual operator sizeof ' + 'dynamic_cast|10 typedef const_cast|10 const struct for static_cast|10 union namespace ' + 'unsigned long volatile static protected bool template mutable if public friend ' + 'do goto auto void enum else break extern using class asm case typeid ' + 'short reinterpret_cast|10 default double register explicit signed typename try this ' + 'switch continue inline delete alignof constexpr decltype ' + 'noexcept static_assert thread_local restrict _Bool complex _Complex _Imaginary ' + 'atomic_bool atomic_char atomic_schar ' + 'atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong ' + 'atomic_ullong new throw return', built_in: 'std string cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream ' + 'auto_ptr deque list queue stack vector map set bitset multiset multimap unordered_set ' + 'unordered_map unordered_multiset unordered_multimap array shared_ptr abort abs acos ' + 'asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp ' + 'fscanf isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper ' + 'isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow ' + 'printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp ' + 'strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan ' + 'vfprintf vprintf vsprintf endl initializer_list unique_ptr', literal: 'true false nullptr NULL' }; var EXPRESSION_CONTAINS = [ CPP_PRIMITIVE_TYPES, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, NUMBERS, STRINGS ]; return { aliases: ['c', 'cc', 'h', 'c++', 'h++', 'hpp'], keywords: CPP_KEYWORDS, illegal: '</', contains: EXPRESSION_CONTAINS.concat([ PREPROCESSOR, { begin: '\\b(deque|list|queue|stack|vector|map|set|bitset|multiset|multimap|unordered_map|unordered_set|unordered_multiset|unordered_multimap|array)\\s*<', end: '>', keywords: CPP_KEYWORDS, contains: ['self', 
CPP_PRIMITIVE_TYPES] }, { begin: hljs.IDENT_RE + '::', keywords: CPP_KEYWORDS }, { // This mode covers expression context where we can't expect a function // definition and shouldn't highlight anything that looks like one: // `return some()`, `else if()`, `(x*sum(1, 2))` variants: [ {begin: /=/, end: /;/}, {begin: /\(/, end: /\)/}, {beginKeywords: 'new throw return else', end: /;/} ], keywords: CPP_KEYWORDS, contains: EXPRESSION_CONTAINS.concat([ { begin: /\(/, end: /\)/, keywords: CPP_KEYWORDS, contains: EXPRESSION_CONTAINS.concat(['self']), relevance: 0 } ]), relevance: 0 }, { className: 'function', begin: '(' + hljs.IDENT_RE + '[\\*&\\s]+)+' + FUNCTION_TITLE, returnBegin: true, end: /[{;=]/, excludeEnd: true, keywords: CPP_KEYWORDS, illegal: /[^\w\s\*&]/, contains: [ { begin: FUNCTION_TITLE, returnBegin: true, contains: [hljs.TITLE_MODE], relevance: 0 }, { className: 'params', begin: /\(/, end: /\)/, keywords: CPP_KEYWORDS, relevance: 0, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, STRINGS, NUMBERS, CPP_PRIMITIVE_TYPES ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, PREPROCESSOR ] } ]), exports: { preprocessor: PREPROCESSOR, strings: STRINGS, keywords: CPP_KEYWORDS } }; }; /***/ }, /* 179 */ /***/ function(module, exports) { module.exports = function(hljs) { var CPP = hljs.getLanguage('cpp').exports; return { keywords: { keyword: 'boolean byte word string String array ' + CPP.keywords.keyword, built_in: 'setup loop while catch for if do goto try switch case else ' + 'default break continue return ' + 'KeyboardController MouseController SoftwareSerial ' + 'EthernetServer EthernetClient LiquidCrystal ' + 'RobotControl GSMVoiceCall EthernetUDP EsploraTFT ' + 'HttpClient RobotMotor WiFiClient GSMScanner ' + 'FileSystem Scheduler GSMServer YunClient YunServer ' + 'IPAddress GSMClient GSMModem Keyboard Ethernet ' + 'Console GSMBand Esplora Stepper Process ' + 'WiFiUDP GSM_SMS Mailbox USBHost Firmata PImage ' + 'Client Server GSMPIN FileIO Bridge Serial ' + 'EEPROM Stream Mouse Audio Servo File Task ' + 'GPRS WiFi Wire TFT GSM SPI SD ' + 'runShellCommandAsynchronously analogWriteResolution ' + 'retrieveCallingNumber printFirmwareVersion ' + 'analogReadResolution sendDigitalPortPair ' + 'noListenOnLocalhost readJoystickButton setFirmwareVersion ' + 'readJoystickSwitch scrollDisplayRight getVoiceCallStatus ' + 'scrollDisplayLeft writeMicroseconds delayMicroseconds ' + 'beginTransmission getSignalStrength runAsynchronously ' + 'getAsynchronously listenOnLocalhost getCurrentCarrier ' + 'readAccelerometer messageAvailable sendDigitalPorts ' + 'lineFollowConfig countryNameWrite runShellCommand ' + 'readStringUntil rewindDirectory readTemperature ' + 'setClockDivider readLightSensor endTransmission ' + 'analogReference detachInterrupt countryNameRead ' + 'attachInterrupt encryptionType readBytesUntil ' + 'robotNameWrite readMicrophone robotNameRead cityNameWrite ' + 'userNameWrite readJoystickY readJoystickX mouseReleased ' + 'openNextFile scanNetworks noInterrupts digitalWrite ' + 'beginSpeaker mousePressed isActionDone mouseDragged ' + 'displayLogos noAutoscroll addParameter remoteNumber ' + 'getModifiers keyboardRead userNameRead waitContinue ' + 'processInput parseCommand printVersion readNetworks ' + 'writeMessage blinkVersion cityNameRead readMessage ' + 'setDataMode parsePacket isListening setBitOrder ' + 'beginPacket isDirectory motorsWrite drawCompass ' + 'digitalRead clearScreen serialEvent rightToLeft ' + 'setTextSize leftToRight requestFrom keyReleased ' + 
'compassRead analogWrite interrupts WiFiServer ' + 'disconnect playMelody parseFloat autoscroll ' + 'getPINUsed setPINUsed setTimeout sendAnalog ' + 'readSlider analogRead beginWrite createChar ' + 'motorsStop keyPressed tempoWrite readButton ' + 'subnetMask debugPrint macAddress writeGreen ' + 'randomSeed attachGPRS readString sendString ' + 'remotePort releaseAll mouseMoved background ' + 'getXChange getYChange answerCall getResult ' + 'voiceCall endPacket constrain getSocket writeJSON ' + 'getButton available connected findUntil readBytes ' + 'exitValue readGreen writeBlue startLoop IPAddress ' + 'isPressed sendSysex pauseMode gatewayIP setCursor ' + 'getOemKey tuneWrite noDisplay loadImage switchPIN ' + 'onRequest onReceive changePIN playFile noBuffer ' + 'parseInt overflow checkPIN knobRead beginTFT ' + 'bitClear updateIR bitWrite position writeRGB ' + 'highByte writeRed setSpeed readBlue noStroke ' + 'remoteIP transfer shutdown hangCall beginSMS ' + 'endWrite attached maintain noCursor checkReg ' + 'checkPUK shiftOut isValid shiftIn pulseIn ' + 'connect println localIP pinMode getIMEI ' + 'display noBlink process getBand running beginSD ' + 'drawBMP lowByte setBand release bitRead prepare ' + 'pointTo readRed setMode noFill remove listen ' + 'stroke detach attach noTone exists buffer ' + 'height bitSet circle config cursor random ' + 'IRread setDNS endSMS getKey micros ' + 'millis begin print write ready flush width ' + 'isPIN blink clear press mkdir rmdir close ' + 'point yield image BSSID click delay ' + 'read text move peek beep rect line open ' + 'seek fill size turn stop home find ' + 'step tone sqrt RSSI SSID ' + 'end bit tan cos sin pow map abs max ' + 'min get run put', literal: 'DIGITAL_MESSAGE FIRMATA_STRING ANALOG_MESSAGE ' + 'REPORT_DIGITAL REPORT_ANALOG INPUT_PULLUP ' + 'SET_PIN_MODE INTERNAL2V56 SYSTEM_RESET LED_BUILTIN ' + 'INTERNAL1V1 SYSEX_START INTERNAL EXTERNAL ' + 'DEFAULT OUTPUT INPUT HIGH LOW' }, contains: [ CPP.preprocessor, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 180 */ /***/ function(module, exports) { module.exports = function(hljs) { //local labels: %?[FB]?[AT]?\d{1,2}\w+ return { case_insensitive: true, aliases: ['arm'], lexemes: '\\.?' 
+ hljs.IDENT_RE, keywords: { meta: //GNU preprocs '.2byte .4byte .align .ascii .asciz .balign .byte .code .data .else .end .endif .endm .endr .equ .err .exitm .extern .global .hword .if .ifdef .ifndef .include .irp .long .macro .rept .req .section .set .skip .space .text .word .arm .thumb .code16 .code32 .force_thumb .thumb_func .ltorg '+ //ARM directives 'ALIAS ALIGN ARM AREA ASSERT ATTR CN CODE CODE16 CODE32 COMMON CP DATA DCB DCD DCDU DCDO DCFD DCFDU DCI DCQ DCQU DCW DCWU DN ELIF ELSE END ENDFUNC ENDIF ENDP ENTRY EQU EXPORT EXPORTAS EXTERN FIELD FILL FUNCTION GBLA GBLL GBLS GET GLOBAL IF IMPORT INCBIN INCLUDE INFO KEEP LCLA LCLL LCLS LTORG MACRO MAP MEND MEXIT NOFP OPT PRESERVE8 PROC QN READONLY RELOC REQUIRE REQUIRE8 RLIST FN ROUT SETA SETL SETS SN SPACE SUBT THUMB THUMBX TTL WHILE WEND ', built_in: 'r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 '+ //standard registers 'pc lr sp ip sl sb fp '+ //typical regs plus backward compatibility 'a1 a2 a3 a4 v1 v2 v3 v4 v5 v6 v7 v8 f0 f1 f2 f3 f4 f5 f6 f7 '+ //more regs and fp 'p0 p1 p2 p3 p4 p5 p6 p7 p8 p9 p10 p11 p12 p13 p14 p15 '+ //coprocessor regs 'c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 '+ //more coproc 'q0 q1 q2 q3 q4 q5 q6 q7 q8 q9 q10 q11 q12 q13 q14 q15 '+ //advanced SIMD NEON regs //program status registers 'cpsr_c cpsr_x cpsr_s cpsr_f cpsr_cx cpsr_cxs cpsr_xs cpsr_xsf cpsr_sf cpsr_cxsf '+ 'spsr_c spsr_x spsr_s spsr_f spsr_cx spsr_cxs spsr_xs spsr_xsf spsr_sf spsr_cxsf '+ //NEON and VFP registers 's0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15 '+ 's16 s17 s18 s19 s20 s21 s22 s23 s24 s25 s26 s27 s28 s29 s30 s31 '+ 'd0 d1 d2 d3 d4 d5 d6 d7 d8 d9 d10 d11 d12 d13 d14 d15 '+ 'd16 d17 d18 d19 d20 d21 d22 d23 d24 d25 d26 d27 d28 d29 d30 d31 ' + '{PC} {VAR} {TRUE} {FALSE} {OPT} {CONFIG} {ENDIAN} {CODESIZE} {CPU} {FPU} {ARCHITECTURE} {PCSTOREOFFSET} {ARMASM_VERSION} {INTER} {ROPI} {RWPI} {SWST} {NOSWST} . @' }, contains: [ { className: 'keyword', begin: '\\b('+ //mnemonics 'adc|'+ '(qd?|sh?|u[qh]?)?add(8|16)?|usada?8|(q|sh?|u[qh]?)?(as|sa)x|'+ 'and|adrl?|sbc|rs[bc]|asr|b[lx]?|blx|bxj|cbn?z|tb[bh]|bic|'+ 'bfc|bfi|[su]bfx|bkpt|cdp2?|clz|clrex|cmp|cmn|cpsi[ed]|cps|'+ 'setend|dbg|dmb|dsb|eor|isb|it[te]{0,3}|lsl|lsr|ror|rrx|'+ 'ldm(([id][ab])|f[ds])?|ldr((s|ex)?[bhd])?|movt?|mvn|mra|mar|'+ 'mul|[us]mull|smul[bwt][bt]|smu[as]d|smmul|smmla|'+ 'mla|umlaal|smlal?([wbt][bt]|d)|mls|smlsl?[ds]|smc|svc|sev|'+ 'mia([bt]{2}|ph)?|mrr?c2?|mcrr2?|mrs|msr|orr|orn|pkh(tb|bt)|rbit|'+ 'rev(16|sh)?|sel|[su]sat(16)?|nop|pop|push|rfe([id][ab])?|'+ 'stm([id][ab])?|str(ex)?[bhd]?|(qd?)?sub|(sh?|q|u[qh]?)?sub(8|16)|'+ '[su]xt(a?h|a?b(16)?)|srs([id][ab])?|swpb?|swi|smi|tst|teq|'+ 'wfe|wfi|yield'+ ')'+ '(eq|ne|cs|cc|mi|pl|vs|vc|hi|ls|ge|lt|gt|le|al|hs|lo)?'+ //condition codes '[sptrx]?' 
, //legal postfixes end: '\\s' }, hljs.COMMENT('[;@]', '$', {relevance: 0}), hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, { className: 'string', begin: '\'', end: '[^\\\\]\'', relevance: 0 }, { className: 'title', begin: '\\|', end: '\\|', illegal: '\\n', relevance: 0 }, { className: 'number', variants: [ {begin: '[#$=]?0x[0-9a-f]+'}, //hex {begin: '[#$=]?0b[01]+'}, //bin {begin: '[#$=]\\d+'}, //literal {begin: '\\b\\d+'} //bare number ], relevance: 0 }, { className: 'symbol', variants: [ {begin: '^[a-z_\\.\\$][a-z0-9_\\.\\$]+'}, //ARM syntax {begin: '^\\s*[a-z_\\.\\$][a-z0-9_\\.\\$]+:'}, //GNU ARM syntax {begin: '[=#]\\w+' } //label reference ], relevance: 0 } ] }; }; /***/ }, /* 181 */ /***/ function(module, exports) { module.exports = function(hljs) { var XML_IDENT_RE = '[A-Za-z0-9\\._:-]+'; var TAG_INTERNALS = { endsWithParent: true, illegal: /</, relevance: 0, contains: [ { className: 'attr', begin: XML_IDENT_RE, relevance: 0 }, { begin: /=\s*/, relevance: 0, contains: [ { className: 'string', endsParent: true, variants: [ {begin: /"/, end: /"/}, {begin: /'/, end: /'/}, {begin: /[^\s"'=<>`]+/} ] } ] } ] }; return { aliases: ['html', 'xhtml', 'rss', 'atom', 'xjb', 'xsd', 'xsl', 'plist'], case_insensitive: true, contains: [ { className: 'meta', begin: '<!DOCTYPE', end: '>', relevance: 10, contains: [{begin: '\\[', end: '\\]'}] }, hljs.COMMENT( '<!--', '-->', { relevance: 10 } ), { begin: '<\\!\\[CDATA\\[', end: '\\]\\]>', relevance: 10 }, { begin: /<\?(php)?/, end: /\?>/, subLanguage: 'php', contains: [{begin: '/\\*', end: '\\*/', skip: true}] }, { className: 'tag', /* The lookahead pattern (?=...) ensures that 'begin' only matches '<style' as a single word, followed by a whitespace or an ending braket. The '$' is needed for the lexeme to be recognized by hljs.subMode() that tests lexemes outside the stream. */ begin: '<style(?=\\s|>|$)', end: '>', keywords: {name: 'style'}, contains: [TAG_INTERNALS], starts: { end: '</style>', returnEnd: true, subLanguage: ['css', 'xml'] } }, { className: 'tag', // See the comment in the <style tag about the lookahead pattern begin: '<script(?=\\s|>|$)', end: '>', keywords: {name: 'script'}, contains: [TAG_INTERNALS], starts: { end: '\<\/script\>', returnEnd: true, subLanguage: ['actionscript', 'javascript', 'handlebars', 'xml'] } }, { className: 'meta', variants: [ {begin: /<\?xml/, end: /\?>/, relevance: 10}, {begin: /<\?\w+/, end: /\?>/} ] }, { className: 'tag', begin: '</?', end: '/?>', contains: [ { className: 'name', begin: /[^\/><\s]+/, relevance: 0 }, TAG_INTERNALS ] } ] }; }; /***/ }, /* 182 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['adoc'], contains: [ // block comment hljs.COMMENT( '^/{4,}\\n', '\\n/{4,}$', // can also be done as... 
//'^/{4,}$', //'^/{4,}$', { relevance: 10 } ), // line comment hljs.COMMENT( '^//', '$', { relevance: 0 } ), // title { className: 'title', begin: '^\\.\\w.*$' }, // example, admonition & sidebar blocks { begin: '^[=\\*]{4,}\\n', end: '\\n^[=\\*]{4,}$', relevance: 10 }, // headings { className: 'section', relevance: 10, variants: [ {begin: '^(={1,5}) .+?( \\1)?$'}, {begin: '^[^\\[\\]\\n]+?\\n[=\\-~\\^\\+]{2,}$'}, ] }, // document attributes { className: 'meta', begin: '^:.+?:', end: '\\s', excludeEnd: true, relevance: 10 }, // block attributes { className: 'meta', begin: '^\\[.+?\\]$', relevance: 0 }, // quoteblocks { className: 'quote', begin: '^_{4,}\\n', end: '\\n_{4,}$', relevance: 10 }, // listing and literal blocks { className: 'code', begin: '^[\\-\\.]{4,}\\n', end: '\\n[\\-\\.]{4,}$', relevance: 10 }, // passthrough blocks { begin: '^\\+{4,}\\n', end: '\\n\\+{4,}$', contains: [ { begin: '<', end: '>', subLanguage: 'xml', relevance: 0 } ], relevance: 10 }, // lists (can only capture indicators) { className: 'bullet', begin: '^(\\*+|\\-+|\\.+|[^\\n]+?::)\\s+' }, // admonition { className: 'symbol', begin: '^(NOTE|TIP|IMPORTANT|WARNING|CAUTION):\\s+', relevance: 10 }, // inline strong { className: 'strong', // must not follow a word character or be followed by an asterisk or space begin: '\\B\\*(?![\\*\\s])', end: '(\\n{2}|\\*)', // allow escaped asterisk followed by word char contains: [ { begin: '\\\\*\\w', relevance: 0 } ] }, // inline emphasis { className: 'emphasis', // must not follow a word character or be followed by a single quote or space begin: '\\B\'(?![\'\\s])', end: '(\\n{2}|\')', // allow escaped single quote followed by word char contains: [ { begin: '\\\\\'\\w', relevance: 0 } ], relevance: 0 }, // inline emphasis (alt) { className: 'emphasis', // must not follow a word character or be followed by an underline or space begin: '_(?![_\\s])', end: '(\\n{2}|_)', relevance: 0 }, // inline smart quotes { className: 'string', variants: [ {begin: "``.+?''"}, {begin: "`.+?'"} ] }, // inline code snippets (TODO should get same treatment as strong and emphasis) { className: 'code', begin: '(`.+?`|\\+.+?\\+)', relevance: 0 }, // indented literal block { className: 'code', begin: '^[ \\t]', end: '$', relevance: 0 }, // horizontal rules { begin: '^\'{3,}[ \\t]*$', relevance: 10 }, // images and links { begin: '(link:)?(http|https|ftp|file|irc|image:?):\\S+\\[.*?\\]', returnBegin: true, contains: [ { begin: '(link|image:?):', relevance: 0 }, { className: 'link', begin: '\\w', end: '[^\\[]+', relevance: 0 }, { className: 'string', begin: '\\[', end: '\\]', excludeBegin: true, excludeEnd: true, relevance: 0 } ], relevance: 10 } ] }; }; /***/ }, /* 183 */ /***/ function(module, exports) { module.exports = function (hljs) { var KEYWORDS = 'false synchronized int abstract float private char boolean static null if const ' + 'for true while long throw strictfp finally protected import native final return void ' + 'enum else extends implements break transient new catch instanceof byte super volatile case ' + 'assert short package default double public try this switch continue throws privileged ' + 'aspectOf adviceexecution proceed cflowbelow cflow initialization preinitialization ' + 'staticinitialization withincode target within execution getWithinTypeName handler ' + 'thisJoinPoint thisJoinPointStaticPart thisEnclosingJoinPointStaticPart declare parents '+ 'warning error soft precedence thisAspectInstance'; var SHORTKEYS = 'get set args call'; return { keywords : KEYWORDS, illegal : 
/<\/|#/, contains : [ hljs.COMMENT( '/\\*\\*', '\\*/', { relevance : 0, contains : [ { // eat up @'s in emails to prevent them to be recognized as doctags begin: /\w+@/, relevance: 0 }, { className : 'doctag', begin : '@[A-Za-z]+' } ] } ), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { className : 'class', beginKeywords : 'aspect', end : /[{;=]/, excludeEnd : true, illegal : /[:;"\[\]]/, contains : [ { beginKeywords : 'extends implements pertypewithin perthis pertarget percflowbelow percflow issingleton' }, hljs.UNDERSCORE_TITLE_MODE, { begin : /\([^\)]*/, end : /[)]+/, keywords : KEYWORDS + ' ' + SHORTKEYS, excludeEnd : false } ] }, { className : 'class', beginKeywords : 'class interface', end : /[{;=]/, excludeEnd : true, relevance: 0, keywords : 'class interface', illegal : /[:"\[\]]/, contains : [ {beginKeywords : 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE ] }, { // AspectJ Constructs beginKeywords : 'pointcut after before around throwing returning', end : /[)]/, excludeEnd : false, illegal : /["\[\]]/, contains : [ { begin : hljs.UNDERSCORE_IDENT_RE + '\\s*\\(', returnBegin : true, contains : [hljs.UNDERSCORE_TITLE_MODE] } ] }, { begin : /[:]/, returnBegin : true, end : /[{;]/, relevance: 0, excludeEnd : false, keywords : KEYWORDS, illegal : /["\[\]]/, contains : [ { begin : hljs.UNDERSCORE_IDENT_RE + '\\s*\\(', keywords : KEYWORDS + ' ' + SHORTKEYS }, hljs.QUOTE_STRING_MODE ] }, { // this prevents 'new Name(...), or throw ...' from being recognized as a function definition beginKeywords : 'new throw', relevance : 0 }, { // the function class is a bit different for AspectJ compared to the Java language className : 'function', begin : /\w+ +\w+(\.)?\w+\s*\([^\)]*\)\s*((throws)[\w\s,]+)?[\{;]/, returnBegin : true, end : /[{;=]/, keywords : KEYWORDS, excludeEnd : true, contains : [ { begin : hljs.UNDERSCORE_IDENT_RE + '\\s*\\(', returnBegin : true, relevance: 0, contains : [hljs.UNDERSCORE_TITLE_MODE] }, { className : 'params', begin : /\(/, end : /\)/, relevance: 0, keywords : KEYWORDS, contains : [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, hljs.C_NUMBER_MODE, { // annotation is also used in this language className : 'meta', begin : '@[A-Za-z]+' } ] }; }; /***/ }, /* 184 */ /***/ function(module, exports) { module.exports = function(hljs) { var BACKTICK_ESCAPE = { begin: /`[\s\S]/ }; return { case_insensitive: true, keywords: { keyword: 'Break Continue Else Gosub If Loop Return While', literal: 'A|0 true false NOT AND OR', built_in: 'ComSpec Clipboard ClipboardAll ErrorLevel', }, contains: [ { className: 'built_in', begin: 'A_[a-zA-Z0-9]+' }, BACKTICK_ESCAPE, hljs.inherit(hljs.QUOTE_STRING_MODE, {contains: [BACKTICK_ESCAPE]}), hljs.COMMENT(';', '$', {relevance: 0}), { className: 'number', begin: hljs.NUMBER_RE, relevance: 0 }, { className: 'variable', // FIXME begin: '%', end: '%', illegal: '\\n', contains: [BACKTICK_ESCAPE] }, { className: 'symbol', contains: [BACKTICK_ESCAPE], variants: [ {begin: '^[^\\n";]+::(?!=)'}, {begin: '^[^\\n";]+:(?!=)', relevance: 0} // zero relevance as it catches a lot of things // followed by a single ':' in many languages ] }, { // consecutive commas, not for highlighting but just for relevance begin: ',\\s*,' } ] } }; /***/ }, /* 185 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = 'ByRef Case Const ContinueCase ContinueLoop ' + 'Default Dim Do 
Else ElseIf EndFunc EndIf EndSelect ' + 'EndSwitch EndWith Enum Exit ExitLoop For Func ' + 'Global If In Local Next ReDim Return Select Static ' + 'Step Switch Then To Until Volatile WEnd While With', LITERAL = 'True False And Null Not Or', BUILT_IN = 'Abs ACos AdlibRegister AdlibUnRegister Asc AscW ASin Assign ATan AutoItSetOption AutoItWinGetTitle AutoItWinSetTitle Beep Binary BinaryLen BinaryMid BinaryToString BitAND BitNOT BitOR BitRotate BitShift BitXOR BlockInput Break Call CDTray Ceiling Chr ChrW ClipGet ClipPut ConsoleRead ConsoleWrite ConsoleWriteError ControlClick ControlCommand ControlDisable ControlEnable ControlFocus ControlGetFocus ControlGetHandle ControlGetPos ControlGetText ControlHide ControlListView ControlMove ControlSend ControlSetText ControlShow ControlTreeView Cos Dec DirCopy DirCreate DirGetSize DirMove DirRemove DllCall DllCallAddress DllCallbackFree DllCallbackGetPtr DllCallbackRegister DllClose DllOpen DllStructCreate DllStructGetData DllStructGetPtr DllStructGetSize DllStructSetData DriveGetDrive DriveGetFileSystem DriveGetLabel DriveGetSerial DriveGetType DriveMapAdd DriveMapDel DriveMapGet DriveSetLabel DriveSpaceFree DriveSpaceTotal DriveStatus EnvGet EnvSet EnvUpdate Eval Execute Exp FileChangeDir FileClose FileCopy FileCreateNTFSLink FileCreateShortcut FileDelete FileExists FileFindFirstFile FileFindNextFile FileFlush FileGetAttrib FileGetEncoding FileGetLongName FileGetPos FileGetShortcut FileGetShortName FileGetSize FileGetTime FileGetVersion FileInstall FileMove FileOpen FileOpenDialog FileRead FileReadLine FileReadToArray FileRecycle FileRecycleEmpty FileSaveDialog FileSelectFolder FileSetAttrib FileSetEnd FileSetPos FileSetTime FileWrite FileWriteLine Floor FtpSetProxy FuncName GUICreate GUICtrlCreateAvi GUICtrlCreateButton GUICtrlCreateCheckbox GUICtrlCreateCombo GUICtrlCreateContextMenu GUICtrlCreateDate GUICtrlCreateDummy GUICtrlCreateEdit GUICtrlCreateGraphic GUICtrlCreateGroup GUICtrlCreateIcon GUICtrlCreateInput GUICtrlCreateLabel GUICtrlCreateList GUICtrlCreateListView GUICtrlCreateListViewItem GUICtrlCreateMenu GUICtrlCreateMenuItem GUICtrlCreateMonthCal GUICtrlCreateObj GUICtrlCreatePic GUICtrlCreateProgress GUICtrlCreateRadio GUICtrlCreateSlider GUICtrlCreateTab GUICtrlCreateTabItem GUICtrlCreateTreeView GUICtrlCreateTreeViewItem GUICtrlCreateUpdown GUICtrlDelete GUICtrlGetHandle GUICtrlGetState GUICtrlRead GUICtrlRecvMsg GUICtrlRegisterListViewSort GUICtrlSendMsg GUICtrlSendToDummy GUICtrlSetBkColor GUICtrlSetColor GUICtrlSetCursor GUICtrlSetData GUICtrlSetDefBkColor GUICtrlSetDefColor GUICtrlSetFont GUICtrlSetGraphic GUICtrlSetImage GUICtrlSetLimit GUICtrlSetOnEvent GUICtrlSetPos GUICtrlSetResizing GUICtrlSetState GUICtrlSetStyle GUICtrlSetTip GUIDelete GUIGetCursorInfo GUIGetMsg GUIGetStyle GUIRegisterMsg GUISetAccelerators GUISetBkColor GUISetCoord GUISetCursor GUISetFont GUISetHelp GUISetIcon GUISetOnEvent GUISetState GUISetStyle GUIStartGroup GUISwitch Hex HotKeySet HttpSetProxy HttpSetUserAgent HWnd InetClose InetGet InetGetInfo InetGetSize InetRead IniDelete IniRead IniReadSection IniReadSectionNames IniRenameSection IniWrite IniWriteSection InputBox Int IsAdmin IsArray IsBinary IsBool IsDeclared IsDllStruct IsFloat IsFunc IsHWnd IsInt IsKeyword IsNumber IsObj IsPtr IsString Log MemGetStats Mod MouseClick MouseClickDrag MouseDown MouseGetCursor MouseGetPos MouseMove MouseUp MouseWheel MsgBox Number ObjCreate ObjCreateInterface ObjEvent ObjGet ObjName OnAutoItExitRegister OnAutoItExitUnRegister Ping PixelChecksum PixelGetColor 
PixelSearch ProcessClose ProcessExists ProcessGetStats ProcessList ProcessSetPriority ProcessWait ProcessWaitClose ProgressOff ProgressOn ProgressSet Ptr Random RegDelete RegEnumKey RegEnumVal RegRead RegWrite Round Run RunAs RunAsWait RunWait Send SendKeepActive SetError SetExtended ShellExecute ShellExecuteWait Shutdown Sin Sleep SoundPlay SoundSetWaveVolume SplashImageOn SplashOff SplashTextOn Sqrt SRandom StatusbarGetText StderrRead StdinWrite StdioClose StdoutRead String StringAddCR StringCompare StringFormat StringFromASCIIArray StringInStr StringIsAlNum StringIsAlpha StringIsASCII StringIsDigit StringIsFloat StringIsInt StringIsLower StringIsSpace StringIsUpper StringIsXDigit StringLeft StringLen StringLower StringMid StringRegExp StringRegExpReplace StringReplace StringReverse StringRight StringSplit StringStripCR StringStripWS StringToASCIIArray StringToBinary StringTrimLeft StringTrimRight StringUpper Tan TCPAccept TCPCloseSocket TCPConnect TCPListen TCPNameToIP TCPRecv TCPSend TCPShutdown, UDPShutdown TCPStartup, UDPStartup TimerDiff TimerInit ToolTip TrayCreateItem TrayCreateMenu TrayGetMsg TrayItemDelete TrayItemGetHandle TrayItemGetState TrayItemGetText TrayItemSetOnEvent TrayItemSetState TrayItemSetText TraySetClick TraySetIcon TraySetOnEvent TraySetPauseIcon TraySetState TraySetToolTip TrayTip UBound UDPBind UDPCloseSocket UDPOpen UDPRecv UDPSend VarGetType WinActivate WinActive WinClose WinExists WinFlash WinGetCaretPos WinGetClassList WinGetClientSize WinGetHandle WinGetPos WinGetProcess WinGetState WinGetText WinGetTitle WinKill WinList WinMenuSelectItem WinMinimizeAll WinMinimizeAllUndo WinMove WinSetOnTop WinSetState WinSetTitle WinSetTrans WinWait', COMMENT = { variants: [ hljs.COMMENT(';', '$', {relevance: 0}), hljs.COMMENT('#cs', '#ce'), hljs.COMMENT('#comments-start', '#comments-end') ] }, VARIABLE = { begin: '\\$[A-z0-9_]+' }, STRING = { className: 'string', variants: [{ begin: /"/, end: /"/, contains: [{ begin: /""/, relevance: 0 }] }, { begin: /'/, end: /'/, contains: [{ begin: /''/, relevance: 0 }] }] }, NUMBER = { variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE] }, PREPROCESSOR = { className: 'meta', begin: '#', end: '$', keywords: {'meta-keyword': 'comments include include-once NoTrayIcon OnAutoItStartRegister pragma compile RequireAdmin'}, contains: [{ begin: /\\\n/, relevance: 0 }, { beginKeywords: 'include', keywords: {'meta-keyword': 'include'}, end: '$', contains: [ STRING, { className: 'meta-string', variants: [{ begin: '<', end: '>' }, { begin: /"/, end: /"/, contains: [{ begin: /""/, relevance: 0 }] }, { begin: /'/, end: /'/, contains: [{ begin: /''/, relevance: 0 }] }] } ] }, STRING, COMMENT ] }, CONSTANT = { className: 'symbol', // begin: '@', // end: '$', // keywords: 'AppDataCommonDir AppDataDir AutoItExe AutoItPID AutoItVersion AutoItX64 COM_EventObj CommonFilesDir Compiled ComputerName ComSpec CPUArch CR CRLF DesktopCommonDir DesktopDepth DesktopDir DesktopHeight DesktopRefresh DesktopWidth DocumentsCommonDir error exitCode exitMethod extended FavoritesCommonDir FavoritesDir GUI_CtrlHandle GUI_CtrlId GUI_DragFile GUI_DragId GUI_DropId GUI_WinHandle HomeDrive HomePath HomeShare HotKeyPressed HOUR IPAddress1 IPAddress2 IPAddress3 IPAddress4 KBLayout LF LocalAppDataDir LogonDNSDomain LogonDomain LogonServer MDAY MIN MON MSEC MUILang MyDocumentsDir NumParams OSArch OSBuild OSLang OSServicePack OSType OSVersion ProgramFilesDir ProgramsCommonDir ProgramsDir ScriptDir ScriptFullPath ScriptLineNumber ScriptName SEC StartMenuCommonDir StartMenuDir 
StartupCommonDir StartupDir SW_DISABLE SW_ENABLE SW_HIDE SW_LOCK SW_MAXIMIZE SW_MINIMIZE SW_RESTORE SW_SHOW SW_SHOWDEFAULT SW_SHOWMAXIMIZED SW_SHOWMINIMIZED SW_SHOWMINNOACTIVE SW_SHOWNA SW_SHOWNOACTIVATE SW_SHOWNORMAL SW_UNLOCK SystemDir TAB TempDir TRAY_ID TrayIconFlashing TrayIconVisible UserName UserProfileDir WDAY WindowsDir WorkingDir YDAY YEAR', // relevance: 5 begin: '@[A-z0-9_]+' }, FUNCTION = { className: 'function', beginKeywords: 'Func', end: '$', illegal: '\\$|\\[|%', contains: [ hljs.UNDERSCORE_TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)', contains: [ VARIABLE, STRING, NUMBER ] } ] }; return { case_insensitive: true, illegal: /\/\*/, keywords: { keyword: KEYWORDS, built_in: BUILT_IN, literal: LITERAL }, contains: [ COMMENT, VARIABLE, STRING, NUMBER, PREPROCESSOR, CONSTANT, FUNCTION ] } }; /***/ }, /* 186 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: true, lexemes: '\\.?' + hljs.IDENT_RE, keywords: { keyword: /* mnemonic */ 'adc add adiw and andi asr bclr bld brbc brbs brcc brcs break breq brge brhc brhs ' + 'brid brie brlo brlt brmi brne brpl brsh brtc brts brvc brvs bset bst call cbi cbr ' + 'clc clh cli cln clr cls clt clv clz com cp cpc cpi cpse dec eicall eijmp elpm eor ' + 'fmul fmuls fmulsu icall ijmp in inc jmp ld ldd ldi lds lpm lsl lsr mov movw mul ' + 'muls mulsu neg nop or ori out pop push rcall ret reti rjmp rol ror sbc sbr sbrc sbrs ' + 'sec seh sbi sbci sbic sbis sbiw sei sen ser ses set sev sez sleep spm st std sts sub ' + 'subi swap tst wdr', built_in: /* general purpose registers */ 'r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 r16 r17 r18 r19 r20 r21 r22 ' + 'r23 r24 r25 r26 r27 r28 r29 r30 r31 x|0 xh xl y|0 yh yl z|0 zh zl ' + /* IO Registers (ATMega128) */ 'ucsr1c udr1 ucsr1a ucsr1b ubrr1l ubrr1h ucsr0c ubrr0h tccr3c tccr3a tccr3b tcnt3h ' + 'tcnt3l ocr3ah ocr3al ocr3bh ocr3bl ocr3ch ocr3cl icr3h icr3l etimsk etifr tccr1c ' + 'ocr1ch ocr1cl twcr twdr twar twsr twbr osccal xmcra xmcrb eicra spmcsr spmcr portg ' + 'ddrg ping portf ddrf sreg sph spl xdiv rampz eicrb eimsk gimsk gicr eifr gifr timsk ' + 'tifr mcucr mcucsr tccr0 tcnt0 ocr0 assr tccr1a tccr1b tcnt1h tcnt1l ocr1ah ocr1al ' + 'ocr1bh ocr1bl icr1h icr1l tccr2 tcnt2 ocr2 ocdr wdtcr sfior eearh eearl eedr eecr ' + 'porta ddra pina portb ddrb pinb portc ddrc pinc portd ddrd pind spdr spsr spcr udr0 ' + 'ucsr0a ucsr0b ubrr0l acsr admux adcsr adch adcl porte ddre pine pinf', meta: '.byte .cseg .db .def .device .dseg .dw .endmacro .equ .eseg .exit .include .list ' + '.listmac .macro .nolist .org .set' }, contains: [ hljs.C_BLOCK_COMMENT_MODE, hljs.COMMENT( ';', '$', { relevance: 0 } ), hljs.C_NUMBER_MODE, // 0x..., decimal, float hljs.BINARY_NUMBER_MODE, // 0b... { className: 'number', begin: '\\b(\\$[a-zA-Z0-9]+|0o[0-7]+)' // $..., 0o... 
}, hljs.QUOTE_STRING_MODE, { className: 'string', begin: '\'', end: '[^\\\\]\'', illegal: '[^\\\\][^\']' }, {className: 'symbol', begin: '^[A-Za-z0-9_.$]+:'}, {className: 'meta', begin: '#', end: '$'}, { // substitution in «.macro» className: 'subst', begin: '@[0-9]+' } ] }; }; /***/ }, /* 187 */ /***/ function(module, exports) { module.exports = function(hljs) { var VARIABLE = { className: 'variable', variants: [ {begin: /\$[\w\d#@][\w\d_]*/}, {begin: /\$\{(.*?)}/} ] }; var KEYWORDS = 'BEGIN END if else while do for in break continue delete next nextfile function func exit|10'; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE], variants: [ { begin: /(u|b)?r?'''/, end: /'''/, relevance: 10 }, { begin: /(u|b)?r?"""/, end: /"""/, relevance: 10 }, { begin: /(u|r|ur)'/, end: /'/, relevance: 10 }, { begin: /(u|r|ur)"/, end: /"/, relevance: 10 }, { begin: /(b|br)'/, end: /'/ }, { begin: /(b|br)"/, end: /"/ }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] }; return { keywords: { keyword: KEYWORDS }, contains: [ VARIABLE, STRING, hljs.REGEXP_MODE, hljs.HASH_COMMENT_MODE, hljs.NUMBER_MODE ] } }; /***/ }, /* 188 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: 'false int abstract private char boolean static null if for true ' + 'while long throw finally protected final return void enum else ' + 'break new catch byte super case short default double public try this switch ' + 'continue reverse firstfast firstonly forupdate nofetch sum avg minof maxof count ' + 'order group by asc desc index hint like display edit client server ttsbegin ' + 'ttscommit str real date container anytype common div mod', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, { className: 'meta', begin: '#', end: '$' }, { className: 'class', beginKeywords: 'class interface', end: '{', excludeEnd: true, illegal: ':', contains: [ {beginKeywords: 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE ] } ] }; }; /***/ }, /* 189 */ /***/ function(module, exports) { module.exports = function(hljs) { var VAR = { className: 'variable', variants: [ {begin: /\$[\w\d#@][\w\d_]*/}, {begin: /\$\{(.*?)}/} ] }; var QUOTE_STRING = { className: 'string', begin: /"/, end: /"/, contains: [ hljs.BACKSLASH_ESCAPE, VAR, { className: 'variable', begin: /\$\(/, end: /\)/, contains: [hljs.BACKSLASH_ESCAPE] } ] }; var APOS_STRING = { className: 'string', begin: /'/, end: /'/ }; return { aliases: ['sh', 'zsh'], lexemes: /-?[a-z\._]+/, keywords: { keyword: 'if then else elif fi for while in do done case esac function', literal: 'true false', built_in: // Shell built-ins // http://www.gnu.org/software/bash/manual/html_node/Shell-Builtin-Commands.html 'break cd continue eval exec exit export getopts hash pwd readonly return shift test times ' + 'trap umask unset ' + // Bash built-ins 'alias bind builtin caller command declare echo enable help let local logout mapfile printf ' + 'read readarray source type typeset ulimit unalias ' + // Shell modifiers 'set shopt ' + // Zsh built-ins 'autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles ' + 'compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate ' + 'fc fg float functions getcap getln history integer jobs kill limit log noglob popd print ' + 'pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit ' + 'unsetopt vared wait whence where which zcompile zformat zftp zle
zmodload zparseopts zprof ' + 'zpty zregexparse zsocket zstyle ztcp', _: '-ne -eq -lt -gt -f -d -e -s -l -a' // relevance booster }, contains: [ { className: 'meta', begin: /^#![^\n]+sh\s*$/, relevance: 10 }, { className: 'function', begin: /\w[\w\d_]*\s*\(\s*\)\s*\{/, returnBegin: true, contains: [hljs.inherit(hljs.TITLE_MODE, {begin: /\w[\w\d_]*/})], relevance: 0 }, hljs.HASH_COMMENT_MODE, QUOTE_STRING, APOS_STRING, VAR ] }; }; /***/ }, /* 190 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: true, illegal: '^\.', // Support explicitely typed variables that end with $%! or #. lexemes: '[a-zA-Z][a-zA-Z0-9_\$\%\!\#]*', keywords: { keyword: 'ABS ASC AND ATN AUTO|0 BEEP BLOAD|10 BSAVE|10 CALL CALLS CDBL CHAIN CHDIR CHR$|10 CINT CIRCLE ' + 'CLEAR CLOSE CLS COLOR COM COMMON CONT COS CSNG CSRLIN CVD CVI CVS DATA DATE$ ' + 'DEFDBL DEFINT DEFSNG DEFSTR DEF|0 SEG USR DELETE DIM DRAW EDIT END ENVIRON ENVIRON$ ' + 'EOF EQV ERASE ERDEV ERDEV$ ERL ERR ERROR EXP FIELD FILES FIX FOR|0 FRE GET GOSUB|10 GOTO ' + 'HEX$ IF|0 THEN ELSE|0 INKEY$ INP INPUT INPUT# INPUT$ INSTR IMP INT IOCTL IOCTL$ KEY ON ' + 'OFF LIST KILL LEFT$ LEN LET LINE LLIST LOAD LOC LOCATE LOF LOG LPRINT USING LSET ' + 'MERGE MID$ MKDIR MKD$ MKI$ MKS$ MOD NAME NEW NEXT NOISE NOT OCT$ ON OR PEN PLAY STRIG OPEN OPTION ' + 'BASE OUT PAINT PALETTE PCOPY PEEK PMAP POINT POKE POS PRINT PRINT] PSET PRESET ' + 'PUT RANDOMIZE READ REM RENUM RESET|0 RESTORE RESUME RETURN|0 RIGHT$ RMDIR RND RSET ' + 'RUN SAVE SCREEN SGN SHELL SIN SOUND SPACE$ SPC SQR STEP STICK STOP STR$ STRING$ SWAP ' + 'SYSTEM TAB TAN TIME$ TIMER TROFF TRON TO USR VAL VARPTR VARPTR$ VIEW WAIT WHILE ' + 'WEND WIDTH WINDOW WRITE XOR' }, contains: [ hljs.QUOTE_STRING_MODE, hljs.COMMENT('REM', '$', {relevance: 10}), hljs.COMMENT('\'', '$', {relevance: 0}), { // Match line numbers className: 'symbol', begin: '^[0-9]+\ ', relevance: 10 }, { // Match typed numeric constants (1000, 12.34!, 1.2e5, 1.5#, 1.2D2) className: 'number', begin: '\\b([0-9]+[0-9edED\.]*[#\!]?)', relevance: 0 }, { // Match hexadecimal numbers (&Hxxxx) className: 'number', begin: '(\&[hH][0-9a-fA-F]{1,4})' }, { // Match octal numbers (&Oxxxxxx) className: 'number', begin: '(\&[oO][0-7]{1,6})' } ] }; }; /***/ }, /* 191 */ /***/ function(module, exports) { module.exports = function(hljs){ return { contains: [ // Attribute { className: 'attribute', begin: /</, end: />/ }, // Specific { begin: /::=/, starts: { end: /$/, contains: [ { begin: /</, end: />/ }, // Common hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] } } ] }; }; /***/ }, /* 192 */ /***/ function(module, exports) { module.exports = function(hljs){ var LITERAL = { className: 'literal', begin: '[\\+\\-]', relevance: 0 }; return { aliases: ['bf'], contains: [ hljs.COMMENT( '[^\\[\\]\\.,\\+\\-<> \r\n]', '[\\[\\]\\.,\\+\\-<> \r\n]', { returnEnd: true, relevance: 0 } ), { className: 'title', begin: '[\\[\\]]', relevance: 0 }, { className: 'string', begin: '[\\.,]', relevance: 0 }, { // this mode works as the only relevance counter begin: /\+\+|\-\-/, returnBegin: true, contains: [LITERAL] }, LITERAL ] }; }; /***/ }, /* 193 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = 'div mod in and or not xor asserterror begin case do downto else end exit for if of repeat then to ' + 'until while with var'; var LITERALS = 'false true'; var COMMENT_MODES = [ hljs.C_LINE_COMMENT_MODE, hljs.COMMENT( /\{/, /\}/, { relevance: 0 } ), 
hljs.COMMENT( /\(\*/, /\*\)/, { relevance: 10 } ) ]; var STRING = { className: 'string', begin: /'/, end: /'/, contains: [{begin: /''/}] }; var CHAR_STRING = { className: 'string', begin: /(#\d+)+/ }; var DATE = { className: 'number', begin: '\\b\\d+(\\.\\d+)?(DT|D|T)', relevance: 0 }; var DBL_QUOTED_VARIABLE = { className: 'string', // not a string technically but makes sense to be highlighted in the same style begin: '"', end: '"' }; var PROCEDURE = { className: 'function', beginKeywords: 'procedure', end: /[:;]/, keywords: 'procedure|10', contains: [ hljs.TITLE_MODE, { className: 'params', begin: /\(/, end: /\)/, keywords: KEYWORDS, contains: [STRING, CHAR_STRING] } ].concat(COMMENT_MODES) }; var OBJECT = { className: 'class', begin: 'OBJECT (Table|Form|Report|Dataport|Codeunit|XMLport|MenuSuite|Page|Query) (\\d+) ([^\\r\\n]+)', returnBegin: true, contains: [ hljs.TITLE_MODE, PROCEDURE ] }; return { case_insensitive: true, keywords: { keyword: KEYWORDS, literal: LITERALS }, illegal: /\/\*/, contains: [ STRING, CHAR_STRING, DATE, DBL_QUOTED_VARIABLE, hljs.NUMBER_MODE, OBJECT, PROCEDURE ] }; }; /***/ }, /* 194 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['capnp'], keywords: { keyword: 'struct enum interface union group import using const annotation extends in of on as with from fixed', built_in: 'Void Bool Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64 Float32 Float64 ' + 'Text Data AnyPointer AnyStruct Capability List', literal: 'true false' }, contains: [ hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE, hljs.HASH_COMMENT_MODE, { className: 'meta', begin: /@0x[\w\d]{16};/, illegal: /\n/ }, { className: 'symbol', begin: /@\d+\b/ }, { className: 'class', beginKeywords: 'struct enum', end: /\{/, illegal: /\n/, contains: [ hljs.inherit(hljs.TITLE_MODE, { starts: {endsWithParent: true, excludeEnd: true} // hack: eating everything after the first title }) ] }, { className: 'class', beginKeywords: 'interface', end: /\{/, illegal: /\n/, contains: [ hljs.inherit(hljs.TITLE_MODE, { starts: {endsWithParent: true, excludeEnd: true} // hack: eating everything after the first title }) ] } ] }; }; /***/ }, /* 195 */ /***/ function(module, exports) { module.exports = function(hljs) { // 2.3. 
Identifiers and keywords var KEYWORDS = 'assembly module package import alias class interface object given value ' + 'assign void function new of extends satisfies abstracts in out return ' + 'break continue throw assert dynamic if else switch case for while try ' + 'catch finally then let this outer super is exists nonempty'; // 7.4.1 Declaration Modifiers var DECLARATION_MODIFIERS = 'shared abstract formal default actual variable late native deprecated ' + 'final sealed annotation suppressWarnings small'; // 7.4.2 Documentation var DOCUMENTATION = 'doc by license see throws tagged'; var SUBST = { className: 'subst', excludeBegin: true, excludeEnd: true, begin: /``/, end: /``/, keywords: KEYWORDS, relevance: 10 }; var EXPRESSIONS = [ { // verbatim string className: 'string', begin: '"""', end: '"""', relevance: 10 }, { // string literal or template className: 'string', begin: '"', end: '"', contains: [SUBST] }, { // character literal className: 'string', begin: "'", end: "'" }, { // numeric literal className: 'number', begin: '#[0-9a-fA-F_]+|\\$[01_]+|[0-9_]+(?:\\.[0-9_](?:[eE][+-]?\\d+)?)?[kMGTPmunpf]?', relevance: 0 } ]; SUBST.contains = EXPRESSIONS; return { keywords: { keyword: KEYWORDS + ' ' + DECLARATION_MODIFIERS, meta: DOCUMENTATION }, illegal: '\\$[^01]|#[^0-9a-fA-F]', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.COMMENT('/\\*', '\\*/', {contains: ['self']}), { // compiler annotation className: 'meta', begin: '@[a-z]\\w*(?:\\:\"[^\"]*\")?' } ].concat(EXPRESSIONS) }; }; /***/ }, /* 196 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['clean','icl','dcl'], keywords: { keyword: 'if let in with where case of class instance otherwise ' + 'implementation definition system module from import qualified as ' + 'special code inline foreign export ccall stdcall generic derive ' + 'infix infixl infixr', literal: 'True False' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, {begin: '->|<-[|:]?|::|#!?|>>=|\\{\\||\\|\\}|:==|=:|\\.\\.|<>|`'} // relevance booster ] }; }; /***/ }, /* 197 */ /***/ function(module, exports) { module.exports = function(hljs) { var keywords = { 'builtin-name': // Clojure keywords 'def defonce cond apply if-not if-let if not not= = < > <= >= == + / * - rem '+ 'quot neg? pos? delay? symbol? keyword? true? false? integer? empty? coll? list? '+ 'set? ifn? fn? associative? sequential? sorted? counted? reversible? number? decimal? '+ 'class? distinct? isa? float? rational? reduced? ratio? odd? even? char? seq? vector? '+ 'string? map? nil? contains? zero? instance? not-every? not-any? libspec? -> ->> .. . '+ 'inc compare do dotimes mapcat take remove take-while drop letfn drop-last take-last '+ 'drop-while while intern condp case reduced cycle split-at split-with repeat replicate '+ 'iterate range merge zipmap declare line-seq sort comparator sort-by dorun doall nthnext '+ 'nthrest partition eval doseq await await-for let agent atom send send-off release-pending-sends '+ 'add-watch mapv filterv remove-watch agent-error restart-agent set-error-handler error-handler '+ 'set-error-mode!
error-mode shutdown-agents quote var fn loop recur throw try monitor-enter '+ 'monitor-exit defmacro defn defn- macroexpand macroexpand-1 for dosync and or '+ 'when when-not when-let comp juxt partial sequence memoize constantly complement identity assert '+ 'peek pop doto proxy defstruct first rest cons defprotocol cast coll deftype defrecord last butlast '+ 'sigs reify second ffirst fnext nfirst nnext defmulti defmethod meta with-meta ns in-ns create-ns import '+ 'refer keys select-keys vals key val rseq name namespace promise into transient persistent! conj! '+ 'assoc! dissoc! pop! disj! use class type num float double short byte boolean bigint biginteger '+ 'bigdec print-method print-dup throw-if printf format load compile get-in update-in pr pr-on newline '+ 'flush read slurp read-line subvec with-open memfn time re-find re-groups rand-int rand mod locking '+ 'assert-valid-fdecl alias resolve ref deref refset swap! reset! set-validator! compare-and-set! alter-meta! '+ 'reset-meta! commute get-validator alter ref-set ref-history-count ref-min-history ref-max-history ensure sync io! '+ 'new next conj set! to-array future future-call into-array aset gen-class reduce map filter find empty '+ 'hash-map hash-set sorted-map sorted-map-by sorted-set sorted-set-by vec vector seq flatten reverse assoc dissoc list '+ 'disj get union difference intersection extend extend-type extend-protocol int nth delay count concat chunk chunk-buffer '+ 'chunk-append chunk-first chunk-rest max min dec unchecked-inc-int unchecked-inc unchecked-dec-inc unchecked-dec unchecked-negate '+ 'unchecked-add-int unchecked-add unchecked-subtract-int unchecked-subtract chunk-next chunk-cons chunked-seq? prn vary-meta '+ 'lazy-seq spread list* str find-keyword keyword symbol gensym force rationalize' }; var SYMBOLSTART = 'a-zA-Z_\\-!.?+*=<>&#\''; var SYMBOL_RE = '[' + SYMBOLSTART + '][' + SYMBOLSTART + '0-9/;:]*'; var SIMPLE_NUMBER_RE = '[-+]?\\d+(\\.\\d+)?'; var SYMBOL = { begin: SYMBOL_RE, relevance: 0 }; var NUMBER = { className: 'number', begin: SIMPLE_NUMBER_RE, relevance: 0 }; var STRING = hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}); var COMMENT = hljs.COMMENT( ';', '$', { relevance: 0 } ); var LITERAL = { className: 'literal', begin: /\b(true|false|nil)\b/ }; var COLLECTION = { begin: '[\\[\\{]', end: '[\\]\\}]' }; var HINT = { className: 'comment', begin: '\\^' + SYMBOL_RE }; var HINT_COL = hljs.COMMENT('\\^\\{', '\\}'); var KEY = { className: 'symbol', begin: '[:]{1,2}' + SYMBOL_RE }; var LIST = { begin: '\\(', end: '\\)' }; var BODY = { endsWithParent: true, relevance: 0 }; var NAME = { keywords: keywords, lexemes: SYMBOL_RE, className: 'name', begin: SYMBOL_RE, starts: BODY }; var DEFAULT_CONTAINS = [LIST, STRING, HINT, HINT_COL, COMMENT, KEY, COLLECTION, NUMBER, LITERAL, SYMBOL]; LIST.contains = [hljs.COMMENT('comment', ''), NAME, BODY]; BODY.contains = DEFAULT_CONTAINS; COLLECTION.contains = DEFAULT_CONTAINS; return { aliases: ['clj'], illegal: /\S/, contains: [LIST, STRING, HINT, HINT_COL, COMMENT, KEY, COLLECTION, NUMBER, LITERAL] } }; /***/ }, /* 198 */ /***/ function(module, exports) { module.exports = function(hljs) { return { contains: [ { className: 'meta', begin: /^([\w.-]+|\s*#_)=>/, starts: { end: /$/, subLanguage: 'clojure' } } ] } }; /***/ }, /* 199 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['cmake.in'], case_insensitive: true, keywords: { keyword: 'add_custom_command add_custom_target add_definitions add_dependencies ' + 'add_executable 
add_library add_subdirectory add_test aux_source_directory ' + 'break build_command cmake_minimum_required cmake_policy configure_file ' + 'create_test_sourcelist define_property else elseif enable_language enable_testing ' + 'endforeach endfunction endif endmacro endwhile execute_process export find_file ' + 'find_library find_package find_path find_program fltk_wrap_ui foreach function ' + 'get_cmake_property get_directory_property get_filename_component get_property ' + 'get_source_file_property get_target_property get_test_property if include ' + 'include_directories include_external_msproject include_regular_expression install ' + 'link_directories load_cache load_command macro mark_as_advanced message option ' + 'output_required_files project qt_wrap_cpp qt_wrap_ui remove_definitions return ' + 'separate_arguments set set_directory_properties set_property ' + 'set_source_files_properties set_target_properties set_tests_properties site_name ' + 'source_group string target_link_libraries try_compile try_run unset variable_watch ' + 'while build_name exec_program export_library_dependencies install_files ' + 'install_programs install_targets link_libraries make_directory remove subdir_depends ' + 'subdirs use_mangled_mesa utility_source variable_requires write_file ' + 'qt5_use_modules qt5_use_package qt5_wrap_cpp on off true false and or ' + 'equal less greater strless strgreater strequal matches' }, contains: [ { className: 'variable', begin: '\\${', end: '}' }, hljs.HASH_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE ] }; }; /***/ }, /* 200 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: // JS keywords 'in if for while finally new do return else break catch instanceof throw try this ' + 'switch continue typeof delete debugger super yield import export from as default await ' + // Coffee keywords 'then unless until loop of by when and or is isnt not', literal: // JS literals 'true false null undefined ' + // Coffee literals 'yes no on off', built_in: 'npm require console print module global window document' }; var JS_IDENT_RE = '[A-Za-z$_][0-9A-Za-z$_]*'; var SUBST = { className: 'subst', begin: /#\{/, end: /}/, keywords: KEYWORDS }; var EXPRESSIONS = [ hljs.BINARY_NUMBER_MODE, hljs.inherit(hljs.C_NUMBER_MODE, {starts: {end: '(\\s*/)?', relevance: 0}}), // a number tries to eat the following slash to prevent treating it as a regexp { className: 'string', variants: [ { begin: /'''/, end: /'''/, contains: [hljs.BACKSLASH_ESCAPE] }, { begin: /'/, end: /'/, contains: [hljs.BACKSLASH_ESCAPE] }, { begin: /"""/, end: /"""/, contains: [hljs.BACKSLASH_ESCAPE, SUBST] }, { begin: /"/, end: /"/, contains: [hljs.BACKSLASH_ESCAPE, SUBST] } ] }, { className: 'regexp', variants: [ { begin: '///', end: '///', contains: [SUBST, hljs.HASH_COMMENT_MODE] }, { begin: '//[gim]*', relevance: 0 }, { // regex can't start with space to parse x / 2 / 3 as two divisions // regex can't start with *, and it supports an "illegal" in the main mode begin: /\/(?![ *])(\\\/|.)*?\/[gim]*(?=\W|$)/ } ] }, { begin: '@' + JS_IDENT_RE // relevance booster }, { subLanguage: 'javascript', excludeBegin: true, excludeEnd: true, variants: [ { begin: '```', end: '```', }, { begin: '`', end: '`', } ] } ]; SUBST.contains = EXPRESSIONS; var TITLE = hljs.inherit(hljs.TITLE_MODE, {begin: JS_IDENT_RE}); var PARAMS_RE = '(\\(.*\\))?\\s*\\B[-=]>'; var PARAMS = { className: 'params', begin: '\\([^\\(]', returnBegin: true, /* We need another contained nameless mode to not have every 
nested pair of parens to be called "params" */ contains: [{ begin: /\(/, end: /\)/, keywords: KEYWORDS, contains: ['self'].concat(EXPRESSIONS) }] }; return { aliases: ['coffee', 'cson', 'iced'], keywords: KEYWORDS, illegal: /\/\*/, contains: EXPRESSIONS.concat([ hljs.COMMENT('###', '###'), hljs.HASH_COMMENT_MODE, { className: 'function', begin: '^\\s*' + JS_IDENT_RE + '\\s*=\\s*' + PARAMS_RE, end: '[-=]>', returnBegin: true, contains: [TITLE, PARAMS] }, { // anonymous function start begin: /[:\(,=]\s*/, relevance: 0, contains: [ { className: 'function', begin: PARAMS_RE, end: '[-=]>', returnBegin: true, contains: [PARAMS] } ] }, { className: 'class', beginKeywords: 'class', end: '$', illegal: /[:="\[\]]/, contains: [ { beginKeywords: 'extends', endsWithParent: true, illegal: /[:="\[\]]/, contains: [TITLE] }, TITLE ] }, { begin: JS_IDENT_RE + ':', end: ':', returnBegin: true, returnEnd: true, relevance: 0 } ]) }; }; /***/ }, /* 201 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: '_ as at cofix else end exists exists2 fix for forall fun if IF in let ' + 'match mod Prop return Set then Type using where with ' + 'Abort About Add Admit Admitted All Arguments Assumptions Axiom Back BackTo ' + 'Backtrack Bind Blacklist Canonical Cd Check Class Classes Close Coercion ' + 'Coercions CoFixpoint CoInductive Collection Combined Compute Conjecture ' + 'Conjectures Constant constr Constraint Constructors Context Corollary ' + 'CreateHintDb Cut Declare Defined Definition Delimit Dependencies Dependent' + 'Derive Drop eauto End Equality Eval Example Existential Existentials ' + 'Existing Export exporting Extern Extract Extraction Fact Field Fields File ' + 'Fixpoint Focus for From Function Functional Generalizable Global Goal Grab ' + 'Grammar Graph Guarded Heap Hint HintDb Hints Hypotheses Hypothesis ident ' + 'Identity If Immediate Implicit Import Include Inductive Infix Info Initial ' + 'Inline Inspect Instance Instances Intro Intros Inversion Inversion_clear ' + 'Language Left Lemma Let Libraries Library Load LoadPath Local Locate Ltac ML ' + 'Mode Module Modules Monomorphic Morphism Next NoInline Notation Obligation ' + 'Obligations Opaque Open Optimize Options Parameter Parameters Parametric ' + 'Path Paths pattern Polymorphic Preterm Print Printing Program Projections ' + 'Proof Proposition Pwd Qed Quit Rec Record Recursive Redirect Relation Remark ' + 'Remove Require Reserved Reset Resolve Restart Rewrite Right Ring Rings Save ' + 'Scheme Scope Scopes Script Search SearchAbout SearchHead SearchPattern ' + 'SearchRewrite Section Separate Set Setoid Show Solve Sorted Step Strategies ' + 'Strategy Structure SubClass Table Tables Tactic Term Test Theorem Time ' + 'Timeout Transparent Type Typeclasses Types Undelimit Undo Unfocus Unfocused ' + 'Unfold Universe Universes Unset Unshelve using Variable Variables Variant ' + 'Verbose Visibility where with', built_in: 'abstract absurd admit after apply as assert assumption at auto autorewrite ' + 'autounfold before bottom btauto by case case_eq cbn cbv change ' + 'classical_left classical_right clear clearbody cofix compare compute ' + 'congruence constr_eq constructor contradict contradiction cut cutrewrite ' + 'cycle decide decompose dependent destruct destruction dintuition ' + 'discriminate discrR do double dtauto eapply eassumption eauto ecase ' + 'econstructor edestruct ediscriminate eelim eexact eexists einduction ' + 'einjection eleft elim elimtype enough equality erewrite eright ' + 'esimplify_eq 
esplit evar exact exactly_once exfalso exists f_equal fail ' + 'field field_simplify field_simplify_eq first firstorder fix fold fourier ' + 'functional generalize generalizing gfail give_up has_evar hnf idtac in ' + 'induction injection instantiate intro intro_pattern intros intuition ' + 'inversion inversion_clear is_evar is_var lapply lazy left lia lra move ' + 'native_compute nia nsatz omega once pattern pose progress proof psatz quote ' + 'record red refine reflexivity remember rename repeat replace revert ' + 'revgoals rewrite rewrite_strat right ring ring_simplify rtauto set ' + 'setoid_reflexivity setoid_replace setoid_rewrite setoid_symmetry ' + 'setoid_transitivity shelve shelve_unifiable simpl simple simplify_eq solve ' + 'specialize split split_Rabs split_Rmult stepl stepr subst sum swap ' + 'symmetry tactic tauto time timeout top transitivity trivial try tryif ' + 'unfold unify until using vm_compute with' }, contains: [ hljs.QUOTE_STRING_MODE, hljs.COMMENT('\\(\\*', '\\*\\)'), hljs.C_NUMBER_MODE, { className: 'type', excludeBegin: true, begin: '\\|\\s*', end: '\\w+' }, {begin: /[-=]>/} // relevance booster ] }; }; /***/ }, /* 202 */ /***/ function(module, exports) { module.exports = function cos (hljs) { var STRINGS = { className: 'string', variants: [ { begin: '"', end: '"', contains: [{ // escaped begin: "\"\"", relevance: 0 }] } ] }; var NUMBERS = { className: "number", begin: "\\b(\\d+(\\.\\d*)?|\\.\\d+)", relevance: 0 }; var COS_KEYWORDS = 'property parameter class classmethod clientmethod extends as break ' + 'catch close continue do d|0 else elseif for goto halt hang h|0 if job ' + 'j|0 kill k|0 lock l|0 merge new open quit q|0 read r|0 return set s|0 ' + 'tcommit throw trollback try tstart use view while write w|0 xecute x|0 ' + 'zkill znspace zn ztrap zwrite zw zzdump zzwrite print zbreak zinsert ' + 'zload zprint zremove zsave zzprint mv mvcall mvcrt mvdim mvprint zquit ' + 'zsync ascii'; // registered function - no need in them due to all functions are highlighted, // but I'll just leave this here. 
//"$bit", "$bitcount", //"$bitfind", "$bitlogic", "$case", "$char", "$classmethod", "$classname", //"$compile", "$data", "$decimal", "$double", "$extract", "$factor", //"$find", "$fnumber", "$get", "$increment", "$inumber", "$isobject", //"$isvaliddouble", "$isvalidnum", "$justify", "$length", "$list", //"$listbuild", "$listdata", "$listfind", "$listfromstring", "$listget", //"$listlength", "$listnext", "$listsame", "$listtostring", "$listvalid", //"$locate", "$match", "$method", "$name", "$nconvert", "$next", //"$normalize", "$now", "$number", "$order", "$parameter", "$piece", //"$prefetchoff", "$prefetchon", "$property", "$qlength", "$qsubscript", //"$query", "$random", "$replace", "$reverse", "$sconvert", "$select", //"$sortbegin", "$sortend", "$stack", "$text", "$translate", "$view", //"$wascii", "$wchar", "$wextract", "$wfind", "$wiswide", "$wlength", //"$wreverse", "$xecute", "$zabs", "$zarccos", "$zarcsin", "$zarctan", //"$zcos", "$zcot", "$zcsc", "$zdate", "$zdateh", "$zdatetime", //"$zdatetimeh", "$zexp", "$zhex", "$zln", "$zlog", "$zpower", "$zsec", //"$zsin", "$zsqr", "$ztan", "$ztime", "$ztimeh", "$zboolean", //"$zconvert", "$zcrc", "$zcyc", "$zdascii", "$zdchar", "$zf", //"$ziswide", "$zlascii", "$zlchar", "$zname", "$zposition", "$zqascii", //"$zqchar", "$zsearch", "$zseek", "$zstrip", "$zwascii", "$zwchar", //"$zwidth", "$zwpack", "$zwbpack", "$zwunpack", "$zwbunpack", "$zzenkaku", //"$change", "$mv", "$mvat", "$mvfmt", "$mvfmts", "$mviconv", //"$mviconvs", "$mvinmat", "$mvlover", "$mvoconv", "$mvoconvs", "$mvraise", //"$mvtrans", "$mvv", "$mvname", "$zbitand", "$zbitcount", "$zbitfind", //"$zbitget", "$zbitlen", "$zbitnot", "$zbitor", "$zbitset", "$zbitstr", //"$zbitxor", "$zincrement", "$znext", "$zorder", "$zprevious", "$zsort", //"device", "$ecode", "$estack", "$etrap", "$halt", "$horolog", //"$io", "$job", "$key", "$namespace", "$principal", "$quit", "$roles", //"$storage", "$system", "$test", "$this", "$tlevel", "$username", //"$x", "$y", "$za", "$zb", "$zchild", "$zeof", "$zeos", "$zerror", //"$zhorolog", "$zio", "$zjob", "$zmode", "$znspace", "$zparent", "$zpi", //"$zpos", "$zreference", "$zstorage", "$ztimestamp", "$ztimezone", //"$ztrap", "$zversion" return { case_insensitive: true, aliases: ["cos", "cls"], keywords: COS_KEYWORDS, contains: [ NUMBERS, STRINGS, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: "comment", begin: /;/, end: "$", relevance: 0 }, { // Functions and user-defined functions: write $ztime(60*60*3), $$myFunc(10), $$^Val(1) className: "built_in", begin: /(?:\$\$?|\.\.)\^?[a-zA-Z]+/ }, { // Macro command: quit $$$OK className: "built_in", begin: /\$\$\$[a-zA-Z]+/ }, { // Special (global) variables: write %request.Content; Built-in classes: %Library.Integer className: "built_in", begin: /%[a-z]+(?:\.[a-z]+)*/ }, { // Global variable: set ^globalName = 12 write ^globalName className: "symbol", begin: /\^%?[a-zA-Z][\w]*/ }, { // Some control constructions: do ##class(Package.ClassName).Method(), ##super() className: "keyword", begin: /##class|##super|#define|#dim/ }, // sub-languages: are not fully supported by hljs by 11/15/2015 // left for the future implementation. 
{ begin: /&sql\(/, end: /\)/, excludeBegin: true, excludeEnd: true, subLanguage: "sql" }, { begin: /&(js|jscript|javascript)</, end: />/, excludeBegin: true, excludeEnd: true, subLanguage: "javascript" }, { // this brakes first and last tag, but this is the only way to embed a valid html begin: /&html<\s*</, end: />\s*>/, subLanguage: "xml" } ] }; }; /***/ }, /* 203 */ /***/ function(module, exports) { module.exports = function(hljs) { var RESOURCES = 'primitive rsc_template'; var COMMANDS = 'group clone ms master location colocation order fencing_topology ' + 'rsc_ticket acl_target acl_group user role ' + 'tag xml'; var PROPERTY_SETS = 'property rsc_defaults op_defaults'; var KEYWORDS = 'params meta operations op rule attributes utilization'; var OPERATORS = 'read write deny defined not_defined in_range date spec in ' + 'ref reference attribute type xpath version and or lt gt tag ' + 'lte gte eq ne \\'; var TYPES = 'number string'; var LITERALS = 'Master Started Slave Stopped start promote demote stop monitor true false'; return { aliases: ['crm', 'pcmk'], case_insensitive: true, keywords: { keyword: KEYWORDS + ' ' + OPERATORS + ' ' + TYPES, literal: LITERALS }, contains: [ hljs.HASH_COMMENT_MODE, { beginKeywords: 'node', starts: { end: '\\s*([\\w_-]+:)?', starts: { className: 'title', end: '\\s*[\\$\\w_][\\w_-]*' } } }, { beginKeywords: RESOURCES, starts: { className: 'title', end: '\\s*[\\$\\w_][\\w_-]*', starts: { end: '\\s*@?[\\w_][\\w_\\.:-]*' } } }, { begin: '\\b(' + COMMANDS.split(' ').join('|') + ')\\s+', keywords: COMMANDS, starts: { className: 'title', end: '[\\$\\w_][\\w_-]*' } }, { beginKeywords: PROPERTY_SETS, starts: { className: 'title', end: '\\s*([\\w_-]+:)?' } }, hljs.QUOTE_STRING_MODE, { className: 'meta', begin: '(ocf|systemd|service|lsb):[\\w_:-]+', relevance: 0 }, { className: 'number', begin: '\\b\\d+(\\.\\d+)?(ms|s|h|m)?', relevance: 0 }, { className: 'literal', begin: '[-]?(infinity|inf)', relevance: 0 }, { className: 'attr', begin: /([A-Za-z\$_\#][\w_-]+)=/, relevance: 0 }, { className: 'tag', begin: '</?', end: '/?>', relevance: 0 } ] }; }; /***/ }, /* 204 */ /***/ function(module, exports) { module.exports = function(hljs) { var NUM_SUFFIX = '(_[uif](8|16|32|64))?'; var CRYSTAL_IDENT_RE = '[a-zA-Z_]\\w*[!?=]?'; var RE_STARTER = '!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|' + '>>|>|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~'; var CRYSTAL_METHOD_RE = '[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\][=?]?'; var CRYSTAL_KEYWORDS = { keyword: 'abstract alias as asm begin break case class def do else elsif end ensure enum extend for fun if ifdef ' + 'include instance_sizeof is_a? lib macro module next of out pointerof private protected rescue responds_to? 
' + 'return require self sizeof struct super then type typeof union unless until when while with yield ' + '__DIR__ __FILE__ __LINE__', literal: 'false nil true' }; var SUBST = { className: 'subst', begin: '#{', end: '}', keywords: CRYSTAL_KEYWORDS }; var EXPANSION = { className: 'template-variable', variants: [ {begin: '\\{\\{', end: '\\}\\}'}, {begin: '\\{%', end: '%\\}'} ], keywords: CRYSTAL_KEYWORDS }; function recursiveParen(begin, end) { var contains = [{begin: begin, end: end}]; contains[0].contains = contains; return contains; } var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ {begin: /'/, end: /'/}, {begin: /"/, end: /"/}, {begin: /`/, end: /`/}, {begin: '%w?\\(', end: '\\)', contains: recursiveParen('\\(', '\\)')}, {begin: '%w?\\[', end: '\\]', contains: recursiveParen('\\[', '\\]')}, {begin: '%w?{', end: '}', contains: recursiveParen('{', '}')}, {begin: '%w?<', end: '>', contains: recursiveParen('<', '>')}, {begin: '%w?/', end: '/'}, {begin: '%w?%', end: '%'}, {begin: '%w?-', end: '-'}, {begin: '%w?\\|', end: '\\|'}, ], relevance: 0, }; var REGEXP = { begin: '(' + RE_STARTER + ')\\s*', contains: [ { className: 'regexp', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ {begin: '//[a-z]*', relevance: 0}, {begin: '/', end: '/[a-z]*'}, {begin: '%r\\(', end: '\\)', contains: recursiveParen('\\(', '\\)')}, {begin: '%r\\[', end: '\\]', contains: recursiveParen('\\[', '\\]')}, {begin: '%r{', end: '}', contains: recursiveParen('{', '}')}, {begin: '%r<', end: '>', contains: recursiveParen('<', '>')}, {begin: '%r/', end: '/'}, {begin: '%r%', end: '%'}, {begin: '%r-', end: '-'}, {begin: '%r\\|', end: '\\|'}, ] } ], relevance: 0 }; var REGEXP2 = { className: 'regexp', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ {begin: '%r\\(', end: '\\)', contains: recursiveParen('\\(', '\\)')}, {begin: '%r\\[', end: '\\]', contains: recursiveParen('\\[', '\\]')}, {begin: '%r{', end: '}', contains: recursiveParen('{', '}')}, {begin: '%r<', end: '>', contains: recursiveParen('<', '>')}, {begin: '%r/', end: '/'}, {begin: '%r%', end: '%'}, {begin: '%r-', end: '-'}, {begin: '%r\\|', end: '\\|'}, ], relevance: 0 }; var ATTRIBUTE = { className: 'meta', begin: '@\\[', end: '\\]', contains: [ hljs.inherit(hljs.QUOTE_STRING_MODE, {className: 'meta-string'}) ] }; var CRYSTAL_DEFAULT_CONTAINS = [ EXPANSION, STRING, REGEXP, REGEXP2, ATTRIBUTE, hljs.HASH_COMMENT_MODE, { className: 'class', beginKeywords: 'class module struct', end: '$|;', illegal: /=/, contains: [ hljs.HASH_COMMENT_MODE, hljs.inherit(hljs.TITLE_MODE, {begin: '[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?'}), {begin: '<'} // relevance booster for inheritance ] }, { className: 'class', beginKeywords: 'lib enum union', end: '$|;', illegal: /=/, contains: [ hljs.HASH_COMMENT_MODE, hljs.inherit(hljs.TITLE_MODE, {begin: '[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?'}), ], relevance: 10 }, { className: 'function', beginKeywords: 'def', end: /\B\b/, contains: [ hljs.inherit(hljs.TITLE_MODE, { begin: CRYSTAL_METHOD_RE, endsParent: true }) ] }, { className: 'function', beginKeywords: 'fun macro', end: /\B\b/, contains: [ hljs.inherit(hljs.TITLE_MODE, { begin: CRYSTAL_METHOD_RE, endsParent: true }) ], relevance: 5 }, { className: 'symbol', begin: hljs.UNDERSCORE_IDENT_RE + '(\\!|\\?)?:', relevance: 0 }, { className: 'symbol', begin: ':', contains: [STRING, {begin: CRYSTAL_METHOD_RE}], relevance: 0 }, { className: 'number', variants: [ { begin: '\\b0b([01_]*[01])' + NUM_SUFFIX }, { begin: '\\b0o([0-7_]*[0-7])' + NUM_SUFFIX }, { begin: 
'\\b0x([A-Fa-f0-9_]*[A-Fa-f0-9])' + NUM_SUFFIX }, { begin: '\\b(([0-9][0-9_]*[0-9]|[0-9])(\\.[0-9_]*[0-9])?([eE][+-]?[0-9_]*[0-9])?)' + NUM_SUFFIX} ], relevance: 0 } ]; SUBST.contains = CRYSTAL_DEFAULT_CONTAINS; EXPANSION.contains = CRYSTAL_DEFAULT_CONTAINS.slice(1); // without EXPANSION return { aliases: ['cr'], lexemes: CRYSTAL_IDENT_RE, keywords: CRYSTAL_KEYWORDS, contains: CRYSTAL_DEFAULT_CONTAINS }; }; /***/ }, /* 205 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: // Normal keywords. 'abstract as base bool break byte case catch char checked const continue decimal ' + 'default delegate do double else enum event explicit extern finally fixed float ' + 'for foreach goto if implicit in int interface internal is lock long ' + 'object operator out override params private protected public readonly ref sbyte ' + 'sealed short sizeof stackalloc static string struct switch this try typeof ' + 'uint ulong unchecked unsafe ushort using virtual void volatile while ' + 'nameof ' + // Contextual keywords. 'add alias ascending async await by descending dynamic equals from get global group into join ' + 'let on orderby partial remove select set value var where yield', literal: 'null false true' }; var VERBATIM_STRING = { className: 'string', begin: '@"', end: '"', contains: [{begin: '""'}] }; var VERBATIM_STRING_NO_LF = hljs.inherit(VERBATIM_STRING, {illegal: /\n/}); var SUBST = { className: 'subst', begin: '{', end: '}', keywords: KEYWORDS }; var SUBST_NO_LF = hljs.inherit(SUBST, {illegal: /\n/}); var INTERPOLATED_STRING = { className: 'string', begin: /\$"/, end: '"', illegal: /\n/, contains: [{begin: '{{'}, {begin: '}}'}, hljs.BACKSLASH_ESCAPE, SUBST_NO_LF] }; var INTERPOLATED_VERBATIM_STRING = { className: 'string', begin: /\$@"/, end: '"', contains: [{begin: '{{'}, {begin: '}}'}, {begin: '""'}, SUBST] }; var INTERPOLATED_VERBATIM_STRING_NO_LF = hljs.inherit(INTERPOLATED_VERBATIM_STRING, { illegal: /\n/, contains: [{begin: '{{'}, {begin: '}}'}, {begin: '""'}, SUBST_NO_LF] }); SUBST.contains = [ INTERPOLATED_VERBATIM_STRING, INTERPOLATED_STRING, VERBATIM_STRING, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE ]; SUBST_NO_LF.contains = [ INTERPOLATED_VERBATIM_STRING_NO_LF, INTERPOLATED_STRING, VERBATIM_STRING_NO_LF, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, hljs.inherit(hljs.C_BLOCK_COMMENT_MODE, {illegal: /\n/}) ]; var STRING = { variants: [ INTERPOLATED_VERBATIM_STRING, INTERPOLATED_STRING, VERBATIM_STRING, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] }; var TYPE_IDENT_RE = hljs.IDENT_RE + '(<' + hljs.IDENT_RE + '(\\s*,\\s*' + hljs.IDENT_RE + ')*>)?(\\[\\])?'; return { aliases: ['csharp'], keywords: KEYWORDS, illegal: /::/, contains: [ hljs.COMMENT( '///', '$', { returnBegin: true, contains: [ { className: 'doctag', variants: [ { begin: '///', relevance: 0 }, { begin: '<!--|-->' }, { begin: '</?', end: '>' } ] } ] } ), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'meta', begin: '#', end: '$', keywords: {'meta-keyword': 'if else elif endif define undef warning error line region endregion pragma checksum'} }, STRING, hljs.C_NUMBER_MODE, { beginKeywords: 'class interface', end: /[{;=]/, illegal: /[^\s:]/, contains: [ hljs.TITLE_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, { beginKeywords: 'namespace', end: /[{;=]/, illegal: /[^\s:]/, contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: '[a-zA-Z](\\.?\\w)*'}), hljs.C_LINE_COMMENT_MODE, 
hljs.C_BLOCK_COMMENT_MODE ] }, { // Expression keywords prevent 'keyword Name(...)' from being // recognized as a function definition beginKeywords: 'new return throw await', relevance: 0 }, { className: 'function', begin: '(' + TYPE_IDENT_RE + '\\s+)+' + hljs.IDENT_RE + '\\s*\\(', returnBegin: true, end: /[{;=]/, excludeEnd: true, keywords: KEYWORDS, contains: [ { begin: hljs.IDENT_RE + '\\s*\\(', returnBegin: true, contains: [hljs.TITLE_MODE], relevance: 0 }, { className: 'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, keywords: KEYWORDS, relevance: 0, contains: [ STRING, hljs.C_NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] } ] }; }; /***/ }, /* 206 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: false, lexemes: '[a-zA-Z][a-zA-Z0-9_-]*', keywords: { keyword: 'base-uri child-src connect-src default-src font-src form-action' + ' frame-ancestors frame-src img-src media-src object-src plugin-types' + ' report-uri sandbox script-src style-src', }, contains: [ { className: 'string', begin: "'", end: "'" }, { className: 'attribute', begin: '^Content', end: ':', excludeEnd: true, }, ] }; }; /***/ }, /* 207 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '[a-zA-Z-][a-zA-Z0-9_-]*'; var RULE = { begin: /[A-Z\_\.\-]+\s*:/, returnBegin: true, end: ';', endsWithParent: true, contains: [ { className: 'attribute', begin: /\S/, end: ':', excludeEnd: true, starts: { endsWithParent: true, excludeEnd: true, contains: [ { begin: /[\w-]+\(/, returnBegin: true, contains: [ { className: 'built_in', begin: /[\w-]+/ }, { begin: /\(/, end: /\)/, contains: [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] } ] }, hljs.CSS_NUMBER_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'number', begin: '#[0-9A-Fa-f]+' }, { className: 'meta', begin: '!important' } ] } } ] }; return { case_insensitive: true, illegal: /[=\/|'\$]/, contains: [ hljs.C_BLOCK_COMMENT_MODE, { className: 'selector-id', begin: /#[A-Za-z0-9_-]+/ }, { className: 'selector-class', begin: /\.[A-Za-z0-9_-]+/ }, { className: 'selector-attr', begin: /\[/, end: /\]/, illegal: '$' }, { className: 'selector-pseudo', begin: /:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/ }, { begin: '@(font-face|page)', lexemes: '[a-z-]+', keywords: 'font-face page' }, { begin: '@', end: '[{;]', // at_rule eating first "{" is a good thing // because it doesn’t let it to be parsed as // a rule set but instead drops parser into // the default mode which is how it should be. illegal: /:/, // break on Less variables @var: ... contains: [ { className: 'keyword', begin: /\w+/ }, { begin: /\s/, endsWithParent: true, excludeEnd: true, relevance: 0, contains: [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.CSS_NUMBER_MODE ] } ] }, { className: 'selector-tag', begin: IDENT_RE, relevance: 0 }, { begin: '{', end: '}', illegal: /\S/, contains: [ hljs.C_BLOCK_COMMENT_MODE, RULE, ] } ] }; }; /***/ }, /* 208 */ /***/ function(module, exports) { module.exports = /** * Known issues: * * - invalid hex string literals will be recognized as a double quoted strings * but 'x' at the beginning of string will not be matched * * - delimited string literals are not checked for matching end delimiter * (not possible to do with js regexp) * * - content of token string is colored as a string (i.e. 
no keyword coloring inside a token string) * also, content of token string is not validated to contain only valid D tokens * * - special token sequence rule is not strictly following D grammar (anything following #line * up to the end of line is matched as special token sequence) */ function(hljs) { /** * Language keywords * * @type {Object} */ var D_KEYWORDS = { keyword: 'abstract alias align asm assert auto body break byte case cast catch class ' + 'const continue debug default delete deprecated do else enum export extern final ' + 'finally for foreach foreach_reverse|10 goto if immutable import in inout int ' + 'interface invariant is lazy macro mixin module new nothrow out override package ' + 'pragma private protected public pure ref return scope shared static struct ' + 'super switch synchronized template this throw try typedef typeid typeof union ' + 'unittest version void volatile while with __FILE__ __LINE__ __gshared|10 ' + '__thread __traits __DATE__ __EOF__ __TIME__ __TIMESTAMP__ __VENDOR__ __VERSION__', built_in: 'bool cdouble cent cfloat char creal dchar delegate double dstring float function ' + 'idouble ifloat ireal long real short string ubyte ucent uint ulong ushort wchar ' + 'wstring', literal: 'false null true' }; /** * Number literal regexps * * @type {String} */ var decimal_integer_re = '(0|[1-9][\\d_]*)', decimal_integer_nosus_re = '(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)', binary_integer_re = '0[bB][01_]+', hexadecimal_digits_re = '([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)', hexadecimal_integer_re = '0[xX]' + hexadecimal_digits_re, decimal_exponent_re = '([eE][+-]?' + decimal_integer_nosus_re + ')', decimal_float_re = '(' + decimal_integer_nosus_re + '(\\.\\d*|' + decimal_exponent_re + ')|' + '\\d+\\.' + decimal_integer_nosus_re + decimal_integer_nosus_re + '|' + '\\.' + decimal_integer_re + decimal_exponent_re + '?' + ')', hexadecimal_float_re = '(0[xX](' + hexadecimal_digits_re + '\\.' + hexadecimal_digits_re + '|'+ '\\.?' + hexadecimal_digits_re + ')[pP][+-]?' + decimal_integer_nosus_re + ')', integer_re = '(' + decimal_integer_re + '|' + binary_integer_re + '|' + hexadecimal_integer_re + ')', float_re = '(' + hexadecimal_float_re + '|' + decimal_float_re + ')'; /** * Escape sequence supported in D string and character literals * * @type {String} */ var escape_sequence_re = '\\\\(' + '[\'"\\?\\\\abfnrtv]|' + // common escapes 'u[\\dA-Fa-f]{4}|' + // four hex digit unicode codepoint '[0-7]{1,3}|' + // one to three octal digit ascii char code 'x[\\dA-Fa-f]{2}|' + // two hex digit ascii char code 'U[\\dA-Fa-f]{8}' + // eight hex digit unicode codepoint ')|' + '&[a-zA-Z\\d]{2,};'; // named character entity /** * D integer number literals * * @type {Object} */ var D_INTEGER_MODE = { className: 'number', begin: '\\b' + integer_re + '(L|u|U|Lu|LU|uL|UL)?', relevance: 0 }; /** * [D_FLOAT_MODE description] * @type {Object} */ var D_FLOAT_MODE = { className: 'number', begin: '\\b(' + float_re + '([fF]|L|i|[fF]i|Li)?|' + integer_re + '(i|[fF]i|Li)' + ')', relevance: 0 }; /** * D character literal * * @type {Object} */ var D_CHARACTER_MODE = { className: 'string', begin: '\'(' + escape_sequence_re + '|.)', end: '\'', illegal: '.' }; /** * D string escape sequence * * @type {Object} */ var D_ESCAPE_SEQUENCE = { begin: escape_sequence_re, relevance: 0 }; /** * D double quoted string literal * * @type {Object} */ var D_STRING_MODE = { className: 'string', begin: '"', contains: [D_ESCAPE_SEQUENCE], end: '"[cwd]?' 
}; /** * D wysiwyg and delimited string literals * * @type {Object} */ var D_WYSIWYG_DELIMITED_STRING_MODE = { className: 'string', begin: '[rq]"', end: '"[cwd]?', relevance: 5 }; /** * D alternate wysiwyg string literal * * @type {Object} */ var D_ALTERNATE_WYSIWYG_STRING_MODE = { className: 'string', begin: '`', end: '`[cwd]?' }; /** * D hexadecimal string literal * * @type {Object} */ var D_HEX_STRING_MODE = { className: 'string', begin: 'x"[\\da-fA-F\\s\\n\\r]*"[cwd]?', relevance: 10 }; /** * D delimited string literal * * @type {Object} */ var D_TOKEN_STRING_MODE = { className: 'string', begin: 'q"\\{', end: '\\}"' }; /** * Hashbang support * * @type {Object} */ var D_HASHBANG_MODE = { className: 'meta', begin: '^#!', end: '$', relevance: 5 }; /** * D special token sequence * * @type {Object} */ var D_SPECIAL_TOKEN_SEQUENCE_MODE = { className: 'meta', begin: '#(line)', end: '$', relevance: 5 }; /** * D attributes * * @type {Object} */ var D_ATTRIBUTE_MODE = { className: 'keyword', begin: '@[a-zA-Z_][a-zA-Z_\\d]*' }; /** * D nesting comment * * @type {Object} */ var D_NESTING_COMMENT_MODE = hljs.COMMENT( '\\/\\+', '\\+\\/', { contains: ['self'], relevance: 10 } ); return { lexemes: hljs.UNDERSCORE_IDENT_RE, keywords: D_KEYWORDS, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, D_NESTING_COMMENT_MODE, D_HEX_STRING_MODE, D_STRING_MODE, D_WYSIWYG_DELIMITED_STRING_MODE, D_ALTERNATE_WYSIWYG_STRING_MODE, D_TOKEN_STRING_MODE, D_FLOAT_MODE, D_INTEGER_MODE, D_CHARACTER_MODE, D_HASHBANG_MODE, D_SPECIAL_TOKEN_SEQUENCE_MODE, D_ATTRIBUTE_MODE ] }; }; /***/ }, /* 209 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['md', 'mkdown', 'mkd'], contains: [ // highlight headers { className: 'section', variants: [ { begin: '^#{1,6}', end: '$' }, { begin: '^.+?\\n[=-]{2,}$' } ] }, // inline html { begin: '<', end: '>', subLanguage: 'xml', relevance: 0 }, // lists (indicators only) { className: 'bullet', begin: '^([*+-]|(\\d+\\.))\\s+' }, // strong segments { className: 'strong', begin: '[*_]{2}.+?[*_]{2}' }, // emphasis segments { className: 'emphasis', variants: [ { begin: '\\*.+?\\*' }, { begin: '_.+?_' , relevance: 0 } ] }, // blockquotes { className: 'quote', begin: '^>\\s+', end: '$' }, // code snippets { className: 'code', variants: [ { begin: '^```\w*\s*$', end: '^```\s*$' }, { begin: '`.+?`' }, { begin: '^( {4}|\t)', end: '$', relevance: 0 } ] }, // horizontal rules { begin: '^[-\\*]{3,}', end: '$' }, // using links - title and link { begin: '\\[.+?\\][\\(\\[].*?[\\)\\]]', returnBegin: true, contains: [ { className: 'string', begin: '\\[', end: '\\]', excludeBegin: true, returnEnd: true, relevance: 0 }, { className: 'link', begin: '\\]\\(', end: '\\)', excludeBegin: true, excludeEnd: true }, { className: 'symbol', begin: '\\]\\[', end: '\\]', excludeBegin: true, excludeEnd: true } ], relevance: 10 }, { begin: /^\[[^\n]+\]:/, returnBegin: true, contains: [ { className: 'symbol', begin: /\[/, end: /\]/, excludeBegin: true, excludeEnd: true }, { className: 'link', begin: /:\s*/, end: /$/, excludeBegin: true } ] } ] }; }; /***/ }, /* 210 */ /***/ function(module, exports) { module.exports = function (hljs) { var SUBST = { className: 'subst', begin: '\\$\\{', end: '}', keywords: 'true false null this is new super' }; var STRING = { className: 'string', variants: [ { begin: 'r\'\'\'', end: '\'\'\'' }, { begin: 'r"""', end: '"""' }, { begin: 'r\'', end: '\'', illegal: '\\n' }, { begin: 'r"', end: '"', illegal: '\\n' }, { begin: '\'\'\'', end: 
'\'\'\'', contains: [hljs.BACKSLASH_ESCAPE, SUBST] }, { begin: '"""', end: '"""', contains: [hljs.BACKSLASH_ESCAPE, SUBST] }, { begin: '\'', end: '\'', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE, SUBST] }, { begin: '"', end: '"', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE, SUBST] } ] }; SUBST.contains = [ hljs.C_NUMBER_MODE, STRING ]; var KEYWORDS = { keyword: 'assert async await break case catch class const continue default do else enum extends false final ' + 'finally for if in is new null rethrow return super switch sync this throw true try var void while with yield ' + 'abstract as dynamic export external factory get implements import library operator part set static typedef', built_in: // dart:core 'print Comparable DateTime Duration Function Iterable Iterator List Map Match Null Object Pattern RegExp Set ' + 'Stopwatch String StringBuffer StringSink Symbol Type Uri bool double int num ' + // dart:html 'document window querySelector querySelectorAll Element ElementList' }; return { keywords: KEYWORDS, contains: [ STRING, hljs.COMMENT( '/\\*\\*', '\\*/', { subLanguage: 'markdown' } ), hljs.COMMENT( '///', '$', { subLanguage: 'markdown' } ), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'class', beginKeywords: 'class interface', end: '{', excludeEnd: true, contains: [ { beginKeywords: 'extends implements' }, hljs.UNDERSCORE_TITLE_MODE ] }, hljs.C_NUMBER_MODE, { className: 'meta', begin: '@[A-Za-z]+' }, { begin: '=>' // No markup, just a relevance booster } ] } }; /***/ }, /* 211 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = 'exports register file shl array record property for mod while set ally label uses raise not ' + 'stored class safecall var interface or private static exit index inherited to else stdcall ' + 'override shr asm far resourcestring finalization packed virtual out and protected library do ' + 'xorwrite goto near function end div overload object unit begin string on inline repeat until ' + 'destructor write message program with read initialization except default nil if case cdecl in ' + 'downto threadvar of try pascal const external constructor type public then implementation ' + 'finally published procedure absolute reintroduce operator as is abstract alias assembler ' + 'bitpacked break continue cppdecl cvar enumerator experimental platform deprecated ' + 'unimplemented dynamic export far16 forward generic helper implements interrupt iochecks ' + 'local name nodefault noreturn nostackframe oldfpccall otherwise saveregisters softfloat ' + 'specialize strict unaligned varargs '; var COMMENT_MODES = [ hljs.C_LINE_COMMENT_MODE, hljs.COMMENT(/\{/, /\}/, {relevance: 0}), hljs.COMMENT(/\(\*/, /\*\)/, {relevance: 10}) ]; var DIRECTIVE = { className: 'meta', variants: [ {begin: /\{\$/, end: /\}/}, {begin: /\(\*\$/, end: /\*\)/} ] }; var STRING = { className: 'string', begin: /'/, end: /'/, contains: [{begin: /''/}] }; var CHAR_STRING = { className: 'string', begin: /(#\d+)+/ }; var CLASS = { begin: hljs.IDENT_RE + '\\s*=\\s*class\\s*\\(', returnBegin: true, contains: [ hljs.TITLE_MODE ] }; var FUNCTION = { className: 'function', beginKeywords: 'function constructor destructor procedure', end: /[:;]/, keywords: 'function constructor|10 destructor|10 procedure|10', contains: [ hljs.TITLE_MODE, { className: 'params', begin: /\(/, end: /\)/, keywords: KEYWORDS, contains: [STRING, CHAR_STRING, DIRECTIVE].concat(COMMENT_MODES) }, DIRECTIVE ].concat(COMMENT_MODES) }; return { aliases: ['dpr', 'dfm', 'pas', 
'pascal', 'freepascal', 'lazarus', 'lpr', 'lfm'], case_insensitive: true, keywords: KEYWORDS, illegal: /"|\$[G-Zg-z]|\/\*|<\/|\|/, contains: [ STRING, CHAR_STRING, hljs.NUMBER_MODE, CLASS, FUNCTION, DIRECTIVE ].concat(COMMENT_MODES) }; }; /***/ }, /* 212 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['patch'], contains: [ { className: 'meta', relevance: 10, variants: [ {begin: /^@@ +\-\d+,\d+ +\+\d+,\d+ +@@$/}, {begin: /^\*\*\* +\d+,\d+ +\*\*\*\*$/}, {begin: /^\-\-\- +\d+,\d+ +\-\-\-\-$/} ] }, { className: 'comment', variants: [ {begin: /Index: /, end: /$/}, {begin: /={3,}/, end: /$/}, {begin: /^\-{3}/, end: /$/}, {begin: /^\*{3} /, end: /$/}, {begin: /^\+{3}/, end: /$/}, {begin: /\*{5}/, end: /\*{5}$/} ] }, { className: 'addition', begin: '^\\+', end: '$' }, { className: 'deletion', begin: '^\\-', end: '$' }, { className: 'addition', begin: '^\\!', end: '$' } ] }; }; /***/ }, /* 213 */ /***/ function(module, exports) { module.exports = function(hljs) { var FILTER = { begin: /\|[A-Za-z]+:?/, keywords: { name: 'truncatewords removetags linebreaksbr yesno get_digit timesince random striptags ' + 'filesizeformat escape linebreaks length_is ljust rjust cut urlize fix_ampersands ' + 'title floatformat capfirst pprint divisibleby add make_list unordered_list urlencode ' + 'timeuntil urlizetrunc wordcount stringformat linenumbers slice date dictsort ' + 'dictsortreversed default_if_none pluralize lower join center default ' + 'truncatewords_html upper length phone2numeric wordwrap time addslashes slugify first ' + 'escapejs force_escape iriencode last safe safeseq truncatechars localize unlocalize ' + 'localtime utc timezone' }, contains: [ hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE ] }; return { aliases: ['jinja'], case_insensitive: true, subLanguage: 'xml', contains: [ hljs.COMMENT(/\{%\s*comment\s*%}/, /\{%\s*endcomment\s*%}/), hljs.COMMENT(/\{#/, /#}/), { className: 'template-tag', begin: /\{%/, end: /%}/, contains: [ { className: 'name', begin: /\w+/, keywords: { name: 'comment endcomment load templatetag ifchanged endifchanged if endif firstof for ' + 'endfor ifnotequal endifnotequal widthratio extends include spaceless ' + 'endspaceless regroup ifequal endifequal ssi now with cycle url filter ' + 'endfilter debug block endblock else autoescape endautoescape csrf_token empty elif ' + 'endwith static trans blocktrans endblocktrans get_static_prefix get_media_prefix ' + 'plural get_current_language language get_available_languages ' + 'get_current_language_bidi get_language_info get_language_info_list localize ' + 'endlocalize localtime endlocaltime timezone endtimezone get_current_timezone ' + 'verbatim' }, starts: { endsWithParent: true, keywords: 'in by as', contains: [FILTER], relevance: 0 } } ] }, { className: 'template-variable', begin: /\{\{/, end: /}}/, contains: [FILTER] } ] }; }; /***/ }, /* 214 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['bind', 'zone'], keywords: { keyword: 'IN A AAAA AFSDB APL CAA CDNSKEY CDS CERT CNAME DHCID DLV DNAME DNSKEY DS HIP IPSECKEY KEY KX ' + 'LOC MX NAPTR NS NSEC NSEC3 NSEC3PARAM PTR RRSIG RP SIG SOA SRV SSHFP TA TKEY TLSA TSIG TXT' }, contains: [ hljs.COMMENT(';', '$', {relevance: 0}), { className: 'meta', begin: /^\$(TTL|GENERATE|INCLUDE|ORIGIN)\b/ }, // IPv6 { className: 'number', begin: 
'((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:)))\\b' }, // IPv4 { className: 'number', begin: '((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\b' }, hljs.inherit(hljs.NUMBER_MODE, {begin: /\b\d+[dhwm]?/}) ] }; }; /***/ }, /* 215 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['docker'], case_insensitive: true, keywords: 'from maintainer expose env user onbuild', contains: [ hljs.HASH_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE, { beginKeywords: 'run cmd entrypoint volume add copy workdir label healthcheck', starts: { end: /[^\\]\n/, subLanguage: 'bash' } } ], illegal: '</' } }; /***/ }, /* 216 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMENT = hljs.COMMENT( /^\s*@?rem\b/, /$/, { relevance: 10 } ); var LABEL = { className: 'symbol', begin: '^\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\s+label)', relevance: 0 }; return { aliases: ['bat', 'cmd'], case_insensitive: true, illegal: /\/\*/, keywords: { keyword: 'if else goto for in do call exit not exist errorlevel defined ' + 'equ neq lss leq gtr geq', built_in: 'prn nul lpt3 lpt2 lpt1 con com4 com3 com2 com1 aux ' + 'shift cd dir echo setlocal endlocal set pause copy ' + 'append assoc at attrib break cacls cd chcp chdir chkdsk chkntfs cls cmd color ' + 'comp compact convert date dir diskcomp diskcopy doskey erase fs ' + 'find findstr format ftype graftabl help keyb label md mkdir mode more move path ' + 'pause print popd pushd promt rd recover rem rename replace restore rmdir shift' + 'sort start subst time title tree type ver verify vol ' + // winutils 'ping net ipconfig taskkill xcopy ren del' }, contains: [ { className: 'variable', begin: /%%[^ ]|%[^ ]+?%|![^ ]+?!/ }, { className: 'function', begin: LABEL.begin, end: 'goto:eof', contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: '([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*'}), COMMENT ] }, { className: 'number', begin: '\\b\\d+', relevance: 0 }, COMMENT ] }; }; /***/ }, /* 217 */ /***/ function(module, exports) { module.exports = function(hljs) { var QUOTED_PROPERTY = { className: 'string', begin: /"/, end: /"/ }; var APOS_PROPERTY = { className: 'string', begin: /'/, end: /'/ }; var UNQUOTED_PROPERTY = { className: 'string', begin: '[\\w-?]+:\\w+', end: '\\W', relevance: 0 }; var VALUELESS_PROPERTY = { className: 'string', begin: '\\w+-?\\w+', end: '\\W', relevance: 0 }; return { 
keywords: 'dsconfig', contains: [ { className: 'keyword', begin: '^dsconfig', end: '\\s', excludeEnd: true, relevance: 10 }, { className: 'built_in', begin: '(list|create|get|set|delete)-(\\w+)', end: '\\s', excludeEnd: true, illegal: '!@#$%^&*()', relevance: 10 }, { className: 'built_in', begin: '--(\\w+)', end: '\\s', excludeEnd: true }, QUOTED_PROPERTY, APOS_PROPERTY, UNQUOTED_PROPERTY, VALUELESS_PROPERTY, hljs.HASH_COMMENT_MODE ] }; }; /***/ }, /* 218 */ /***/ function(module, exports) { module.exports = function(hljs) { var STRINGS = { className: 'string', variants: [ hljs.inherit(hljs.QUOTE_STRING_MODE, { begin: '((u8?|U)|L)?"' }), { begin: '(u8?|U)?R"', end: '"', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '\'\\\\?.', end: '\'', illegal: '.' } ] }; var NUMBERS = { className: 'number', variants: [ { begin: '\\b(\\d+(\\.\\d*)?|\\.\\d+)(u|U|l|L|ul|UL|f|F)' }, { begin: hljs.C_NUMBER_RE } ], relevance: 0 }; var PREPROCESSOR = { className: 'meta', begin: '#', end: '$', keywords: {'meta-keyword': 'if else elif endif define undef ifdef ifndef'}, contains: [ { begin: /\\\n/, relevance: 0 }, { beginKeywords: 'include', end: '$', keywords: {'meta-keyword': 'include'}, contains: [ hljs.inherit(STRINGS, {className: 'meta-string'}), { className: 'meta-string', begin: '<', end: '>', illegal: '\\n' } ] }, STRINGS, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }; var DTS_REFERENCE = { className: 'variable', begin: '\\&[a-z\\d_]*\\b' }; var DTS_KEYWORD = { className: 'meta-keyword', begin: '/[a-z][a-z\\d-]*/' }; var DTS_LABEL = { className: 'symbol', begin: '^\\s*[a-zA-Z_][a-zA-Z\\d_]*:' }; var DTS_CELL_PROPERTY = { className: 'params', begin: '<', end: '>', contains: [ NUMBERS, DTS_REFERENCE ] }; var DTS_NODE = { className: 'class', begin: /[a-zA-Z_][a-zA-Z\d_@]*\s{/, end: /[{;=]/, returnBegin: true, excludeEnd: true }; var DTS_ROOT_NODE = { className: 'class', begin: '/\\s*{', end: '};', relevance: 10, contains: [ DTS_REFERENCE, DTS_KEYWORD, DTS_LABEL, DTS_NODE, DTS_CELL_PROPERTY, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, NUMBERS, STRINGS ] }; return { keywords: "", contains: [ DTS_ROOT_NODE, DTS_REFERENCE, DTS_KEYWORD, DTS_LABEL, DTS_NODE, DTS_CELL_PROPERTY, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, NUMBERS, STRINGS, PREPROCESSOR, { begin: hljs.IDENT_RE + '::', keywords: "" } ] }; }; /***/ }, /* 219 */ /***/ function(module, exports) { module.exports = function(hljs) { var EXPRESSION_KEYWORDS = 'if eq ne lt lte gt gte select default math sep'; return { aliases: ['dst'], case_insensitive: true, subLanguage: 'xml', contains: [ { className: 'template-tag', begin: /\{[#\/]/, end: /\}/, illegal: /;/, contains: [ { className: 'name', begin: /[a-zA-Z\.-]+/, starts: { endsWithParent: true, relevance: 0, contains: [ hljs.QUOTE_STRING_MODE ] } } ] }, { className: 'template-variable', begin: /\{/, end: /\}/, illegal: /;/, keywords: EXPRESSION_KEYWORDS } ] }; }; /***/ }, /* 220 */ /***/ function(module, exports) { module.exports = function(hljs) { var commentMode = hljs.COMMENT(/\(\*/, /\*\)/); var nonTerminalMode = { className: "attribute", begin: /^[ ]*[a-zA-Z][a-zA-Z-]*([\s-]+[a-zA-Z][a-zA-Z]*)*/ }; var specialSequenceMode = { className: "meta", begin: /\?.*\?/ }; var ruleBodyMode = { begin: /=/, end: /;/, contains: [ commentMode, specialSequenceMode, // terminals hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] }; return { illegal: /\S/, contains: [ commentMode, nonTerminalMode, ruleBodyMode ] }; }; /***/ }, /* 221 */ /***/ function(module, exports) { module.exports = 
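// Elixir: identifiers may end in ! or ?, strings support #{...} interpolation,
// def/defp/defmacro open functions, defmodule/defprotocol/defimpl/defrecord open
// module-like blocks; :atoms, @attributes and /.../-style regexps are matched further down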
function(hljs) { var ELIXIR_IDENT_RE = '[a-zA-Z_][a-zA-Z0-9_]*(\\!|\\?)?'; var ELIXIR_METHOD_RE = '[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?'; var ELIXIR_KEYWORDS = 'and false then defined module in return redo retry end for true self when ' + 'next until do begin unless nil break not case cond alias while ensure or ' + 'include use alias fn quote'; var SUBST = { className: 'subst', begin: '#\\{', end: '}', lexemes: ELIXIR_IDENT_RE, keywords: ELIXIR_KEYWORDS }; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ { begin: /'/, end: /'/ }, { begin: /"/, end: /"/ } ] }; var FUNCTION = { className: 'function', beginKeywords: 'def defp defmacro', end: /\B\b/, // the mode is ended by the title contains: [ hljs.inherit(hljs.TITLE_MODE, { begin: ELIXIR_IDENT_RE, endsParent: true }) ] }; var CLASS = hljs.inherit(FUNCTION, { className: 'class', beginKeywords: 'defimpl defmodule defprotocol defrecord', end: /\bdo\b|$|;/ }); var ELIXIR_DEFAULT_CONTAINS = [ STRING, hljs.HASH_COMMENT_MODE, CLASS, FUNCTION, { className: 'symbol', begin: ':(?!\\s)', contains: [STRING, {begin: ELIXIR_METHOD_RE}], relevance: 0 }, { className: 'symbol', begin: ELIXIR_IDENT_RE + ':', relevance: 0 }, { className: 'number', begin: '(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b', relevance: 0 }, { className: 'variable', begin: '(\\$\\W)|((\\$|\\@\\@?)(\\w+))' }, { begin: '->' }, { // regexp container begin: '(' + hljs.RE_STARTERS_RE + ')\\s*', contains: [ hljs.HASH_COMMENT_MODE, { className: 'regexp', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ { begin: '/', end: '/[a-z]*' }, { begin: '%r\\[', end: '\\][a-z]*' } ] } ], relevance: 0 } ]; SUBST.contains = ELIXIR_DEFAULT_CONTAINS; return { lexemes: ELIXIR_IDENT_RE, keywords: ELIXIR_KEYWORDS, contains: ELIXIR_DEFAULT_CONTAINS }; }; /***/ }, /* 222 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMENT = { variants: [ hljs.COMMENT('--', '$'), hljs.COMMENT( '{-', '-}', { contains: ['self'] } ) ] }; var CONSTRUCTOR = { className: 'type', begin: '\\b[A-Z][\\w\']*', // TODO: other constructors (built-in, infix). relevance: 0 }; var LIST = { begin: '\\(', end: '\\)', illegal: '"', contains: [ {className: 'type', begin: '\\b[A-Z][\\w]*(\\((\\.\\.|,|\\w+)\\))?'}, COMMENT ] }; var RECORD = { begin: '{', end: '}', contains: LIST.contains }; return { keywords: 'let in if then else case of where module import exposing ' + 'type alias as infix infixl infixr port effect command subscription', contains: [ // Top-level constructions. { beginKeywords: 'port effect module', end: 'exposing', keywords: 'port effect module where command subscription exposing', contains: [LIST, COMMENT], illegal: '\\W\\.|;' }, { begin: 'import', end: '$', keywords: 'import as exposing', contains: [LIST, COMMENT], illegal: '\\W\\.|;' }, { begin: 'type', end: '$', keywords: 'type alias', contains: [CONSTRUCTOR, LIST, RECORD, COMMENT] }, { beginKeywords: 'infix infixl infixr', end: '$', contains: [hljs.C_NUMBER_MODE, COMMENT] }, { begin: 'port', end: '$', keywords: 'port', contains: [COMMENT] }, // Literals and names. // TODO: characters. 
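// literal and name modes: strings, numbers, upper-case type constructors,
// lower-case value names, and ->/<- arrows used as a relevance booster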
hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, CONSTRUCTOR, hljs.inherit(hljs.TITLE_MODE, {begin: '^[_a-z][\\w\']*'}), COMMENT, {begin: '->|<-'} // No markup, relevance booster ] }; }; /***/ }, /* 223 */ /***/ function(module, exports) { module.exports = function(hljs) { var RUBY_METHOD_RE = '[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?'; var RUBY_KEYWORDS = { keyword: 'and then defined module in return redo if BEGIN retry end for self when ' + 'next until do begin unless END rescue else break undef not super class case ' + 'require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor', literal: 'true false nil' }; var YARDOCTAG = { className: 'doctag', begin: '@[A-Za-z]+' }; var IRB_OBJECT = { begin: '#<', end: '>' }; var COMMENT_MODES = [ hljs.COMMENT( '#', '$', { contains: [YARDOCTAG] } ), hljs.COMMENT( '^\\=begin', '^\\=end', { contains: [YARDOCTAG], relevance: 10 } ), hljs.COMMENT('^__END__', '\\n$') ]; var SUBST = { className: 'subst', begin: '#\\{', end: '}', keywords: RUBY_KEYWORDS }; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, SUBST], variants: [ {begin: /'/, end: /'/}, {begin: /"/, end: /"/}, {begin: /`/, end: /`/}, {begin: '%[qQwWx]?\\(', end: '\\)'}, {begin: '%[qQwWx]?\\[', end: '\\]'}, {begin: '%[qQwWx]?{', end: '}'}, {begin: '%[qQwWx]?<', end: '>'}, {begin: '%[qQwWx]?/', end: '/'}, {begin: '%[qQwWx]?%', end: '%'}, {begin: '%[qQwWx]?-', end: '-'}, {begin: '%[qQwWx]?\\|', end: '\\|'}, { // \B in the beginning suppresses recognition of ?-sequences where ? // is the last character of a preceding identifier, as in: `func?4` begin: /\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/ }, { begin: /<<(-?)\w+$/, end: /^\s*\w+$/, } ] }; var PARAMS = { className: 'params', begin: '\\(', end: '\\)', endsParent: true, keywords: RUBY_KEYWORDS }; var RUBY_DEFAULT_CONTAINS = [ STRING, IRB_OBJECT, { className: 'class', beginKeywords: 'class module', end: '$|;', illegal: /=/, contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: '[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?'}), { begin: '<\\s*', contains: [{ begin: '(' + hljs.IDENT_RE + '::)?' 
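// superclass reference after `<`, optionally namespace-qualified (Foo::Bar):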
+ hljs.IDENT_RE }] } ].concat(COMMENT_MODES) }, { className: 'function', beginKeywords: 'def', end: '$|;', contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: RUBY_METHOD_RE}), PARAMS ].concat(COMMENT_MODES) }, { // swallow namespace qualifiers before symbols begin: hljs.IDENT_RE + '::' }, { className: 'symbol', begin: hljs.UNDERSCORE_IDENT_RE + '(\\!|\\?)?:', relevance: 0 }, { className: 'symbol', begin: ':(?!\\s)', contains: [STRING, {begin: RUBY_METHOD_RE}], relevance: 0 }, { className: 'number', begin: '(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b', relevance: 0 }, { begin: '(\\$\\W)|((\\$|\\@\\@?)(\\w+))' // variables }, { className: 'params', begin: /\|/, end: /\|/, keywords: RUBY_KEYWORDS }, { // regexp container begin: '(' + hljs.RE_STARTERS_RE + '|unless)\\s*', contains: [ IRB_OBJECT, { className: 'regexp', contains: [hljs.BACKSLASH_ESCAPE, SUBST], illegal: /\n/, variants: [ {begin: '/', end: '/[a-z]*'}, {begin: '%r{', end: '}[a-z]*'}, {begin: '%r\\(', end: '\\)[a-z]*'}, {begin: '%r!', end: '![a-z]*'}, {begin: '%r\\[', end: '\\][a-z]*'} ] } ].concat(COMMENT_MODES), relevance: 0 } ].concat(COMMENT_MODES); SUBST.contains = RUBY_DEFAULT_CONTAINS; PARAMS.contains = RUBY_DEFAULT_CONTAINS; var SIMPLE_PROMPT = "[>?]>"; var DEFAULT_PROMPT = "[\\w#]+\\(\\w+\\):\\d+:\\d+>"; var RVM_PROMPT = "(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>"; var IRB_DEFAULT = [ { begin: /^\s*=>/, starts: { end: '$', contains: RUBY_DEFAULT_CONTAINS } }, { className: 'meta', begin: '^('+SIMPLE_PROMPT+"|"+DEFAULT_PROMPT+'|'+RVM_PROMPT+')', starts: { end: '$', contains: RUBY_DEFAULT_CONTAINS } } ]; return { aliases: ['rb', 'gemspec', 'podspec', 'thor', 'irb'], keywords: RUBY_KEYWORDS, illegal: /\/\*/, contains: COMMENT_MODES.concat(IRB_DEFAULT).concat(RUBY_DEFAULT_CONTAINS) }; }; /***/ }, /* 224 */ /***/ function(module, exports) { module.exports = function(hljs) { return { subLanguage: 'xml', contains: [ hljs.COMMENT('<%#', '%>'), { begin: '<%[%=-]?', end: '[%-]?%>', subLanguage: 'ruby', excludeBegin: true, excludeEnd: true } ] }; }; /***/ }, /* 225 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { built_in: 'spawn spawn_link self', keyword: 'after and andalso|10 band begin bnot bor bsl bsr bxor case catch cond div end fun if ' + 'let not of or orelse|10 query receive rem try when xor' }, contains: [ { className: 'meta', begin: '^[0-9]+> ', relevance: 10 }, hljs.COMMENT('%', '$'), { className: 'number', begin: '\\b(\\d+#[a-fA-F0-9]+|\\d+(\\.\\d+)?([eE][-+]?\\d+)?)', relevance: 0 }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { begin: '\\?(::)?([A-Z]\\w*(::)?)+' }, { begin: '->' }, { begin: 'ok' }, { begin: '!' 
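// the send operator; `->`, `ok` and `!` act as relevance boosters for Erlang shell transcripts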
}, { begin: '(\\b[a-z\'][a-zA-Z0-9_\']*:[a-z\'][a-zA-Z0-9_\']*)|(\\b[a-z\'][a-zA-Z0-9_\']*)', relevance: 0 }, { begin: '[A-Z][a-zA-Z0-9_\']*', relevance: 0 } ] }; }; /***/ }, /* 226 */ /***/ function(module, exports) { module.exports = function(hljs) { var BASIC_ATOM_RE = '[a-z\'][a-zA-Z0-9_\']*'; var FUNCTION_NAME_RE = '(' + BASIC_ATOM_RE + ':' + BASIC_ATOM_RE + '|' + BASIC_ATOM_RE + ')'; var ERLANG_RESERVED = { keyword: 'after and andalso|10 band begin bnot bor bsl bzr bxor case catch cond div end fun if ' + 'let not of orelse|10 query receive rem try when xor', literal: 'false true' }; var COMMENT = hljs.COMMENT('%', '$'); var NUMBER = { className: 'number', begin: '\\b(\\d+#[a-fA-F0-9]+|\\d+(\\.\\d+)?([eE][-+]?\\d+)?)', relevance: 0 }; var NAMED_FUN = { begin: 'fun\\s+' + BASIC_ATOM_RE + '/\\d+' }; var FUNCTION_CALL = { begin: FUNCTION_NAME_RE + '\\(', end: '\\)', returnBegin: true, relevance: 0, contains: [ { begin: FUNCTION_NAME_RE, relevance: 0 }, { begin: '\\(', end: '\\)', endsWithParent: true, returnEnd: true, relevance: 0 // "contains" defined later } ] }; var TUPLE = { begin: '{', end: '}', relevance: 0 // "contains" defined later }; var VAR1 = { begin: '\\b_([A-Z][A-Za-z0-9_]*)?', relevance: 0 }; var VAR2 = { begin: '[A-Z][a-zA-Z0-9_]*', relevance: 0 }; var RECORD_ACCESS = { begin: '#' + hljs.UNDERSCORE_IDENT_RE, relevance: 0, returnBegin: true, contains: [ { begin: '#' + hljs.UNDERSCORE_IDENT_RE, relevance: 0 }, { begin: '{', end: '}', relevance: 0 // "contains" defined later } ] }; var BLOCK_STATEMENTS = { beginKeywords: 'fun receive if try case', end: 'end', keywords: ERLANG_RESERVED }; BLOCK_STATEMENTS.contains = [ COMMENT, NAMED_FUN, hljs.inherit(hljs.APOS_STRING_MODE, {className: ''}), BLOCK_STATEMENTS, FUNCTION_CALL, hljs.QUOTE_STRING_MODE, NUMBER, TUPLE, VAR1, VAR2, RECORD_ACCESS ]; var BASIC_MODES = [ COMMENT, NAMED_FUN, BLOCK_STATEMENTS, FUNCTION_CALL, hljs.QUOTE_STRING_MODE, NUMBER, TUPLE, VAR1, VAR2, RECORD_ACCESS ]; FUNCTION_CALL.contains[1].contains = BASIC_MODES; TUPLE.contains = BASIC_MODES; RECORD_ACCESS.contains[1].contains = BASIC_MODES; var PARAMS = { className: 'params', begin: '\\(', end: '\\)', contains: BASIC_MODES }; return { aliases: ['erl'], keywords: ERLANG_RESERVED, illegal: '(</|\\*=|\\+=|-=|/\\*|\\*/|\\(\\*|\\*\\))', contains: [ { className: 'function', begin: '^' + BASIC_ATOM_RE + '\\s*\\(', end: '->', returnBegin: true, illegal: '\\(|#|//|/\\*|\\\\|:|;', contains: [ PARAMS, hljs.inherit(hljs.TITLE_MODE, {begin: BASIC_ATOM_RE}) ], starts: { end: ';|\\.', keywords: ERLANG_RESERVED, contains: BASIC_MODES } }, COMMENT, { begin: '^-', end: '\\.', relevance: 0, excludeEnd: true, returnBegin: true, lexemes: '-' + hljs.IDENT_RE, keywords: '-module -record -undef -export -ifdef -ifndef -author -copyright -doc -vsn ' + '-import -include -include_lib -compile -define -else -endif -file -behaviour ' + '-behavior -spec', contains: [PARAMS] }, NUMBER, hljs.QUOTE_STRING_MODE, RECORD_ACCESS, VAR1, VAR2, TUPLE, {begin: /\.$/} // relevance booster ] }; }; /***/ }, /* 227 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['xlsx', 'xls'], case_insensitive: true, lexemes: /[a-zA-Z][\w\.]*/, // built-in functions imported from https://web.archive.org/web/20160513042710/https://support.office.com/en-us/article/Excel-functions-alphabetical-b3944572-255d-4efb-bb96-c6d90033e188 keywords: { built_in: 'ABS ACCRINT ACCRINTM ACOS ACOSH ACOT ACOTH AGGREGATE ADDRESS AMORDEGRC AMORLINC AND ARABIC AREAS ASC ASIN ASINH ATAN ATAN2 ATANH 
AVEDEV AVERAGE AVERAGEA AVERAGEIF AVERAGEIFS BAHTTEXT BASE BESSELI BESSELJ BESSELK BESSELY BETADIST BETA.DIST BETAINV BETA.INV BIN2DEC BIN2HEX BIN2OCT BINOMDIST BINOM.DIST BINOM.DIST.RANGE BINOM.INV BITAND BITLSHIFT BITOR BITRSHIFT BITXOR CALL CEILING CEILING.MATH CEILING.PRECISE CELL CHAR CHIDIST CHIINV CHITEST CHISQ.DIST CHISQ.DIST.RT CHISQ.INV CHISQ.INV.RT CHISQ.TEST CHOOSE CLEAN CODE COLUMN COLUMNS COMBIN COMBINA COMPLEX CONCAT CONCATENATE CONFIDENCE CONFIDENCE.NORM CONFIDENCE.T CONVERT CORREL COS COSH COT COTH COUNT COUNTA COUNTBLANK COUNTIF COUNTIFS COUPDAYBS COUPDAYS COUPDAYSNC COUPNCD COUPNUM COUPPCD COVAR COVARIANCE.P COVARIANCE.S CRITBINOM CSC CSCH CUBEKPIMEMBER CUBEMEMBER CUBEMEMBERPROPERTY CUBERANKEDMEMBER CUBESET CUBESETCOUNT CUBEVALUE CUMIPMT CUMPRINC DATE DATEDIF DATEVALUE DAVERAGE DAY DAYS DAYS360 DB DBCS DCOUNT DCOUNTA DDB DEC2BIN DEC2HEX DEC2OCT DECIMAL DEGREES DELTA DEVSQ DGET DISC DMAX DMIN DOLLAR DOLLARDE DOLLARFR DPRODUCT DSTDEV DSTDEVP DSUM DURATION DVAR DVARP EDATE EFFECT ENCODEURL EOMONTH ERF ERF.PRECISE ERFC ERFC.PRECISE ERROR.TYPE EUROCONVERT EVEN EXACT EXP EXPON.DIST EXPONDIST FACT FACTDOUBLE FALSE|0 F.DIST FDIST F.DIST.RT FILTERXML FIND FINDB F.INV F.INV.RT FINV FISHER FISHERINV FIXED FLOOR FLOOR.MATH FLOOR.PRECISE FORECAST FORECAST.ETS FORECAST.ETS.CONFINT FORECAST.ETS.SEASONALITY FORECAST.ETS.STAT FORECAST.LINEAR FORMULATEXT FREQUENCY F.TEST FTEST FV FVSCHEDULE GAMMA GAMMA.DIST GAMMADIST GAMMA.INV GAMMAINV GAMMALN GAMMALN.PRECISE GAUSS GCD GEOMEAN GESTEP GETPIVOTDATA GROWTH HARMEAN HEX2BIN HEX2DEC HEX2OCT HLOOKUP HOUR HYPERLINK HYPGEOM.DIST HYPGEOMDIST IF|0 IFERROR IFNA IFS IMABS IMAGINARY IMARGUMENT IMCONJUGATE IMCOS IMCOSH IMCOT IMCSC IMCSCH IMDIV IMEXP IMLN IMLOG10 IMLOG2 IMPOWER IMPRODUCT IMREAL IMSEC IMSECH IMSIN IMSINH IMSQRT IMSUB IMSUM IMTAN INDEX INDIRECT INFO INT INTERCEPT INTRATE IPMT IRR ISBLANK ISERR ISERROR ISEVEN ISFORMULA ISLOGICAL ISNA ISNONTEXT ISNUMBER ISODD ISREF ISTEXT ISO.CEILING ISOWEEKNUM ISPMT JIS KURT LARGE LCM LEFT LEFTB LEN LENB LINEST LN LOG LOG10 LOGEST LOGINV LOGNORM.DIST LOGNORMDIST LOGNORM.INV LOOKUP LOWER MATCH MAX MAXA MAXIFS MDETERM MDURATION MEDIAN MID MIDBs MIN MINIFS MINA MINUTE MINVERSE MIRR MMULT MOD MODE MODE.MULT MODE.SNGL MONTH MROUND MULTINOMIAL MUNIT N NA NEGBINOM.DIST NEGBINOMDIST NETWORKDAYS NETWORKDAYS.INTL NOMINAL NORM.DIST NORMDIST NORMINV NORM.INV NORM.S.DIST NORMSDIST NORM.S.INV NORMSINV NOT NOW NPER NPV NUMBERVALUE OCT2BIN OCT2DEC OCT2HEX ODD ODDFPRICE ODDFYIELD ODDLPRICE ODDLYIELD OFFSET OR PDURATION PEARSON PERCENTILE.EXC PERCENTILE.INC PERCENTILE PERCENTRANK.EXC PERCENTRANK.INC PERCENTRANK PERMUT PERMUTATIONA PHI PHONETIC PI PMT POISSON.DIST POISSON POWER PPMT PRICE PRICEDISC PRICEMAT PROB PRODUCT PROPER PV QUARTILE QUARTILE.EXC QUARTILE.INC QUOTIENT RADIANS RAND RANDBETWEEN RANK.AVG RANK.EQ RANK RATE RECEIVED REGISTER.ID REPLACE REPLACEB REPT RIGHT RIGHTB ROMAN ROUND ROUNDDOWN ROUNDUP ROW ROWS RRI RSQ RTD SEARCH SEARCHB SEC SECH SECOND SERIESSUM SHEET SHEETS SIGN SIN SINH SKEW SKEW.P SLN SLOPE SMALL SQL.REQUEST SQRT SQRTPI STANDARDIZE STDEV STDEV.P STDEV.S STDEVA STDEVP STDEVPA STEYX SUBSTITUTE SUBTOTAL SUM SUMIF SUMIFS SUMPRODUCT SUMSQ SUMX2MY2 SUMX2PY2 SUMXMY2 SWITCH SYD T TAN TANH TBILLEQ TBILLPRICE TBILLYIELD T.DIST T.DIST.2T T.DIST.RT TDIST TEXT TEXTJOIN TIME TIMEVALUE T.INV T.INV.2T TINV TODAY TRANSPOSE TREND TRIM TRIMMEAN TRUE|0 TRUNC T.TEST TTEST TYPE UNICHAR UNICODE UPPER VALUE VAR VAR.P VAR.S VARA VARP VARPA VDB VLOOKUP WEBSERVICE WEEKDAY WEEKNUM WEIBULL WEIBULL.DIST WORKDAY WORKDAY.INTL XIRR 
XNPV XOR YEAR YEARFRAC YIELD YIELDDISC YIELDMAT Z.TEST ZTEST' }, contains: [ { /* matches a beginning equal sign found in Excel formula examples */ begin: /^=/, end: /[^=]/, returnEnd: true, illegal: /=/, /* only allow single equal sign at front of line */ relevance: 10 }, /* technically, there can be more than 2 letters in column names, but this prevents conflict with some keywords */ { /* matches a reference to a single cell */ className: 'symbol', begin: /\b[A-Z]{1,2}\d+\b/, end: /[^\d]/, excludeEnd: true, relevance: 0 }, { /* matches a reference to a range of cells */ className: 'symbol', begin: /[A-Z]{0,2}\d*:[A-Z]{0,2}\d*/, relevance: 0 }, hljs.BACKSLASH_ESCAPE, hljs.QUOTE_STRING_MODE, { className: 'number', begin: hljs.NUMBER_RE + '(%)?', relevance: 0 }, /* Excel formula comments are done by putting the comment in a function call to N() */ hljs.COMMENT(/\bN\(/,/\)/, { excludeBegin: true, excludeEnd: true, illegal: /\n/ }) ] }; }; /***/ }, /* 228 */ /***/ function(module, exports) { module.exports = function(hljs) { return { contains: [ { begin: /[^\u2401\u0001]+/, end: /[\u2401\u0001]/, excludeEnd: true, returnBegin: true, returnEnd: false, contains: [ { begin: /([^\u2401\u0001=]+)/, end: /=([^\u2401\u0001=]+)/, returnEnd: true, returnBegin: false, className: 'attr' }, { begin: /=/, end: /([\u2401\u0001])/, excludeEnd: true, excludeBegin: true, className: 'string' }] }], case_insensitive: true }; }; /***/ }, /* 229 */ /***/ function(module, exports) { module.exports = function (hljs) { var CHAR = { className: 'string', begin: /'(.|\\[xXuU][a-zA-Z0-9]+)'/ }; var STRING = { className: 'string', variants: [ { begin: '"', end: '"' } ] }; var NAME = { className: 'title', begin: /[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/ }; var METHOD = { className: 'function', beginKeywords: 'def', end: /[:={\[(\n;]/, excludeEnd: true, contains: [NAME] }; return { keywords: { literal: 'true false', keyword: 'case class def else enum if impl import in lat rel index let match namespace switch type yield with' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, CHAR, STRING, METHOD, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 230 */ /***/ function(module, exports) { module.exports = function(hljs) { var PARAMS = { className: 'params', begin: '\\(', end: '\\)' }; var F_KEYWORDS = { literal: '.False. .True.', keyword: 'kind do while private call intrinsic where elsewhere ' + 'type endtype endmodule endselect endinterface end enddo endif if forall endforall only contains default return stop then ' + 'public subroutine|10 function program .and. .or. .not. .le. .eq. .ge. .gt. .lt. 
' + 'goto save else use module select case ' + 'access blank direct exist file fmt form formatted iostat name named nextrec number opened rec recl sequential status unformatted unit ' + 'continue format pause cycle exit ' + 'c_null_char c_alert c_backspace c_form_feed flush wait decimal round iomsg ' + 'synchronous nopass non_overridable pass protected volatile abstract extends import ' + 'non_intrinsic value deferred generic final enumerator class associate bind enum ' + 'c_int c_short c_long c_long_long c_signed_char c_size_t c_int8_t c_int16_t c_int32_t c_int64_t c_int_least8_t c_int_least16_t ' + 'c_int_least32_t c_int_least64_t c_int_fast8_t c_int_fast16_t c_int_fast32_t c_int_fast64_t c_intmax_t C_intptr_t c_float c_double ' + 'c_long_double c_float_complex c_double_complex c_long_double_complex c_bool c_char c_null_ptr c_null_funptr ' + 'c_new_line c_carriage_return c_horizontal_tab c_vertical_tab iso_c_binding c_loc c_funloc c_associated c_f_pointer ' + 'c_ptr c_funptr iso_fortran_env character_storage_size error_unit file_storage_size input_unit iostat_end iostat_eor ' + 'numeric_storage_size output_unit c_f_procpointer ieee_arithmetic ieee_support_underflow_control ' + 'ieee_get_underflow_mode ieee_set_underflow_mode newunit contiguous recursive ' + 'pad position action delim readwrite eor advance nml interface procedure namelist include sequence elemental pure ' + 'integer real character complex logical dimension allocatable|10 parameter ' + 'external implicit|10 none double precision assign intent optional pointer ' + 'target in out common equivalence data', built_in: 'alog alog10 amax0 amax1 amin0 amin1 amod cabs ccos cexp clog csin csqrt dabs dacos dasin datan datan2 dcos dcosh ddim dexp dint ' + 'dlog dlog10 dmax1 dmin1 dmod dnint dsign dsin dsinh dsqrt dtan dtanh float iabs idim idint idnint ifix isign max0 max1 min0 min1 sngl ' + 'algama cdabs cdcos cdexp cdlog cdsin cdsqrt cqabs cqcos cqexp cqlog cqsin cqsqrt dcmplx dconjg derf derfc dfloat dgamma dimag dlgama ' + 'iqint qabs qacos qasin qatan qatan2 qcmplx qconjg qcos qcosh qdim qerf qerfc qexp qgamma qimag qlgama qlog qlog10 qmax1 qmin1 qmod ' + 'qnint qsign qsin qsinh qsqrt qtan qtanh abs acos aimag aint anint asin atan atan2 char cmplx conjg cos cosh exp ichar index int log ' + 'log10 max min nint sign sin sinh sqrt tan tanh print write dim lge lgt lle llt mod nullify allocate deallocate ' + 'adjustl adjustr all allocated any associated bit_size btest ceiling count cshift date_and_time digits dot_product ' + 'eoshift epsilon exponent floor fraction huge iand ibclr ibits ibset ieor ior ishft ishftc lbound len_trim matmul ' + 'maxexponent maxloc maxval merge minexponent minloc minval modulo mvbits nearest pack present product ' + 'radix random_number random_seed range repeat reshape rrspacing scale scan selected_int_kind selected_real_kind ' + 'set_exponent shape size spacing spread sum system_clock tiny transpose trim ubound unpack verify achar iachar transfer ' + 'dble entry dprod cpu_time command_argument_count get_command get_command_argument get_environment_variable is_iostat_end ' + 'ieee_arithmetic ieee_support_underflow_control ieee_get_underflow_mode ieee_set_underflow_mode ' + 'is_iostat_eor move_alloc new_line selected_char_kind same_type_as extends_type_of' + 'acosh asinh atanh bessel_j0 bessel_j1 bessel_jn bessel_y0 bessel_y1 bessel_yn erf erfc erfc_scaled gamma log_gamma hypot norm2 ' + 'atomic_define atomic_ref execute_command_line leadz trailz storage_size merge_bits ' + 'bge bgt ble blt dshiftl dshiftr 
findloc iall iany iparity image_index lcobound ucobound maskl maskr ' + 'num_images parity popcnt poppar shifta shiftl shiftr this_image' }; return { case_insensitive: true, aliases: ['f90', 'f95'], keywords: F_KEYWORDS, illegal: /\/\*/, contains: [ hljs.inherit(hljs.APOS_STRING_MODE, {className: 'string', relevance: 0}), hljs.inherit(hljs.QUOTE_STRING_MODE, {className: 'string', relevance: 0}), { className: 'function', beginKeywords: 'subroutine function program', illegal: '[${=\\n]', contains: [hljs.UNDERSCORE_TITLE_MODE, PARAMS] }, hljs.COMMENT('!', '$', {relevance: 0}), { className: 'number', begin: '(?=\\b|\\+|\\-|\\.)(?=\\.\\d|\\d)(?:\\d+)?(?:\\.?\\d*)(?:[de][+-]?\\d+)?\\b\\.?', relevance: 0 } ] }; }; /***/ }, /* 231 */ /***/ function(module, exports) { module.exports = function(hljs) { var TYPEPARAM = { begin: '<', end: '>', contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: /'[a-zA-Z0-9_]+/}) ] }; return { aliases: ['fs'], keywords: 'abstract and as assert base begin class default delegate do done ' + 'downcast downto elif else end exception extern false finally for ' + 'fun function global if in inherit inline interface internal lazy let ' + 'match member module mutable namespace new null of open or ' + 'override private public rec return sig static struct then to ' + 'true try type upcast use val void when while with yield', illegal: /\/\*/, contains: [ { // monad builder keywords (matches before non-bang kws) className: 'keyword', begin: /\b(yield|return|let|do)!/ }, { className: 'string', begin: '@"', end: '"', contains: [{begin: '""'}] }, { className: 'string', begin: '"""', end: '"""' }, hljs.COMMENT('\\(\\*', '\\*\\)'), { className: 'class', beginKeywords: 'type', end: '\\(|=|$', excludeEnd: true, contains: [ hljs.UNDERSCORE_TITLE_MODE, TYPEPARAM ] }, { className: 'meta', begin: '\\[<', end: '>\\]', relevance: 10 }, { className: 'symbol', begin: '\\B(\'[A-Za-z])\\b', contains: [hljs.BACKSLASH_ESCAPE] }, hljs.C_LINE_COMMENT_MODE, hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}), hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 232 */ /***/ function(module, exports) { module.exports = function (hljs) { var KEYWORDS = { 'keyword': 'abort acronym acronyms alias all and assign binary card diag display ' + 'else eq file files for free ge gt if integer le loop lt maximizing ' + 'minimizing model models ne negative no not option options or ord ' + 'positive prod put putpage puttl repeat sameas semicont semiint smax ' + 'smin solve sos1 sos2 sum system table then until using while xor yes', 'literal': 'eps inf na', 'built-in': 'abs arccos arcsin arctan arctan2 Beta betaReg binomial ceil centropy ' + 'cos cosh cvPower div div0 eDist entropy errorf execSeed exp fact ' + 'floor frac gamma gammaReg log logBeta logGamma log10 log2 mapVal max ' + 'min mod ncpCM ncpF ncpVUpow ncpVUsin normal pi poly power ' + 'randBinomial randLinear randTriangle round rPower sigmoid sign ' + 'signPower sin sinh slexp sllog10 slrec sqexp sqlog10 sqr sqrec sqrt ' + 'tan tanh trunc uniform uniformInt vcPower bool_and bool_eqv bool_imp ' + 'bool_not bool_or bool_xor ifThen rel_eq rel_ge rel_gt rel_le rel_lt ' + 'rel_ne gday gdow ghour gleap gmillisec gminute gmonth gsecond gyear ' + 'jdate jnow jstart jtime errorLevel execError gamsRelease gamsVersion ' + 'handleCollect handleDelete handleStatus handleSubmit heapFree ' + 'heapLimit heapSize jobHandle jobKill jobStatus jobTerminate ' + 'licenseLevel licenseStatus maxExecError sleep timeClose timeComp ' + 'timeElapsed timeExec timeStart' }; var PARAMS = { className: 
'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, }; var SYMBOLS = { className: 'symbol', variants: [ {begin: /\=[lgenxc]=/}, {begin: /\$/}, ] }; var QSTR = { // One-line quoted comment string className: 'comment', variants: [ {begin: '\'', end: '\''}, {begin: '"', end: '"'}, ], illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE] }; var ASSIGNMENT = { begin: '/', end: '/', keywords: KEYWORDS, contains: [ QSTR, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, hljs.C_NUMBER_MODE, ], }; var DESCTEXT = { // Parameter/set/variable description text begin: /[a-z][a-z0-9_]*(\([a-z0-9_, ]*\))?[ \t]+/, excludeBegin: true, end: '$', endsWithParent: true, contains: [ QSTR, ASSIGNMENT, { className: 'comment', begin: /([ ]*[a-z0-9&#*=?@>\\<:\-,()$\[\]_.{}!+%^]+)+/, relevance: 0 }, ], }; return { aliases: ['gms'], case_insensitive: true, keywords: KEYWORDS, contains: [ hljs.COMMENT(/^\$ontext/, /^\$offtext/), { className: 'meta', begin: '^\\$[a-z0-9]+', end: '$', returnBegin: true, contains: [ { className: 'meta-keyword', begin: '^\\$[a-z0-9]+', } ] }, hljs.COMMENT('^\\*', '$'), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, // Declarations { beginKeywords: 'set sets parameter parameters variable variables ' + 'scalar scalars equation equations', end: ';', contains: [ hljs.COMMENT('^\\*', '$'), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, ASSIGNMENT, DESCTEXT, ] }, { // table environment beginKeywords: 'table', end: ';', returnBegin: true, contains: [ { // table header row beginKeywords: 'table', end: '$', contains: [DESCTEXT], }, hljs.COMMENT('^\\*', '$'), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, hljs.C_NUMBER_MODE, // Table does not contain DESCTEXT or ASSIGNMENT ] }, // Function definitions { className: 'function', begin: /^[a-z][a-z0-9_,\-+' ()$]+\.{2}/, returnBegin: true, contains: [ { // Function title className: 'title', begin: /^[a-z][a-z0-9_]+/, }, PARAMS, SYMBOLS, ], }, hljs.C_NUMBER_MODE, SYMBOLS, ] }; }; /***/ }, /* 233 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: 'and bool break call callexe checkinterrupt clear clearg closeall cls comlog compile ' + 'continue create debug declare delete disable dlibrary dllcall do dos ed edit else ' + 'elseif enable end endfor endif endp endo errorlog errorlogat expr external fn ' + 'for format goto gosub graph if keyword let lib library line load loadarray loadexe ' + 'loadf loadk loadm loadp loads loadx local locate loopnextindex lprint lpwidth lshow ' + 'matrix msym ndpclex new not open or output outwidth plot plotsym pop prcsn print ' + 'printdos proc push retp return rndcon rndmod rndmult rndseed run save saveall screen ' + 'scroll setarray show sparse stop string struct system trace trap threadfor ' + 'threadendfor threadbegin threadjoin threadstat threadend until use while winprint', built_in: 'abs acf aconcat aeye amax amean AmericanBinomCall AmericanBinomCall_Greeks AmericanBinomCall_ImpVol ' + 'AmericanBinomPut AmericanBinomPut_Greeks AmericanBinomPut_ImpVol AmericanBSCall AmericanBSCall_Greeks ' + 'AmericanBSCall_ImpVol AmericanBSPut AmericanBSPut_Greeks AmericanBSPut_ImpVol amin amult annotationGetDefaults ' + 'annotationSetBkd annotationSetFont annotationSetLineColor annotationSetLineStyle annotationSetLineThickness ' + 'annualTradingDays arccos arcsin 
areshape arrayalloc arrayindex arrayinit arraytomat asciiload asclabel astd ' + 'astds asum atan atan2 atranspose axmargin balance band bandchol bandcholsol bandltsol bandrv bandsolpd bar ' + 'base10 begwind besselj bessely beta box boxcox cdfBeta cdfBetaInv cdfBinomial cdfBinomialInv cdfBvn cdfBvn2 ' + 'cdfBvn2e cdfCauchy cdfCauchyInv cdfChic cdfChii cdfChinc cdfChincInv cdfExp cdfExpInv cdfFc cdfFnc cdfFncInv ' + 'cdfGam cdfGenPareto cdfHyperGeo cdfLaplace cdfLaplaceInv cdfLogistic cdfLogisticInv cdfmControlCreate cdfMvn ' + 'cdfMvn2e cdfMvnce cdfMvne cdfMvt2e cdfMvtce cdfMvte cdfN cdfN2 cdfNc cdfNegBinomial cdfNegBinomialInv cdfNi ' + 'cdfPoisson cdfPoissonInv cdfRayleigh cdfRayleighInv cdfTc cdfTci cdfTnc cdfTvn cdfWeibull cdfWeibullInv cdir ' + 'ceil ChangeDir chdir chiBarSquare chol choldn cholsol cholup chrs close code cols colsf combinate combinated ' + 'complex con cond conj cons ConScore contour conv convertsatostr convertstrtosa corrm corrms corrvc corrx corrxs ' + 'cos cosh counts countwts crossprd crout croutp csrcol csrlin csvReadM csvReadSA cumprodc cumsumc curve cvtos ' + 'datacreate datacreatecomplex datalist dataload dataloop dataopen datasave date datestr datestring datestrymd ' + 'dayinyr dayofweek dbAddDatabase dbClose dbCommit dbCreateQuery dbExecQuery dbGetConnectOptions dbGetDatabaseName ' + 'dbGetDriverName dbGetDrivers dbGetHostName dbGetLastErrorNum dbGetLastErrorText dbGetNumericalPrecPolicy ' + 'dbGetPassword dbGetPort dbGetTableHeaders dbGetTables dbGetUserName dbHasFeature dbIsDriverAvailable dbIsOpen ' + 'dbIsOpenError dbOpen dbQueryBindValue dbQueryClear dbQueryCols dbQueryExecPrepared dbQueryFetchAllM dbQueryFetchAllSA ' + 'dbQueryFetchOneM dbQueryFetchOneSA dbQueryFinish dbQueryGetBoundValue dbQueryGetBoundValues dbQueryGetField ' + 'dbQueryGetLastErrorNum dbQueryGetLastErrorText dbQueryGetLastInsertID dbQueryGetLastQuery dbQueryGetPosition ' + 'dbQueryIsActive dbQueryIsForwardOnly dbQueryIsNull dbQueryIsSelect dbQueryIsValid dbQueryPrepare dbQueryRows ' + 'dbQuerySeek dbQuerySeekFirst dbQuerySeekLast dbQuerySeekNext dbQuerySeekPrevious dbQuerySetForwardOnly ' + 'dbRemoveDatabase dbRollback dbSetConnectOptions dbSetDatabaseName dbSetHostName dbSetNumericalPrecPolicy ' + 'dbSetPort dbSetUserName dbTransaction DeleteFile delif delrows denseToSp denseToSpRE denToZero design det detl ' + 'dfft dffti diag diagrv digamma doswin DOSWinCloseall DOSWinOpen dotfeq dotfeqmt dotfge dotfgemt dotfgt dotfgtmt ' + 'dotfle dotflemt dotflt dotfltmt dotfne dotfnemt draw drop dsCreate dstat dstatmt dstatmtControlCreate dtdate dtday ' + 'dttime dttodtv dttostr dttoutc dtvnormal dtvtodt dtvtoutc dummy dummybr dummydn eig eigh eighv eigv elapsedTradingDays ' + 'endwind envget eof eqSolve eqSolvemt eqSolvemtControlCreate eqSolvemtOutCreate eqSolveset erf erfc erfccplx erfcplx error ' + 'etdays ethsec etstr EuropeanBinomCall EuropeanBinomCall_Greeks EuropeanBinomCall_ImpVol EuropeanBinomPut ' + 'EuropeanBinomPut_Greeks EuropeanBinomPut_ImpVol EuropeanBSCall EuropeanBSCall_Greeks EuropeanBSCall_ImpVol ' + 'EuropeanBSPut EuropeanBSPut_Greeks EuropeanBSPut_ImpVol exctsmpl exec execbg exp extern eye fcheckerr fclearerr feq ' + 'feqmt fflush fft ffti fftm fftmi fftn fge fgemt fgets fgetsa fgetsat fgetst fgt fgtmt fileinfo filesa fle flemt ' + 'floor flt fltmt fmod fne fnemt fonts fopen formatcv formatnv fputs fputst fseek fstrerror ftell ftocv ftos ftostrC ' + 'gamma gammacplx gammaii gausset gdaAppend gdaCreate gdaDStat gdaDStatMat gdaGetIndex gdaGetName gdaGetNames gdaGetOrders ' + 
'gdaGetType gdaGetTypes gdaGetVarInfo gdaIsCplx gdaLoad gdaPack gdaRead gdaReadByIndex gdaReadSome gdaReadSparse ' + 'gdaReadStruct gdaReportVarInfo gdaSave gdaUpdate gdaUpdateAndPack gdaVars gdaWrite gdaWrite32 gdaWriteSome getarray ' + 'getdims getf getGAUSShome getmatrix getmatrix4D getname getnamef getNextTradingDay getNextWeekDay getnr getorders ' + 'getpath getPreviousTradingDay getPreviousWeekDay getRow getscalar3D getscalar4D getTrRow getwind glm gradcplx gradMT ' + 'gradMTm gradMTT gradMTTm gradp graphprt graphset hasimag header headermt hess hessMT hessMTg hessMTgw hessMTm ' + 'hessMTmw hessMTT hessMTTg hessMTTgw hessMTTm hessMTw hessp hist histf histp hsec imag indcv indexcat indices indices2 ' + 'indicesf indicesfn indnv indsav indx integrate1d integrateControlCreate intgrat2 intgrat3 inthp1 inthp2 inthp3 inthp4 ' + 'inthpControlCreate intquad1 intquad2 intquad3 intrleav intrleavsa intrsect intsimp inv invpd invswp iscplx iscplxf ' + 'isden isinfnanmiss ismiss key keyav keyw lag lag1 lagn lapEighb lapEighi lapEighvb lapEighvi lapgEig lapgEigh lapgEighv ' + 'lapgEigv lapgSchur lapgSvdcst lapgSvds lapgSvdst lapSvdcusv lapSvds lapSvdusv ldlp ldlsol linSolve listwise ln lncdfbvn ' + 'lncdfbvn2 lncdfmvn lncdfn lncdfn2 lncdfnc lnfact lngammacplx lnpdfmvn lnpdfmvt lnpdfn lnpdft loadd loadstruct loadwind ' + 'loess loessmt loessmtControlCreate log loglog logx logy lower lowmat lowmat1 ltrisol lu lusol machEpsilon make makevars ' + 'makewind margin matalloc matinit mattoarray maxbytes maxc maxindc maxv maxvec mbesselei mbesselei0 mbesselei1 mbesseli ' + 'mbesseli0 mbesseli1 meanc median mergeby mergevar minc minindc minv miss missex missrv moment momentd movingave ' + 'movingaveExpwgt movingaveWgt nextindex nextn nextnevn nextwind ntos null null1 numCombinations ols olsmt olsmtControlCreate ' + 'olsqr olsqr2 olsqrmt ones optn optnevn orth outtyp pacf packedToSp packr parse pause pdfCauchy pdfChi pdfExp pdfGenPareto ' + 'pdfHyperGeo pdfLaplace pdfLogistic pdfn pdfPoisson pdfRayleigh pdfWeibull pi pinv pinvmt plotAddArrow plotAddBar plotAddBox ' + 'plotAddHist plotAddHistF plotAddHistP plotAddPolar plotAddScatter plotAddShape plotAddTextbox plotAddTS plotAddXY plotArea ' + 'plotBar plotBox plotClearLayout plotContour plotCustomLayout plotGetDefaults plotHist plotHistF plotHistP plotLayout ' + 'plotLogLog plotLogX plotLogY plotOpenWindow plotPolar plotSave plotScatter plotSetAxesPen plotSetBar plotSetBarFill ' + 'plotSetBarStacked plotSetBkdColor plotSetFill plotSetGrid plotSetLegend plotSetLineColor plotSetLineStyle plotSetLineSymbol ' + 'plotSetLineThickness plotSetNewWindow plotSetTitle plotSetWhichYAxis plotSetXAxisShow plotSetXLabel plotSetXRange ' + 'plotSetXTicInterval plotSetXTicLabel plotSetYAxisShow plotSetYLabel plotSetYRange plotSetZAxisShow plotSetZLabel ' + 'plotSurface plotTS plotXY polar polychar polyeval polygamma polyint polymake polymat polymroot polymult polyroot ' + 'pqgwin previousindex princomp printfm printfmt prodc psi putarray putf putvals pvCreate pvGetIndex pvGetParNames ' + 'pvGetParVector pvLength pvList pvPack pvPacki pvPackm pvPackmi pvPacks pvPacksi pvPacksm pvPacksmi pvPutParVector ' + 'pvTest pvUnpack QNewton QNewtonmt QNewtonmtControlCreate QNewtonmtOutCreate QNewtonSet QProg QProgmt QProgmtInCreate ' + 'qqr qqre qqrep qr qre qrep qrsol qrtsol qtyr qtyre qtyrep quantile quantiled qyr qyre qyrep qz rank rankindx readr ' + 'real reclassify reclassifyCuts recode recserar recsercp recserrc rerun rescale reshape rets rev rfft rffti rfftip rfftn ' + 'rfftnp 
rfftp rndBernoulli rndBeta rndBinomial rndCauchy rndChiSquare rndCon rndCreateState rndExp rndGamma rndGeo rndGumbel ' + 'rndHyperGeo rndi rndKMbeta rndKMgam rndKMi rndKMn rndKMnb rndKMp rndKMu rndKMvm rndLaplace rndLCbeta rndLCgam rndLCi rndLCn ' + 'rndLCnb rndLCp rndLCu rndLCvm rndLogNorm rndMTu rndMVn rndMVt rndn rndnb rndNegBinomial rndp rndPoisson rndRayleigh ' + 'rndStateSkip rndu rndvm rndWeibull rndWishart rotater round rows rowsf rref sampleData satostrC saved saveStruct savewind ' + 'scale scale3d scalerr scalinfnanmiss scalmiss schtoc schur searchsourcepath seekr select selif seqa seqm setdif setdifsa ' + 'setvars setvwrmode setwind shell shiftr sin singleindex sinh sleep solpd sortc sortcc sortd sorthc sorthcc sortind ' + 'sortindc sortmc sortr sortrc spBiconjGradSol spChol spConjGradSol spCreate spDenseSubmat spDiagRvMat spEigv spEye spLDL ' + 'spline spLU spNumNZE spOnes spreadSheetReadM spreadSheetReadSA spreadSheetWrite spScale spSubmat spToDense spTrTDense ' + 'spTScalar spZeros sqpSolve sqpSolveMT sqpSolveMTControlCreate sqpSolveMTlagrangeCreate sqpSolveMToutCreate sqpSolveSet ' + 'sqrt statements stdc stdsc stocv stof strcombine strindx strlen strput strrindx strsect strsplit strsplitPad strtodt ' + 'strtof strtofcplx strtriml strtrimr strtrunc strtruncl strtruncpad strtruncr submat subscat substute subvec sumc sumr ' + 'surface svd svd1 svd2 svdcusv svds svdusv sysstate tab tan tanh tempname threadBegin threadEnd threadEndFor threadFor ' + 'threadJoin threadStat time timedt timestr timeutc title tkf2eps tkf2ps tocart todaydt toeplitz token topolar trapchk ' + 'trigamma trimr trunc type typecv typef union unionsa uniqindx uniqindxsa unique uniquesa upmat upmat1 upper utctodt ' + 'utctodtv utrisol vals varCovMS varCovXS varget vargetl varmall varmares varput varputl vartypef vcm vcms vcx vcxs ' + 'vec vech vecr vector vget view viewxyz vlist vnamecv volume vput vread vtypecv wait waitc walkindex where window ' + 'writer xlabel xlsGetSheetCount xlsGetSheetSize xlsGetSheetTypes xlsMakeRange xlsReadM xlsReadSA xlsWrite xlsWriteM ' + 'xlsWriteSA xpnd xtics xy xyz ylabel ytics zeros zeta zlabel ztics', literal: 'DB_AFTER_LAST_ROW DB_ALL_TABLES DB_BATCH_OPERATIONS DB_BEFORE_FIRST_ROW DB_BLOB DB_EVENT_NOTIFICATIONS ' + 'DB_FINISH_QUERY DB_HIGH_PRECISION DB_LAST_INSERT_ID DB_LOW_PRECISION_DOUBLE DB_LOW_PRECISION_INT32 ' + 'DB_LOW_PRECISION_INT64 DB_LOW_PRECISION_NUMBERS DB_MULTIPLE_RESULT_SETS DB_NAMED_PLACEHOLDERS ' + 'DB_POSITIONAL_PLACEHOLDERS DB_PREPARED_QUERIES DB_QUERY_SIZE DB_SIMPLE_LOCKING DB_SYSTEM_TABLES DB_TABLES ' + 'DB_TRANSACTIONS DB_UNICODE DB_VIEWS' }; var PREPROCESSOR = { className: 'meta', begin: '#', end: '$', keywords: {'meta-keyword': 'define definecs|10 undef ifdef ifndef iflight ifdllcall ifmac ifos2win ifunix else endif lineson linesoff srcfile srcline'}, contains: [ { begin: /\\\n/, relevance: 0 }, { beginKeywords: 'include', end: '$', keywords: {'meta-keyword': 'include'}, contains: [ { className: 'meta-string', begin: '"', end: '"', illegal: '\\n' } ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }; var FUNCTION_TITLE = hljs.UNDERSCORE_IDENT_RE + '\\s*\\(?'; var PARSE_PARAMS = [ { className: 'params', begin: /\(/, end: /\)/, keywords: KEYWORDS, relevance: 0, contains: [ hljs.C_NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] } ]; return { aliases: ['gss'], case_insensitive: true, // language is case-insensitive keywords: KEYWORDS, illegal: '(\\{[%#]|[%#]\\})', contains: [ hljs.C_NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, 
hljs.C_BLOCK_COMMENT_MODE, hljs.COMMENT('@', '@'), PREPROCESSOR, { className: 'string', begin: '"', end: '"', contains: [hljs.BACKSLASH_ESCAPE] }, { className: 'function', beginKeywords: 'proc keyword', end: ';', excludeEnd: true, keywords: KEYWORDS, contains: [ { begin: FUNCTION_TITLE, returnBegin: true, contains: [hljs.UNDERSCORE_TITLE_MODE], relevance: 0 }, hljs.C_NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, PREPROCESSOR ].concat(PARSE_PARAMS) }, { className: 'function', beginKeywords: 'fn', end: ';', excludeEnd: true, keywords: KEYWORDS, contains: [ { begin: FUNCTION_TITLE + hljs.IDENT_RE + '\\)?\\s*\\=\\s*', returnBegin: true, contains: [hljs.UNDERSCORE_TITLE_MODE], relevance: 0 }, hljs.C_NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ].concat(PARSE_PARAMS) }, { className: 'function', begin: '\\bexternal (proc|keyword|fn)\\s+', end: ';', excludeEnd: true, keywords: KEYWORDS, contains: [ { begin: FUNCTION_TITLE, returnBegin: true, contains: [hljs.UNDERSCORE_TITLE_MODE], relevance: 0 }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, { className: 'function', begin: '\\bexternal (matrix|string|array|sparse matrix|struct ' + hljs.IDENT_RE + ')\\s+', end: ';', excludeEnd: true, keywords: KEYWORDS, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] } ] }; }; /***/ }, /* 234 */ /***/ function(module, exports) { module.exports = function(hljs) { var GCODE_IDENT_RE = '[A-Z_][A-Z0-9_.]*'; var GCODE_CLOSE_RE = '\\%'; var GCODE_KEYWORDS = 'IF DO WHILE ENDWHILE CALL ENDIF SUB ENDSUB GOTO REPEAT ENDREPEAT ' + 'EQ LT GT NE GE LE OR XOR'; var GCODE_START = { className: 'meta', begin: '([O])([0-9]+)' }; var GCODE_CODE = [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.COMMENT(/\(/, /\)/), hljs.inherit(hljs.C_NUMBER_MODE, {begin: '([-+]?([0-9]*\\.?[0-9]+\\.?))|' + hljs.C_NUMBER_RE}), hljs.inherit(hljs.APOS_STRING_MODE, {illegal: null}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}), { className: 'name', begin: '([G])([0-9]+\\.?[0-9]?)' }, { className: 'name', begin: '([M])([0-9]+\\.?[0-9]?)' }, { className: 'attr', begin: '(VC|VS|#)', end: '(\\d+)' }, { className: 'attr', begin: '(VZOFX|VZOFY|VZOFZ)' }, { className: 'built_in', begin: '(ATAN|ABS|ACOS|ASIN|SIN|COS|EXP|FIX|FUP|ROUND|LN|TAN)(\\[)', end: '([-+]?([0-9]*\\.?[0-9]+\\.?))(\\])' }, { className: 'symbol', variants: [ { begin: 'N', end: '\\d+', illegal: '\\W' } ] } ]; return { aliases: ['nc'], // Some implementations (CNC controls) of G-code are interoperable with uppercase and lowercase letters seamlessly. // However, most prefer all uppercase and uppercase is customary. 
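/*
 * Usage sketch: a minimal example of how one of these bundled language
 * definitions is typically consumed, assuming a stock highlight.js build
 * where this module is registered as 'gcode' (the require paths and the
 * language name are illustrative, not taken from this bundle):
 *
 *   var hljs = require('highlight.js/lib/highlight');
 *   hljs.registerLanguage('gcode', require('highlight.js/lib/languages/gcode'));
 *   var html = hljs.highlight('gcode', 'N10 G01 X10. Y-20. F300').value;
 */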
case_insensitive: true, lexemes: GCODE_IDENT_RE, keywords: GCODE_KEYWORDS, contains: [ { className: 'meta', begin: GCODE_CLOSE_RE }, GCODE_START ].concat(GCODE_CODE) }; }; /***/ }, /* 235 */ /***/ function(module, exports) { module.exports = function (hljs) { return { aliases: ['feature'], keywords: 'Feature Background Ability Business\ Need Scenario Scenarios Scenario\ Outline Scenario\ Template Examples Given And Then But When', contains: [ { className: 'symbol', begin: '\\*', relevance: 0 }, { className: 'meta', begin: '@[^@\\s]+' }, { begin: '\\|', end: '\\|\\w*$', contains: [ { className: 'string', begin: '[^|]+' } ] }, { className: 'variable', begin: '<', end: '>' }, hljs.HASH_COMMENT_MODE, { className: 'string', begin: '"""', end: '"""' }, hljs.QUOTE_STRING_MODE ] }; }; /***/ }, /* 236 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: // Statements 'break continue discard do else for if return while switch case default ' + // Qualifiers 'attribute binding buffer ccw centroid centroid varying coherent column_major const cw ' + 'depth_any depth_greater depth_less depth_unchanged early_fragment_tests equal_spacing ' + 'flat fractional_even_spacing fractional_odd_spacing highp in index inout invariant ' + 'invocations isolines layout line_strip lines lines_adjacency local_size_x local_size_y ' + 'local_size_z location lowp max_vertices mediump noperspective offset origin_upper_left ' + 'out packed patch pixel_center_integer point_mode points precise precision quads r11f_g11f_b10f '+ 'r16 r16_snorm r16f r16i r16ui r32f r32i r32ui r8 r8_snorm r8i r8ui readonly restrict ' + 'rg16 rg16_snorm rg16f rg16i rg16ui rg32f rg32i rg32ui rg8 rg8_snorm rg8i rg8ui rgb10_a2 ' + 'rgb10_a2ui rgba16 rgba16_snorm rgba16f rgba16i rgba16ui rgba32f rgba32i rgba32ui rgba8 ' + 'rgba8_snorm rgba8i rgba8ui row_major sample shared smooth std140 std430 stream triangle_strip ' + 'triangles triangles_adjacency uniform varying vertices volatile writeonly', type: 'atomic_uint bool bvec2 bvec3 bvec4 dmat2 dmat2x2 dmat2x3 dmat2x4 dmat3 dmat3x2 dmat3x3 ' + 'dmat3x4 dmat4 dmat4x2 dmat4x3 dmat4x4 double dvec2 dvec3 dvec4 float iimage1D iimage1DArray ' + 'iimage2D iimage2DArray iimage2DMS iimage2DMSArray iimage2DRect iimage3D iimageBuffer' + 'iimageCube iimageCubeArray image1D image1DArray image2D image2DArray image2DMS image2DMSArray ' + 'image2DRect image3D imageBuffer imageCube imageCubeArray int isampler1D isampler1DArray ' + 'isampler2D isampler2DArray isampler2DMS isampler2DMSArray isampler2DRect isampler3D ' + 'isamplerBuffer isamplerCube isamplerCubeArray ivec2 ivec3 ivec4 mat2 mat2x2 mat2x3 ' + 'mat2x4 mat3 mat3x2 mat3x3 mat3x4 mat4 mat4x2 mat4x3 mat4x4 sampler1D sampler1DArray ' + 'sampler1DArrayShadow sampler1DShadow sampler2D sampler2DArray sampler2DArrayShadow ' + 'sampler2DMS sampler2DMSArray sampler2DRect sampler2DRectShadow sampler2DShadow sampler3D ' + 'samplerBuffer samplerCube samplerCubeArray samplerCubeArrayShadow samplerCubeShadow ' + 'image1D uimage1DArray uimage2D uimage2DArray uimage2DMS uimage2DMSArray uimage2DRect ' + 'uimage3D uimageBuffer uimageCube uimageCubeArray uint usampler1D usampler1DArray ' + 'usampler2D usampler2DArray usampler2DMS usampler2DMSArray usampler2DRect usampler3D ' + 'samplerBuffer usamplerCube usamplerCubeArray uvec2 uvec3 uvec4 vec2 vec3 vec4 void', built_in: // Constants 'gl_MaxAtomicCounterBindings gl_MaxAtomicCounterBufferSize gl_MaxClipDistances gl_MaxClipPlanes ' + 'gl_MaxCombinedAtomicCounterBuffers 
gl_MaxCombinedAtomicCounters gl_MaxCombinedImageUniforms ' + 'gl_MaxCombinedImageUnitsAndFragmentOutputs gl_MaxCombinedTextureImageUnits gl_MaxComputeAtomicCounterBuffers ' + 'gl_MaxComputeAtomicCounters gl_MaxComputeImageUniforms gl_MaxComputeTextureImageUnits ' + 'gl_MaxComputeUniformComponents gl_MaxComputeWorkGroupCount gl_MaxComputeWorkGroupSize ' + 'gl_MaxDrawBuffers gl_MaxFragmentAtomicCounterBuffers gl_MaxFragmentAtomicCounters ' + 'gl_MaxFragmentImageUniforms gl_MaxFragmentInputComponents gl_MaxFragmentInputVectors ' + 'gl_MaxFragmentUniformComponents gl_MaxFragmentUniformVectors gl_MaxGeometryAtomicCounterBuffers ' + 'gl_MaxGeometryAtomicCounters gl_MaxGeometryImageUniforms gl_MaxGeometryInputComponents ' + 'gl_MaxGeometryOutputComponents gl_MaxGeometryOutputVertices gl_MaxGeometryTextureImageUnits ' + 'gl_MaxGeometryTotalOutputComponents gl_MaxGeometryUniformComponents gl_MaxGeometryVaryingComponents ' + 'gl_MaxImageSamples gl_MaxImageUnits gl_MaxLights gl_MaxPatchVertices gl_MaxProgramTexelOffset ' + 'gl_MaxTessControlAtomicCounterBuffers gl_MaxTessControlAtomicCounters gl_MaxTessControlImageUniforms ' + 'gl_MaxTessControlInputComponents gl_MaxTessControlOutputComponents gl_MaxTessControlTextureImageUnits ' + 'gl_MaxTessControlTotalOutputComponents gl_MaxTessControlUniformComponents ' + 'gl_MaxTessEvaluationAtomicCounterBuffers gl_MaxTessEvaluationAtomicCounters ' + 'gl_MaxTessEvaluationImageUniforms gl_MaxTessEvaluationInputComponents gl_MaxTessEvaluationOutputComponents ' + 'gl_MaxTessEvaluationTextureImageUnits gl_MaxTessEvaluationUniformComponents ' + 'gl_MaxTessGenLevel gl_MaxTessPatchComponents gl_MaxTextureCoords gl_MaxTextureImageUnits ' + 'gl_MaxTextureUnits gl_MaxVaryingComponents gl_MaxVaryingFloats gl_MaxVaryingVectors ' + 'gl_MaxVertexAtomicCounterBuffers gl_MaxVertexAtomicCounters gl_MaxVertexAttribs gl_MaxVertexImageUniforms ' + 'gl_MaxVertexOutputComponents gl_MaxVertexOutputVectors gl_MaxVertexTextureImageUnits ' + 'gl_MaxVertexUniformComponents gl_MaxVertexUniformVectors gl_MaxViewports gl_MinProgramTexelOffset ' + // Variables 'gl_BackColor gl_BackLightModelProduct gl_BackLightProduct gl_BackMaterial ' + 'gl_BackSecondaryColor gl_ClipDistance gl_ClipPlane gl_ClipVertex gl_Color ' + 'gl_DepthRange gl_EyePlaneQ gl_EyePlaneR gl_EyePlaneS gl_EyePlaneT gl_Fog gl_FogCoord ' + 'gl_FogFragCoord gl_FragColor gl_FragCoord gl_FragData gl_FragDepth gl_FrontColor ' + 'gl_FrontFacing gl_FrontLightModelProduct gl_FrontLightProduct gl_FrontMaterial ' + 'gl_FrontSecondaryColor gl_GlobalInvocationID gl_InstanceID gl_InvocationID gl_Layer gl_LightModel ' + 'gl_LightSource gl_LocalInvocationID gl_LocalInvocationIndex gl_ModelViewMatrix ' + 'gl_ModelViewMatrixInverse gl_ModelViewMatrixInverseTranspose gl_ModelViewMatrixTranspose ' + 'gl_ModelViewProjectionMatrix gl_ModelViewProjectionMatrixInverse gl_ModelViewProjectionMatrixInverseTranspose ' + 'gl_ModelViewProjectionMatrixTranspose gl_MultiTexCoord0 gl_MultiTexCoord1 gl_MultiTexCoord2 ' + 'gl_MultiTexCoord3 gl_MultiTexCoord4 gl_MultiTexCoord5 gl_MultiTexCoord6 gl_MultiTexCoord7 ' + 'gl_Normal gl_NormalMatrix gl_NormalScale gl_NumSamples gl_NumWorkGroups gl_ObjectPlaneQ ' + 'gl_ObjectPlaneR gl_ObjectPlaneS gl_ObjectPlaneT gl_PatchVerticesIn gl_Point gl_PointCoord ' + 'gl_PointSize gl_Position gl_PrimitiveID gl_PrimitiveIDIn gl_ProjectionMatrix gl_ProjectionMatrixInverse ' + 'gl_ProjectionMatrixInverseTranspose gl_ProjectionMatrixTranspose gl_SampleID gl_SampleMask ' + 'gl_SampleMaskIn gl_SamplePosition gl_SecondaryColor 
gl_TessCoord gl_TessLevelInner gl_TessLevelOuter ' + 'gl_TexCoord gl_TextureEnvColor gl_TextureMatrix gl_TextureMatrixInverse gl_TextureMatrixInverseTranspose ' + 'gl_TextureMatrixTranspose gl_Vertex gl_VertexID gl_ViewportIndex gl_WorkGroupID gl_WorkGroupSize gl_in gl_out ' + // Functions 'EmitStreamVertex EmitVertex EndPrimitive EndStreamPrimitive abs acos acosh all any asin ' + 'asinh atan atanh atomicAdd atomicAnd atomicCompSwap atomicCounter atomicCounterDecrement ' + 'atomicCounterIncrement atomicExchange atomicMax atomicMin atomicOr atomicXor barrier ' + 'bitCount bitfieldExtract bitfieldInsert bitfieldReverse ceil clamp cos cosh cross ' + 'dFdx dFdy degrees determinant distance dot equal exp exp2 faceforward findLSB findMSB ' + 'floatBitsToInt floatBitsToUint floor fma fract frexp ftransform fwidth greaterThan ' + 'greaterThanEqual groupMemoryBarrier imageAtomicAdd imageAtomicAnd imageAtomicCompSwap ' + 'imageAtomicExchange imageAtomicMax imageAtomicMin imageAtomicOr imageAtomicXor imageLoad ' + 'imageSize imageStore imulExtended intBitsToFloat interpolateAtCentroid interpolateAtOffset ' + 'interpolateAtSample inverse inversesqrt isinf isnan ldexp length lessThan lessThanEqual log ' + 'log2 matrixCompMult max memoryBarrier memoryBarrierAtomicCounter memoryBarrierBuffer ' + 'memoryBarrierImage memoryBarrierShared min mix mod modf noise1 noise2 noise3 noise4 ' + 'normalize not notEqual outerProduct packDouble2x32 packHalf2x16 packSnorm2x16 packSnorm4x8 ' + 'packUnorm2x16 packUnorm4x8 pow radians reflect refract round roundEven shadow1D shadow1DLod ' + 'shadow1DProj shadow1DProjLod shadow2D shadow2DLod shadow2DProj shadow2DProjLod sign sin sinh ' + 'smoothstep sqrt step tan tanh texelFetch texelFetchOffset texture texture1D texture1DLod ' + 'texture1DProj texture1DProjLod texture2D texture2DLod texture2DProj texture2DProjLod ' + 'texture3D texture3DLod texture3DProj texture3DProjLod textureCube textureCubeLod ' + 'textureGather textureGatherOffset textureGatherOffsets textureGrad textureGradOffset ' + 'textureLod textureLodOffset textureOffset textureProj textureProjGrad textureProjGradOffset ' + 'textureProjLod textureProjLodOffset textureProjOffset textureQueryLevels textureQueryLod ' + 'textureSize transpose trunc uaddCarry uintBitsToFloat umulExtended unpackDouble2x32 ' + 'unpackHalf2x16 unpackSnorm2x16 unpackSnorm4x8 unpackUnorm2x16 unpackUnorm4x8 usubBorrow', literal: 'true false' }, illegal: '"', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.C_NUMBER_MODE, { className: 'meta', begin: '#', end: '$' } ] }; }; /***/ }, /* 237 */ /***/ function(module, exports) { module.exports = function(hljs) { var GO_KEYWORDS = { keyword: 'break default func interface select case map struct chan else goto package switch ' + 'const fallthrough if range type continue for import return var go defer ' + 'bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 ' + 'uint16 uint32 uint64 int uint uintptr rune', literal: 'true false iota nil', built_in: 'append cap close complex copy imag len make new panic print println real recover delete' }; return { aliases: ['golang'], keywords: GO_KEYWORDS, illegal: '</', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'string', variants: [ hljs.QUOTE_STRING_MODE, {begin: '\'', end: '[^\\\\]\''}, {begin: '`', end: '`'}, ] }, { className: 'number', variants: [ {begin: hljs.C_NUMBER_RE + '[dflsi]', relevance: 1}, hljs.C_NUMBER_MODE ] }, { begin: /:=/ // relevance booster }, { className: 
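// `func` declarations, highlighted up to (but excluding) the opening brace;
// the params mode below treats quote characters as illegal: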
'function', beginKeywords: 'func', end: /\s*\{/, excludeEnd: true, contains: [ hljs.TITLE_MODE, { className: 'params', begin: /\(/, end: /\)/, keywords: GO_KEYWORDS, illegal: /["']/ } ] } ] }; }; /***/ }, /* 238 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: 'println readln print import module function local return let var ' + 'while for foreach times in case when match with break continue ' + 'augment augmentation each find filter reduce ' + 'if then else otherwise try catch finally raise throw orIfNull ' + 'DynamicObject|10 DynamicVariable struct Observable map set vector list array', literal: 'true false null' }, contains: [ hljs.HASH_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, { className: 'meta', begin: '@[A-Za-z]+' } ] } }; /***/ }, /* 239 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: true, keywords: { keyword: 'task project allprojects subprojects artifacts buildscript configurations ' + 'dependencies repositories sourceSets description delete from into include ' + 'exclude source classpath destinationDir includes options sourceCompatibility ' + 'targetCompatibility group flatDir doLast doFirst flatten todir fromdir ant ' + 'def abstract break case catch continue default do else extends final finally ' + 'for if implements instanceof native new private protected public return static ' + 'switch synchronized throw throws transient try volatile while strictfp package ' + 'import false null super this true antlrtask checkstyle codenarc copy boolean ' + 'byte char class double float int interface long short void compile runTime ' + 'file fileTree abs any append asList asWritable call collect compareTo count ' + 'div dump each eachByte eachFile eachLine every find findAll flatten getAt ' + 'getErr getIn getOut getText grep immutable inject inspect intersect invokeMethods ' + 'isCase join leftShift minus multiply newInputStream newOutputStream newPrintWriter ' + 'newReader newWriter next plus pop power previous print println push putAt read ' + 'readBytes readLines reverse reverseEach round size sort splitEachLine step subMap ' + 'times toInteger toList tokenize upto waitForOrKill withPrintWriter withReader ' + 'withStream withWriter withWriterAppend write writeLine' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE, hljs.REGEXP_MODE ] } }; /***/ }, /* 240 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { literal : 'true false null', keyword: 'byte short char int long boolean float double void ' + // groovy specific keywords 'def as in assert trait ' + // common keywords with Java 'super this abstract static volatile transient public private protected synchronized final ' + 'class interface enum if else for while switch case break default continue ' + 'throw throws try catch finally implements extends new import package return instanceof' }, contains: [ hljs.COMMENT( '/\\*\\*', '\\*/', { relevance : 0, contains : [ { // eat up @'s in emails to prevent them to be recognized as doctags begin: /\w+@/, relevance: 0 }, { className : 'doctag', begin : '@[A-Za-z]+' } ] } ), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'string', begin: '"""', end: '"""' }, { className: 'string', begin: "'''", end: "'''" }, { className: 'string', begin: "\\$/", end: "/\\$", relevance: 10 }, hljs.APOS_STRING_MODE, { className: 'regexp', begin: 
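// slashy pattern literals such as /pattern/ or ~/pattern/ (no embedded newlines):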
/~?\/[^\/\n]+\//, contains: [ hljs.BACKSLASH_ESCAPE ] }, hljs.QUOTE_STRING_MODE, { className: 'meta', begin: "^#!/usr/bin/env", end: '$', illegal: '\n' }, hljs.BINARY_NUMBER_MODE, { className: 'class', beginKeywords: 'class interface trait enum', end: '{', illegal: ':', contains: [ {beginKeywords: 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE ] }, hljs.C_NUMBER_MODE, { className: 'meta', begin: '@[A-Za-z]+' }, { // highlight map keys and named parameters as strings className: 'string', begin: /[^\?]{0}[A-Za-z0-9_$]+ *:/ }, { // catch middle element of the ternary operator // to avoid highlight it as a label, named parameter, or map key begin: /\?/, end: /\:/ }, { // highlight labeled statements className: 'symbol', begin: '^\\s*[A-Za-z0-9_$]+:', relevance: 0 } ], illegal: /#|<\// } }; /***/ }, /* 241 */ /***/ function(module, exports) { module.exports = // TODO support filter tags like :javascript, support inline HTML function(hljs) { return { case_insensitive: true, contains: [ { className: 'meta', begin: '^!!!( (5|1\\.1|Strict|Frameset|Basic|Mobile|RDFa|XML\\b.*))?$', relevance: 10 }, // FIXME these comments should be allowed to span indented lines hljs.COMMENT( '^\\s*(!=#|=#|-#|/).*$', false, { relevance: 0 } ), { begin: '^\\s*(-|=|!=)(?!#)', starts: { end: '\\n', subLanguage: 'ruby' } }, { className: 'tag', begin: '^\\s*%', contains: [ { className: 'selector-tag', begin: '\\w+' }, { className: 'selector-id', begin: '#[\\w-]+' }, { className: 'selector-class', begin: '\\.[\\w-]+' }, { begin: '{\\s*', end: '\\s*}', contains: [ { begin: ':\\w+\\s*=>', end: ',\\s+', returnBegin: true, endsWithParent: true, contains: [ { className: 'attr', begin: ':\\w+' }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { begin: '\\w+', relevance: 0 } ] } ] }, { begin: '\\(\\s*', end: '\\s*\\)', excludeEnd: true, contains: [ { begin: '\\w+\\s*=', end: '\\s+', returnBegin: true, endsWithParent: true, contains: [ { className: 'attr', begin: '\\w+', relevance: 0 }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { begin: '\\w+', relevance: 0 } ] } ] } ] }, { begin: '^\\s*[=~]\\s*' }, { begin: '#{', starts: { end: '}', subLanguage: 'ruby' } } ] }; }; /***/ }, /* 242 */ /***/ function(module, exports) { module.exports = function(hljs) { var BUILT_INS = {'builtin-name': 'each in with if else unless bindattr action collection debugger log outlet template unbound view yield'}; return { aliases: ['hbs', 'html.hbs', 'html.handlebars'], case_insensitive: true, subLanguage: 'xml', contains: [ hljs.COMMENT('{{!(--)?', '(--)?}}'), { className: 'template-tag', begin: /\{\{[#\/]/, end: /\}\}/, contains: [ { className: 'name', begin: /[a-zA-Z\.-]+/, keywords: BUILT_INS, starts: { endsWithParent: true, relevance: 0, contains: [ hljs.QUOTE_STRING_MODE ] } } ] }, { className: 'template-variable', begin: /\{\{/, end: /\}\}/, keywords: BUILT_INS } ] }; }; /***/ }, /* 243 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMENT = { variants: [ hljs.COMMENT('--', '$'), hljs.COMMENT( '{-', '-}', { contains: ['self'] } ) ] }; var PRAGMA = { className: 'meta', begin: '{-#', end: '#-}' }; var PREPROCESSOR = { className: 'meta', begin: '^#', end: '$' }; var CONSTRUCTOR = { className: 'type', begin: '\\b[A-Z][\\w\']*', // TODO: other constructors (build-in, infix). 
relevance: 0 }; var LIST = { begin: '\\(', end: '\\)', illegal: '"', contains: [ PRAGMA, PREPROCESSOR, {className: 'type', begin: '\\b[A-Z][\\w]*(\\((\\.\\.|,|\\w+)\\))?'}, hljs.inherit(hljs.TITLE_MODE, {begin: '[_a-z][\\w\']*'}), COMMENT ] }; var RECORD = { begin: '{', end: '}', contains: LIST.contains }; return { aliases: ['hs'], keywords: 'let in if then else case of where do module import hiding ' + 'qualified type data newtype deriving class instance as default ' + 'infix infixl infixr foreign export ccall stdcall cplusplus ' + 'jvm dotnet safe unsafe family forall mdo proc rec', contains: [ // Top-level constructions. { beginKeywords: 'module', end: 'where', keywords: 'module where', contains: [LIST, COMMENT], illegal: '\\W\\.|;' }, { begin: '\\bimport\\b', end: '$', keywords: 'import qualified as hiding', contains: [LIST, COMMENT], illegal: '\\W\\.|;' }, { className: 'class', begin: '^(\\s*)?(class|instance)\\b', end: 'where', keywords: 'class family instance where', contains: [CONSTRUCTOR, LIST, COMMENT] }, { className: 'class', begin: '\\b(data|(new)?type)\\b', end: '$', keywords: 'data family type newtype deriving', contains: [PRAGMA, CONSTRUCTOR, LIST, RECORD, COMMENT] }, { beginKeywords: 'default', end: '$', contains: [CONSTRUCTOR, LIST, COMMENT] }, { beginKeywords: 'infix infixl infixr', end: '$', contains: [hljs.C_NUMBER_MODE, COMMENT] }, { begin: '\\bforeign\\b', end: '$', keywords: 'foreign import export ccall stdcall cplusplus jvm ' + 'dotnet safe unsafe', contains: [CONSTRUCTOR, hljs.QUOTE_STRING_MODE, COMMENT] }, { className: 'meta', begin: '#!\\/usr\\/bin\\/env\ runhaskell', end: '$' }, // "Whitespaces". PRAGMA, PREPROCESSOR, // Literals and names. // TODO: characters. hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, CONSTRUCTOR, hljs.inherit(hljs.TITLE_MODE, {begin: '^[_a-z][\\w\']*'}), COMMENT, {begin: '->|<-'} // No markup, relevance booster ] }; }; /***/ }, /* 244 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '[a-zA-Z_$][a-zA-Z0-9_$]*'; var IDENT_FUNC_RETURN_TYPE_RE = '([*]|[a-zA-Z_$][a-zA-Z0-9_$]*)'; var HAXE_BASIC_TYPES = 'Int Float String Bool Dynamic Void Array '; return { aliases: ['hx'], keywords: { keyword: 'break callback case cast catch continue default do dynamic else enum extern ' + 'for function here if import in inline never new override package private get set ' + 'public return static super switch this throw trace try typedef untyped using var while ' + HAXE_BASIC_TYPES, built_in: 'trace this', literal: 'true false null _' }, contains: [ { className: 'string', // interpolate-able strings begin: '\'', end: '\'', contains: [ hljs.BACKSLASH_ESCAPE, { className: 'subst', // interpolation begin: '\\$\\{', end: '\\}' }, { className: 'subst', // interpolation begin: '\\$', end: '\\W}' } ] }, hljs.QUOTE_STRING_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.C_NUMBER_MODE, { className: 'meta', // compiler meta begin: '@:', end: '$' }, { className: 'meta', // compiler conditionals begin: '#', end: '$', keywords: {'meta-keyword': 'if else elseif end error'} }, { className: 'type', // function types begin: ':[ \t]*', end: '[^A-Za-z0-9_ \t\\->]', excludeBegin: true, excludeEnd: true, relevance: 0 }, { className: 'type', // types begin: ':[ \t]*', end: '\\W', excludeBegin: true, excludeEnd: true }, { className: 'type', // instantiation begin: 'new *', end: '\\W', excludeBegin: true, excludeEnd: true }, { className: 'class', // enums beginKeywords: 'enum', end: '\\{', contains: [ hljs.TITLE_MODE ] }, { className: 
'class', // abstracts beginKeywords: 'abstract', end: '[\\{$]', contains: [ { className: 'type', begin: '\\(', end: '\\)', excludeBegin: true, excludeEnd: true }, { className: 'type', begin: 'from +', end: '\\W', excludeBegin: true, excludeEnd: true }, { className: 'type', begin: 'to +', end: '\\W', excludeBegin: true, excludeEnd: true }, hljs.TITLE_MODE ], keywords: { keyword: 'abstract from to' } }, { className: 'class', // classes begin: '\\b(class|interface) +', end: '[\\{$]', excludeEnd: true, keywords: 'class interface', contains: [ { className: 'keyword', begin: '\\b(extends|implements) +', keywords: 'extends implements', contains: [ { className: 'type', begin: hljs.IDENT_RE, relevance: 0 } ] }, hljs.TITLE_MODE ] }, { className: 'function', beginKeywords: 'function', end: '\\(', excludeEnd: true, illegal: '\\S', contains: [ hljs.TITLE_MODE ] } ], illegal: /<\// }; }; /***/ }, /* 245 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: true, lexemes: /[\w\._]+/, keywords: 'goto gosub return break repeat loop continue wait await dim sdim foreach dimtype dup dupptr end stop newmod delmod mref run exgoto on mcall assert logmes newlab resume yield onexit onerror onkey onclick oncmd exist delete mkdir chdir dirlist bload bsave bcopy memfile if else poke wpoke lpoke getstr chdpm memexpand memcpy memset notesel noteadd notedel noteload notesave randomize noteunsel noteget split strrep setease button chgdisp exec dialog mmload mmplay mmstop mci pset pget syscolor mes print title pos circle cls font sysfont objsize picload color palcolor palette redraw width gsel gcopy gzoom gmode bmpsave hsvcolor getkey listbox chkbox combox input mesbox buffer screen bgscr mouse objsel groll line clrobj boxf objprm objmode stick grect grotate gsquare gradf objimage objskip objenable celload celdiv celput newcom querycom delcom cnvstow comres axobj winobj sendmsg comevent comevarg sarrayconv callfunc cnvwtos comevdisp libptr system hspstat hspver stat cnt err strsize looplev sublev iparam wparam lparam refstr refdval int rnd strlen length length2 length3 length4 vartype gettime peek wpeek lpeek varptr varuse noteinfo instr abs limit getease str strmid strf getpath strtrim sin cos tan atan sqrt double absf expf logf limitf powf geteasef mousex mousey mousew hwnd hinstance hdc ginfo objinfo dirinfo sysinfo thismod __hspver__ __hsp30__ __date__ __time__ __line__ __file__ _debug __hspdef__ and or xor not screen_normal screen_palette screen_hide screen_fixedsize screen_tool screen_frame gmode_gdi gmode_mem gmode_rgb0 gmode_alpha gmode_rgb0alpha gmode_add gmode_sub gmode_pixela ginfo_mx ginfo_my ginfo_act ginfo_sel ginfo_wx1 ginfo_wy1 ginfo_wx2 ginfo_wy2 ginfo_vx ginfo_vy ginfo_sizex ginfo_sizey ginfo_winx ginfo_winy ginfo_mesx ginfo_mesy ginfo_r ginfo_g ginfo_b ginfo_paluse ginfo_dispx ginfo_dispy ginfo_cx ginfo_cy ginfo_intid ginfo_newid ginfo_sx ginfo_sy objinfo_mode objinfo_bmscr objinfo_hwnd notemax notesize dir_cur dir_exe dir_win dir_sys dir_cmdline dir_desktop dir_mydoc dir_tv font_normal font_bold font_italic font_underline font_strikeout font_antialias objmode_normal objmode_guifont objmode_usefont gsquare_grad msgothic msmincho do until while wend for next _break _continue switch case default swbreak swend ddim ldim alloc m_pi rad2deg deg2rad ease_linear ease_quad_in ease_quad_out ease_quad_inout ease_cubic_in ease_cubic_out ease_cubic_inout ease_quartic_in ease_quartic_out ease_quartic_inout ease_bounce_in ease_bounce_out ease_bounce_inout ease_shake_in 
ease_shake_out ease_shake_inout ease_loop', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, { // multi-line string className: 'string', begin: '{"', end: '"}', contains: [hljs.BACKSLASH_ESCAPE] }, hljs.COMMENT(';', '$', {relevance: 0}), { // pre-processor className: 'meta', begin: '#', end: '$', keywords: {'meta-keyword': 'addion cfunc cmd cmpopt comfunc const defcfunc deffunc define else endif enum epack func global if ifdef ifndef include modcfunc modfunc modinit modterm module pack packopt regcmd runtime undef usecom uselib'}, contains: [ hljs.inherit(hljs.QUOTE_STRING_MODE, {className: 'meta-string'}), hljs.NUMBER_MODE, hljs.C_NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, { // label className: 'symbol', begin: '^\\*(\\w+|@)' }, hljs.NUMBER_MODE, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 246 */ /***/ function(module, exports) { module.exports = function(hljs) { var BUILT_INS = 'action collection component concat debugger each each-in else get hash if input link-to loc log mut outlet partial query-params render textarea unbound unless with yield view'; var ATTR_ASSIGNMENT = { illegal: /\}\}/, begin: /[a-zA-Z0-9_]+=/, returnBegin: true, relevance: 0, contains: [ { className: 'attr', begin: /[a-zA-Z0-9_]+/ } ] }; var SUB_EXPR = { illegal: /\}\}/, begin: /\)/, end: /\)/, contains: [ { begin: /[a-zA-Z\.\-]+/, keywords: {built_in: BUILT_INS}, starts: { endsWithParent: true, relevance: 0, contains: [ hljs.QUOTE_STRING_MODE, ] } } ] }; var TAG_INNARDS = { endsWithParent: true, relevance: 0, keywords: {keyword: 'as', built_in: BUILT_INS}, contains: [ hljs.QUOTE_STRING_MODE, ATTR_ASSIGNMENT, hljs.NUMBER_MODE ] }; return { case_insensitive: true, subLanguage: 'xml', contains: [ hljs.COMMENT('{{!(--)?', '(--)?}}'), { className: 'template-tag', begin: /\{\{[#\/]/, end: /\}\}/, contains: [ { className: 'name', begin: /[a-zA-Z\.\-]+/, keywords: {'builtin-name': BUILT_INS}, starts: TAG_INNARDS } ] }, { className: 'template-variable', begin: /\{\{[a-zA-Z][a-zA-Z\-]+/, end: /\}\}/, keywords: {keyword: 'as', built_in: BUILT_INS}, contains: [ hljs.QUOTE_STRING_MODE ] } ] }; }; /***/ }, /* 247 */ /***/ function(module, exports) { module.exports = function(hljs) { var VERSION = 'HTTP/[0-9\\.]+'; return { aliases: ['https'], illegal: '\\S', contains: [ { begin: '^' + VERSION, end: '$', contains: [{className: 'number', begin: '\\b\\d{3}\\b'}] }, { begin: '^[A-Z]+ (.*?) ' + VERSION + '$', returnBegin: true, end: '$', contains: [ { className: 'string', begin: ' ', end: ' ', excludeBegin: true, excludeEnd: true }, { begin: VERSION }, { className: 'keyword', begin: '[A-Z]+' } ] }, { className: 'attribute', begin: '^\\w', end: ': ', excludeEnd: true, illegal: '\\n|\\s|=', starts: {end: '$', relevance: 0} }, { begin: '\\n\\n', starts: {subLanguage: [], endsWithParent: true} } ] }; }; /***/ }, /* 248 */ /***/ function(module, exports) { module.exports = function(hljs) { var START_BRACKET = '\\['; var END_BRACKET = '\\]'; return { aliases: ['i7'], case_insensitive: true, keywords: { // Some keywords more or less unique to I7, for relevance. 
keyword: // kind: 'thing room person man woman animal container ' + 'supporter backdrop door ' + // characteristic: 'scenery open closed locked inside gender ' + // verb: 'is are say understand ' + // misc keyword: 'kind of rule' }, contains: [ { className: 'string', begin: '"', end: '"', relevance: 0, contains: [ { className: 'subst', begin: START_BRACKET, end: END_BRACKET } ] }, { className: 'section', begin: /^(Volume|Book|Part|Chapter|Section|Table)\b/, end: '$' }, { // Rule definition // This is here for relevance. begin: /^(Check|Carry out|Report|Instead of|To|Rule|When|Before|After)\b/, end: ':', contains: [ { //Rule name begin: '\\(This', end: '\\)' } ] }, { className: 'comment', begin: START_BRACKET, end: END_BRACKET, contains: ['self'] } ] }; }; /***/ }, /* 249 */ /***/ function(module, exports) { module.exports = function(hljs) { var STRING = { className: "string", contains: [hljs.BACKSLASH_ESCAPE], variants: [ { begin: "'''", end: "'''", relevance: 10 }, { begin: '"""', end: '"""', relevance: 10 }, { begin: '"', end: '"' }, { begin: "'", end: "'" } ] }; return { aliases: ['toml'], case_insensitive: true, illegal: /\S/, contains: [ hljs.COMMENT(';', '$'), hljs.HASH_COMMENT_MODE, { className: 'section', begin: /^\s*\[+/, end: /\]+/ }, { begin: /^[a-z0-9\[\]_-]+\s*=\s*/, end: '$', returnBegin: true, contains: [ { className: 'attr', begin: /[a-z0-9\[\]_-]+/ }, { begin: /=/, endsWithParent: true, relevance: 0, contains: [ { className: 'literal', begin: /\bon|off|true|false|yes|no\b/ }, { className: 'variable', variants: [ {begin: /\$[\w\d"][\w\d_]*/}, {begin: /\$\{(.*?)}/} ] }, STRING, { className: 'number', begin: /([\+\-]+)?[\d]+_[\d_]+/ }, hljs.NUMBER_MODE ] } ] } ] }; }; /***/ }, /* 250 */ /***/ function(module, exports) { module.exports = function(hljs) { var PARAMS = { className: 'params', begin: '\\(', end: '\\)' }; var F_KEYWORDS = { literal: '.False. .True.', keyword: 'kind do while private call intrinsic where elsewhere ' + 'type endtype endmodule endselect endinterface end enddo endif if forall endforall only contains default return stop then ' + 'public subroutine|10 function program .and. .or. .not. .le. .eq. .ge. .gt. .lt. 
' + 'goto save else use module select case ' + 'access blank direct exist file fmt form formatted iostat name named nextrec number opened rec recl sequential status unformatted unit ' + 'continue format pause cycle exit ' + 'c_null_char c_alert c_backspace c_form_feed flush wait decimal round iomsg ' + 'synchronous nopass non_overridable pass protected volatile abstract extends import ' + 'non_intrinsic value deferred generic final enumerator class associate bind enum ' + 'c_int c_short c_long c_long_long c_signed_char c_size_t c_int8_t c_int16_t c_int32_t c_int64_t c_int_least8_t c_int_least16_t ' + 'c_int_least32_t c_int_least64_t c_int_fast8_t c_int_fast16_t c_int_fast32_t c_int_fast64_t c_intmax_t C_intptr_t c_float c_double ' + 'c_long_double c_float_complex c_double_complex c_long_double_complex c_bool c_char c_null_ptr c_null_funptr ' + 'c_new_line c_carriage_return c_horizontal_tab c_vertical_tab iso_c_binding c_loc c_funloc c_associated c_f_pointer ' + 'c_ptr c_funptr iso_fortran_env character_storage_size error_unit file_storage_size input_unit iostat_end iostat_eor ' + 'numeric_storage_size output_unit c_f_procpointer ieee_arithmetic ieee_support_underflow_control ' + 'ieee_get_underflow_mode ieee_set_underflow_mode newunit contiguous recursive ' + 'pad position action delim readwrite eor advance nml interface procedure namelist include sequence elemental pure ' + 'integer real character complex logical dimension allocatable|10 parameter ' + 'external implicit|10 none double precision assign intent optional pointer ' + 'target in out common equivalence data ' + // IRPF90 special keywords 'begin_provider &begin_provider end_provider begin_shell end_shell begin_template end_template subst assert touch ' + 'soft_touch provide no_dep free irp_if irp_else irp_endif irp_write irp_read', built_in: 'alog alog10 amax0 amax1 amin0 amin1 amod cabs ccos cexp clog csin csqrt dabs dacos dasin datan datan2 dcos dcosh ddim dexp dint ' + 'dlog dlog10 dmax1 dmin1 dmod dnint dsign dsin dsinh dsqrt dtan dtanh float iabs idim idint idnint ifix isign max0 max1 min0 min1 sngl ' + 'algama cdabs cdcos cdexp cdlog cdsin cdsqrt cqabs cqcos cqexp cqlog cqsin cqsqrt dcmplx dconjg derf derfc dfloat dgamma dimag dlgama ' + 'iqint qabs qacos qasin qatan qatan2 qcmplx qconjg qcos qcosh qdim qerf qerfc qexp qgamma qimag qlgama qlog qlog10 qmax1 qmin1 qmod ' + 'qnint qsign qsin qsinh qsqrt qtan qtanh abs acos aimag aint anint asin atan atan2 char cmplx conjg cos cosh exp ichar index int log ' + 'log10 max min nint sign sin sinh sqrt tan tanh print write dim lge lgt lle llt mod nullify allocate deallocate ' + 'adjustl adjustr all allocated any associated bit_size btest ceiling count cshift date_and_time digits dot_product ' + 'eoshift epsilon exponent floor fraction huge iand ibclr ibits ibset ieor ior ishft ishftc lbound len_trim matmul ' + 'maxexponent maxloc maxval merge minexponent minloc minval modulo mvbits nearest pack present product ' + 'radix random_number random_seed range repeat reshape rrspacing scale scan selected_int_kind selected_real_kind ' + 'set_exponent shape size spacing spread sum system_clock tiny transpose trim ubound unpack verify achar iachar transfer ' + 'dble entry dprod cpu_time command_argument_count get_command get_command_argument get_environment_variable is_iostat_end ' + 'ieee_arithmetic ieee_support_underflow_control ieee_get_underflow_mode ieee_set_underflow_mode ' + 'is_iostat_eor move_alloc new_line selected_char_kind same_type_as extends_type_of' + 'acosh asinh atanh 
bessel_j0 bessel_j1 bessel_jn bessel_y0 bessel_y1 bessel_yn erf erfc erfc_scaled gamma log_gamma hypot norm2 ' + 'atomic_define atomic_ref execute_command_line leadz trailz storage_size merge_bits ' + 'bge bgt ble blt dshiftl dshiftr findloc iall iany iparity image_index lcobound ucobound maskl maskr ' + 'num_images parity popcnt poppar shifta shiftl shiftr this_image ' + // IRPF90 special built_ins 'IRP_ALIGN irp_here' }; return { case_insensitive: true, keywords: F_KEYWORDS, illegal: /\/\*/, contains: [ hljs.inherit(hljs.APOS_STRING_MODE, {className: 'string', relevance: 0}), hljs.inherit(hljs.QUOTE_STRING_MODE, {className: 'string', relevance: 0}), { className: 'function', beginKeywords: 'subroutine function program', illegal: '[${=\\n]', contains: [hljs.UNDERSCORE_TITLE_MODE, PARAMS] }, hljs.COMMENT('!', '$', {relevance: 0}), hljs.COMMENT('begin_doc', 'end_doc', {relevance: 10}), { className: 'number', begin: '(?=\\b|\\+|\\-|\\.)(?=\\.\\d|\\d)(?:\\d+)?(?:\\.?\\d*)(?:[de][+-]?\\d+)?\\b\\.?', relevance: 0 } ] }; }; /***/ }, /* 251 */ /***/ function(module, exports) { module.exports = function(hljs) { var JAVA_IDENT_RE = '[\u00C0-\u02B8a-zA-Z_$][\u00C0-\u02B8a-zA-Z_$0-9]*'; var GENERIC_IDENT_RE = JAVA_IDENT_RE + '(<' + JAVA_IDENT_RE + '(\\s*,\\s*' + JAVA_IDENT_RE + ')*>)?'; var KEYWORDS = 'false synchronized int abstract float private char boolean static null if const ' + 'for true while long strictfp finally protected import native final void ' + 'enum else break transient catch instanceof byte super volatile case assert short ' + 'package default double public try this switch continue throws protected public private ' + 'module requires exports do'; // https://docs.oracle.com/javase/7/docs/technotes/guides/language/underscores-literals.html var JAVA_NUMBER_RE = '\\b' + '(' + '0[bB]([01]+[01_]+[01]+|[01]+)' + // 0b... '|' + '0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)' + // 0x... '|' + '(' + '([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?' + '|' + '\\.([\\d]+[\\d_]+[\\d]+|[\\d]+)' + ')' + '([eE][-+]?\\d+)?' 
+ // octal, decimal, float ')' + '[lLfF]?'; var JAVA_NUMBER_MODE = { className: 'number', begin: JAVA_NUMBER_RE, relevance: 0 }; return { aliases: ['jsp'], keywords: KEYWORDS, illegal: /<\/|#/, contains: [ hljs.COMMENT( '/\\*\\*', '\\*/', { relevance : 0, contains : [ { // eat up @'s in emails to prevent them to be recognized as doctags begin: /\w+@/, relevance: 0 }, { className : 'doctag', begin : '@[A-Za-z]+' } ] } ), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { className: 'class', beginKeywords: 'class interface', end: /[{;=]/, excludeEnd: true, keywords: 'class interface', illegal: /[:"\[\]]/, contains: [ {beginKeywords: 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE ] }, { // Expression keywords prevent 'keyword Name(...)' from being // recognized as a function definition beginKeywords: 'new throw return else', relevance: 0 }, { className: 'function', begin: '(' + GENERIC_IDENT_RE + '\\s+)+' + hljs.UNDERSCORE_IDENT_RE + '\\s*\\(', returnBegin: true, end: /[{;=]/, excludeEnd: true, keywords: KEYWORDS, contains: [ { begin: hljs.UNDERSCORE_IDENT_RE + '\\s*\\(', returnBegin: true, relevance: 0, contains: [hljs.UNDERSCORE_TITLE_MODE] }, { className: 'params', begin: /\(/, end: /\)/, keywords: KEYWORDS, relevance: 0, contains: [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, JAVA_NUMBER_MODE, { className: 'meta', begin: '@[A-Za-z]+' } ] }; }; /***/ }, /* 252 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '[A-Za-z$_][0-9A-Za-z$_]*'; var KEYWORDS = { keyword: 'in of if for while finally var new function do return void else break catch ' + 'instanceof with throw case default try this switch continue typeof delete ' + 'let yield const export super debugger as async await static ' + // ECMAScript 6 modules import 'import from as' , literal: 'true false null undefined NaN Infinity', built_in: 'eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent ' + 'encodeURI encodeURIComponent escape unescape Object Function Boolean Error ' + 'EvalError InternalError RangeError ReferenceError StopIteration SyntaxError ' + 'TypeError URIError Number Math Date String RegExp Array Float32Array ' + 'Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array ' + 'Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require ' + 'module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect ' + 'Promise' }; var EXPRESSIONS; var NUMBER = { className: 'number', variants: [ { begin: '\\b(0[bB][01]+)' }, { begin: '\\b(0[oO][0-7]+)' }, { begin: hljs.C_NUMBER_RE } ], relevance: 0 }; var SUBST = { className: 'subst', begin: '\\$\\{', end: '\\}', keywords: KEYWORDS, contains: [] // defined later }; var TEMPLATE_STRING = { className: 'string', begin: '`', end: '`', contains: [ hljs.BACKSLASH_ESCAPE, SUBST ] }; SUBST.contains = [ hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, TEMPLATE_STRING, NUMBER, hljs.REGEXP_MODE ] var PARAMS_CONTAINS = SUBST.contains.concat([ hljs.C_BLOCK_COMMENT_MODE, hljs.C_LINE_COMMENT_MODE ]); return { aliases: ['js', 'jsx'], keywords: KEYWORDS, contains: [ { className: 'meta', relevance: 10, begin: /^\s*['"]use (strict|asm)['"]/ }, { className: 'meta', begin: /^#!/, end: /$/ }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, TEMPLATE_STRING, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, NUMBER, { // object attr container begin: 
/[{,]\s*/, relevance: 0, contains: [ { begin: IDENT_RE + '\\s*:', returnBegin: true, relevance: 0, contains: [{className: 'attr', begin: IDENT_RE, relevance: 0}] } ] }, { // "value" container begin: '(' + hljs.RE_STARTERS_RE + '|\\b(case|return|throw)\\b)\\s*', keywords: 'return throw case', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.REGEXP_MODE, { className: 'function', begin: '(\\(.*?\\)|' + IDENT_RE + ')\\s*=>', returnBegin: true, end: '\\s*=>', contains: [ { className: 'params', variants: [ { begin: IDENT_RE }, { begin: /\(\s*\)/, }, { begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, keywords: KEYWORDS, contains: PARAMS_CONTAINS } ] } ] }, { // E4X / JSX begin: /</, end: /(\/\w+|\w+\/)>/, subLanguage: 'xml', contains: [ {begin: /<\w+\s*\/>/, skip: true}, { begin: /<\w+/, end: /(\/\w+|\w+\/)>/, skip: true, contains: [ {begin: /<\w+\s*\/>/, skip: true}, 'self' ] } ] } ], relevance: 0 }, { className: 'function', beginKeywords: 'function', end: /\{/, excludeEnd: true, contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: IDENT_RE}), { className: 'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, contains: PARAMS_CONTAINS } ], illegal: /\[|%/ }, { begin: /\$[(.]/ // relevance booster for a pattern common to JS libs: `$(something)` and `$.something` }, hljs.METHOD_GUARD, { // ES6 class className: 'class', beginKeywords: 'class', end: /[{;=]/, excludeEnd: true, illegal: /[:"\[\]]/, contains: [ {beginKeywords: 'extends'}, hljs.UNDERSCORE_TITLE_MODE ] }, { beginKeywords: 'constructor', end: /\{/, excludeEnd: true } ], illegal: /#(?!!)/ }; }; /***/ }, /* 253 */ /***/ function(module, exports) { module.exports = function(hljs) { var LITERALS = {literal: 'true false null'}; var TYPES = [ hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE ]; var VALUE_CONTAINER = { end: ',', endsWithParent: true, excludeEnd: true, contains: TYPES, keywords: LITERALS }; var OBJECT = { begin: '{', end: '}', contains: [ { className: 'attr', begin: /"/, end: /"/, contains: [hljs.BACKSLASH_ESCAPE], illegal: '\\n', }, hljs.inherit(VALUE_CONTAINER, {begin: /:/}) ], illegal: '\\S' }; var ARRAY = { begin: '\\[', end: '\\]', contains: [hljs.inherit(VALUE_CONTAINER)], // inherit is a workaround for a bug that makes shared modes with endsWithParent compile only the ending of one of the parents illegal: '\\S' }; TYPES.splice(TYPES.length, 0, OBJECT, ARRAY); return { contains: TYPES, keywords: LITERALS, illegal: '\\S' }; }; /***/ }, /* 254 */ /***/ function(module, exports) { module.exports = function(hljs) { // Since there are numerous special names in Julia, it is too much trouble // to maintain them by hand. Hence these names (i.e. keywords, literals and // built-ins) are automatically generated from Julia (v0.3.0 and v0.4.1) // itself through following scripts for each. 
var KEYWORDS = { // # keyword generator // println("in") // for kw in Base.REPLCompletions.complete_keyword("") // println(kw) // end keyword: 'in abstract baremodule begin bitstype break catch ccall const continue do else elseif end export ' + 'finally for function global if immutable import importall let local macro module quote return try type ' + 'typealias using while', // # literal generator // println("true") // println("false") // for name in Base.REPLCompletions.completions("", 0)[1] // try // s = symbol(name) // v = eval(s) // if !isa(v, Function) && // !isa(v, DataType) && // !isa(v, IntrinsicFunction) && // !issubtype(typeof(v), Tuple) && // !isa(v, Union) && // !isa(v, Module) && // !isa(v, TypeConstructor) && // !isa(v, TypeVar) && // !isa(v, Colon) // println(name) // end // end // end literal: // v0.3 'true false ARGS CPU_CORES C_NULL DL_LOAD_PATH DevNull ENDIAN_BOM ENV I|0 Inf Inf16 Inf32 ' + 'InsertionSort JULIA_HOME LOAD_PATH MS_ASYNC MS_INVALIDATE MS_SYNC MergeSort NaN NaN16 NaN32 OS_NAME QuickSort ' + 'RTLD_DEEPBIND RTLD_FIRST RTLD_GLOBAL RTLD_LAZY RTLD_LOCAL RTLD_NODELETE RTLD_NOLOAD RTLD_NOW RoundDown ' + 'RoundFromZero RoundNearest RoundToZero RoundUp STDERR STDIN STDOUT VERSION WORD_SIZE catalan cglobal e|0 eu|0 ' + 'eulergamma golden im nothing pi γ π φ ' + // v0.4 (diff) 'Inf64 NaN64 RoundNearestTiesAway RoundNearestTiesUp ', // # built_in generator: // for name in Base.REPLCompletions.completions("", 0)[1] // try // v = eval(symbol(name)) // if isa(v, DataType) || isa(v, TypeConstructor) || isa(v, TypeVar) // println(name) // end // end // end built_in: // v0.3 'ANY ASCIIString AbstractArray AbstractRNG AbstractSparseArray Any ArgumentError Array Associative Base64Pipe ' + 'Bidiagonal BigFloat BigInt BitArray BitMatrix BitVector Bool BoundsError Box CFILE Cchar Cdouble Cfloat Char ' + 'CharString Cint Clong Clonglong ClusterManager Cmd Coff_t Colon Complex Complex128 Complex32 Complex64 ' + 'Condition Cptrdiff_t Cshort Csize_t Cssize_t Cuchar Cuint Culong Culonglong Cushort Cwchar_t DArray DataType ' + 'DenseArray Diagonal Dict DimensionMismatch DirectIndexString Display DivideError DomainError EOFError ' + 'EachLine Enumerate ErrorException Exception Expr Factorization FileMonitor FileOffset Filter Float16 Float32 ' + 'Float64 FloatRange FloatingPoint Function GetfieldNode GotoNode Hermitian IO IOBuffer IOStream IPv4 IPv6 ' + 'InexactError Int Int128 Int16 Int32 Int64 Int8 IntSet Integer InterruptException IntrinsicFunction KeyError ' + 'LabelNode LambdaStaticData LineNumberNode LoadError LocalProcess MIME MathConst MemoryError MersenneTwister ' + 'Method MethodError MethodTable Module NTuple NewvarNode Nothing Number ObjectIdDict OrdinalRange ' + 'OverflowError ParseError PollingFileWatcher ProcessExitedException ProcessGroup Ptr QuoteNode Range Range1 ' + 'Ranges Rational RawFD Real Regex RegexMatch RemoteRef RepString RevString RopeString RoundingMode Set ' + 'SharedArray Signed SparseMatrixCSC StackOverflowError Stat StatStruct StepRange String SubArray SubString ' + 'SymTridiagonal Symbol SymbolNode Symmetric SystemError Task TextDisplay Timer TmStruct TopNode Triangular ' + 'Tridiagonal Type TypeConstructor TypeError TypeName TypeVar UTF16String UTF32String UTF8String UdpSocket ' + 'Uint Uint128 Uint16 Uint32 Uint64 Uint8 UndefRefError UndefVarError UniformScaling UnionType UnitRange ' + 'Unsigned Vararg VersionNumber WString WeakKeyDict WeakRef Woodbury Zip ' + // v0.4 (diff) 'AbstractChannel AbstractFloat AbstractString AssertionError Base64DecodePipe 
Base64EncodePipe BufferStream ' + 'CapturedException CartesianIndex CartesianRange Channel Cintmax_t CompositeException Cstring Cuintmax_t ' + 'Cwstring Date DateTime Dims Enum GenSym GlobalRef HTML InitError InvalidStateException Irrational LinSpace ' + 'LowerTriangular NullException Nullable OutOfMemoryError Pair PartialQuickSort Pipe RandomDevice ' + 'ReadOnlyMemoryError ReentrantLock Ref RemoteException SegmentationFault SerializationState SimpleVector ' + 'TCPSocket Text Tuple UDPSocket UInt UInt128 UInt16 UInt32 UInt64 UInt8 UnicodeError Union UpperTriangular ' + 'Val Void WorkerConfig AbstractMatrix AbstractSparseMatrix AbstractSparseVector AbstractVecOrMat AbstractVector ' + 'DenseMatrix DenseVecOrMat DenseVector Matrix SharedMatrix SharedVector StridedArray StridedMatrix ' + 'StridedVecOrMat StridedVector VecOrMat Vector ' }; // ref: http://julia.readthedocs.org/en/latest/manual/variables/#allowed-variable-names var VARIABLE_NAME_RE = '[A-Za-z_\\u00A1-\\uFFFF][A-Za-z_0-9\\u00A1-\\uFFFF]*'; // placeholder for recursive self-reference var DEFAULT = { lexemes: VARIABLE_NAME_RE, keywords: KEYWORDS, illegal: /<\// }; var TYPE_ANNOTATION = { className: 'type', begin: /::/ }; var SUBTYPE = { className: 'type', begin: /<:/ }; // ref: http://julia.readthedocs.org/en/latest/manual/integers-and-floating-point-numbers/ var NUMBER = { className: 'number', // supported numeric literals: // * binary literal (e.g. 0x10) // * octal literal (e.g. 0o76543210) // * hexadecimal literal (e.g. 0xfedcba876543210) // * hexadecimal floating point literal (e.g. 0x1p0, 0x1.2p2) // * decimal literal (e.g. 9876543210, 100_000_000) // * floating pointe literal (e.g. 1.2, 1.2f, .2, 1., 1.2e10, 1.2e-10) begin: /(\b0x[\d_]*(\.[\d_]*)?|0x\.\d[\d_]*)p[-+]?\d+|\b0[box][a-fA-F0-9][a-fA-F0-9_]*|(\b\d[\d_]*(\.[\d_]*)?|\.\d[\d_]*)([eEfF][-+]?\d+)?/, relevance: 0 }; var CHAR = { className: 'string', begin: /'(.|\\[xXuU][a-zA-Z0-9]+)'/ }; var INTERPOLATION = { className: 'subst', begin: /\$\(/, end: /\)/, keywords: KEYWORDS }; var INTERPOLATED_VARIABLE = { className: 'variable', begin: '\\$' + VARIABLE_NAME_RE }; // TODO: neatly escape normal code in string literal var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, INTERPOLATION, INTERPOLATED_VARIABLE], variants: [ { begin: /\w*"""/, end: /"""\w*/, relevance: 10 }, { begin: /\w*"/, end: /"\w*/ } ] }; var COMMAND = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, INTERPOLATION, INTERPOLATED_VARIABLE], begin: '`', end: '`' }; var MACROCALL = { className: 'meta', begin: '@' + VARIABLE_NAME_RE }; var COMMENT = { className: 'comment', variants: [ { begin: '#=', end: '=#', relevance: 10 }, { begin: '#', end: '$' } ] }; DEFAULT.contains = [ NUMBER, CHAR, TYPE_ANNOTATION, SUBTYPE, STRING, COMMAND, MACROCALL, COMMENT, hljs.HASH_COMMENT_MODE ]; INTERPOLATION.contains = DEFAULT.contains; return DEFAULT; }; /***/ }, /* 255 */ /***/ function(module, exports) { module.exports = function (hljs) { var KEYWORDS = { keyword: 'abstract as val var vararg get set class object open private protected public noinline ' + 'crossinline dynamic final enum if else do while for when throw try catch finally ' + 'import package is in fun override companion reified inline ' + 'interface annotation data sealed internal infix operator out by constructor super ' + // to be deleted soon 'trait volatile transient native default', built_in: 'Byte Short Char Int Long Boolean Float Double Void Unit Nothing', literal: 'true false null' }; var KEYWORDS_WITH_LABEL = { className: 
'keyword', begin: /\b(break|continue|return|this)\b/, starts: { contains: [ { className: 'symbol', begin: /@\w+/ } ] } }; var LABEL = { className: 'symbol', begin: hljs.UNDERSCORE_IDENT_RE + '@' }; // for string templates var SUBST = { className: 'subst', variants: [ {begin: '\\$' + hljs.UNDERSCORE_IDENT_RE}, {begin: '\\${', end: '}', contains: [hljs.APOS_STRING_MODE, hljs.C_NUMBER_MODE]} ] }; var STRING = { className: 'string', variants: [ { begin: '"""', end: '"""', contains: [SUBST] }, // Can't use built-in modes easily, as we want to use STRING in the meta // context as 'meta-string' and there's no syntax to remove explicitly set // classNames in built-in modes. { begin: '\'', end: '\'', illegal: /\n/, contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '"', end: '"', illegal: /\n/, contains: [hljs.BACKSLASH_ESCAPE, SUBST] } ] }; var ANNOTATION_USE_SITE = { className: 'meta', begin: '@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*' + hljs.UNDERSCORE_IDENT_RE + ')?' }; var ANNOTATION = { className: 'meta', begin: '@' + hljs.UNDERSCORE_IDENT_RE, contains: [ { begin: /\(/, end: /\)/, contains: [ hljs.inherit(STRING, {className: 'meta-string'}) ] } ] }; return { keywords: KEYWORDS, contains : [ hljs.COMMENT( '/\\*\\*', '\\*/', { relevance : 0, contains : [{ className : 'doctag', begin : '@[A-Za-z]+' }] } ), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, KEYWORDS_WITH_LABEL, LABEL, ANNOTATION_USE_SITE, ANNOTATION, { className: 'function', beginKeywords: 'fun', end: '[(]|$', returnBegin: true, excludeEnd: true, keywords: KEYWORDS, illegal: /fun\s+(<.*>)?[^\s\(]+(\s+[^\s\(]+)\s*=/, relevance: 5, contains: [ { begin: hljs.UNDERSCORE_IDENT_RE + '\\s*\\(', returnBegin: true, relevance: 0, contains: [hljs.UNDERSCORE_TITLE_MODE] }, { className: 'type', begin: /</, end: />/, keywords: 'reified', relevance: 0 }, { className: 'params', begin: /\(/, end: /\)/, endsParent: true, keywords: KEYWORDS, relevance: 0, contains: [ { begin: /:/, end: /[=,\/]/, endsWithParent: true, contains: [ {className: 'type', begin: hljs.UNDERSCORE_IDENT_RE}, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ], relevance: 0 }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, ANNOTATION_USE_SITE, ANNOTATION, STRING, hljs.C_NUMBER_MODE ] }, hljs.C_BLOCK_COMMENT_MODE ] }, { className: 'class', beginKeywords: 'class interface trait', end: /[:\{(]|$/, // remove 'trait' when removed from KEYWORDS excludeEnd: true, illegal: 'extends implements', contains: [ {beginKeywords: 'public protected internal private constructor'}, hljs.UNDERSCORE_TITLE_MODE, { className: 'type', begin: /</, end: />/, excludeBegin: true, excludeEnd: true, relevance: 0 }, { className: 'type', begin: /[,:]\s*/, end: /[<\(,]|$/, excludeBegin: true, returnEnd: true }, ANNOTATION_USE_SITE, ANNOTATION ] }, STRING, { className: 'meta', begin: "^#!/usr/bin/env", end: '$', illegal: '\n' }, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 256 */ /***/ function(module, exports) { module.exports = function(hljs) { var LASSO_IDENT_RE = '[a-zA-Z_][\\w.]*'; var LASSO_ANGLE_RE = '<\\?(lasso(script)?|=)'; var LASSO_CLOSE_RE = '\\]|\\?>'; var LASSO_KEYWORDS = { literal: 'true false none minimal full all void and or not ' + 'bw nbw ew new cn ncn lt lte gt gte eq neq rx nrx ft', built_in: 'array date decimal duration integer map pair string tag xml null ' + 'boolean bytes keyword list locale queue set stack staticarray ' + 'local var variable global data self inherited currentcapture givenblock', keyword: 'cache database_names database_schemanames 
database_tablenames ' + 'define_tag define_type email_batch encode_set html_comment handle ' + 'handle_error header if inline iterate ljax_target link ' + 'link_currentaction link_currentgroup link_currentrecord link_detail ' + 'link_firstgroup link_firstrecord link_lastgroup link_lastrecord ' + 'link_nextgroup link_nextrecord link_prevgroup link_prevrecord log ' + 'loop namespace_using output_none portal private protect records ' + 'referer referrer repeating resultset rows search_args ' + 'search_arguments select sort_args sort_arguments thread_atomic ' + 'value_list while abort case else fail_if fail_ifnot fail if_empty ' + 'if_false if_null if_true loop_abort loop_continue loop_count params ' + 'params_up return return_value run_children soap_definetag ' + 'soap_lastrequest soap_lastresponse tag_name ascending average by ' + 'define descending do equals frozen group handle_failure import in ' + 'into join let match max min on order parent protected provide public ' + 'require returnhome skip split_thread sum take thread to trait type ' + 'where with yield yieldhome' }; var HTML_COMMENT = hljs.COMMENT( '<!--', '-->', { relevance: 0 } ); var LASSO_NOPROCESS = { className: 'meta', begin: '\\[noprocess\\]', starts: { end: '\\[/noprocess\\]', returnEnd: true, contains: [HTML_COMMENT] } }; var LASSO_START = { className: 'meta', begin: '\\[/noprocess|' + LASSO_ANGLE_RE }; var LASSO_DATAMEMBER = { className: 'symbol', begin: '\'' + LASSO_IDENT_RE + '\'' }; var LASSO_CODE = [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.inherit(hljs.C_NUMBER_MODE, {begin: hljs.C_NUMBER_RE + '|(-?infinity|NaN)\\b'}), hljs.inherit(hljs.APOS_STRING_MODE, {illegal: null}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}), { className: 'string', begin: '`', end: '`' }, { // variables variants: [ { begin: '[#$]' + LASSO_IDENT_RE }, { begin: '#', end: '\\d+', illegal: '\\W' } ] }, { className: 'type', begin: '::\\s*', end: LASSO_IDENT_RE, illegal: '\\W' }, { className: 'params', variants: [ { begin: '-(?!infinity)' + LASSO_IDENT_RE, relevance: 0 }, { begin: '(\\.\\.\\.)' } ] }, { begin: /(->|\.)\s*/, relevance: 0, contains: [LASSO_DATAMEMBER] }, { className: 'class', beginKeywords: 'define', returnEnd: true, end: '\\(|=>', contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: LASSO_IDENT_RE + '(=(?!>))?|[-+*/%](?!>)'}) ] } ]; return { aliases: ['ls', 'lassoscript'], case_insensitive: true, lexemes: LASSO_IDENT_RE + '|&[lg]t;', keywords: LASSO_KEYWORDS, contains: [ { className: 'meta', begin: LASSO_CLOSE_RE, relevance: 0, starts: { // markup end: '\\[|' + LASSO_ANGLE_RE, returnEnd: true, relevance: 0, contains: [HTML_COMMENT] } }, LASSO_NOPROCESS, LASSO_START, { className: 'meta', begin: '\\[no_square_brackets', starts: { end: '\\[/no_square_brackets\\]', // not implemented in the language lexemes: LASSO_IDENT_RE + '|&[lg]t;', keywords: LASSO_KEYWORDS, contains: [ { className: 'meta', begin: LASSO_CLOSE_RE, relevance: 0, starts: { end: '\\[noprocess\\]|' + LASSO_ANGLE_RE, returnEnd: true, contains: [HTML_COMMENT] } }, LASSO_NOPROCESS, LASSO_START ].concat(LASSO_CODE) } }, { className: 'meta', begin: '\\[', relevance: 0 }, { className: 'meta', begin: '^#!', end:'lasso9$', relevance: 10 } ].concat(LASSO_CODE) }; }; /***/ }, /* 257 */ /***/ function(module, exports) { module.exports = function(hljs) { return { contains: [ { className: 'attribute', begin: '^dn', end: ': ', excludeEnd: true, starts: {end: '$', relevance: 0}, relevance: 10 }, { className: 'attribute', begin: '^\\w', end: ': ', excludeEnd: true, 
starts: {end: '$', relevance: 0} }, { className: 'literal', begin: '^-', end: '$' }, hljs.HASH_COMMENT_MODE ] }; }; /***/ }, /* 258 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '[\\w-]+'; // yes, Less identifiers may begin with a digit var INTERP_IDENT_RE = '(' + IDENT_RE + '|@{' + IDENT_RE + '})'; /* Generic Modes */ var RULES = [], VALUE = []; // forward def. for recursive modes var STRING_MODE = function(c) { return { // Less strings are not multiline (also include '~' for more consistent coloring of "escaped" strings) className: 'string', begin: '~?' + c + '.*?' + c };}; var IDENT_MODE = function(name, begin, relevance) { return { className: name, begin: begin, relevance: relevance };}; var PARENS_MODE = { // used only to properly balance nested parens inside mixin call, def. arg list begin: '\\(', end: '\\)', contains: VALUE, relevance: 0 }; // generic Less highlighter (used almost everywhere except selectors): VALUE.push( hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, STRING_MODE("'"), STRING_MODE('"'), hljs.CSS_NUMBER_MODE, // fixme: it does not include dot for numbers like .5em :( { begin: '(url|data-uri)\\(', starts: {className: 'string', end: '[\\)\\n]', excludeEnd: true} }, IDENT_MODE('number', '#[0-9A-Fa-f]+\\b'), PARENS_MODE, IDENT_MODE('variable', '@@?' + IDENT_RE, 10), IDENT_MODE('variable', '@{' + IDENT_RE + '}'), IDENT_MODE('built_in', '~?`[^`]*?`'), // inline javascript (or whatever host language) *multiline* string { // @media features (it’s here to not duplicate things in AT_RULE_MODE with extra PARENS_MODE overriding): className: 'attribute', begin: IDENT_RE + '\\s*:', end: ':', returnBegin: true, excludeEnd: true }, { className: 'meta', begin: '!important' } ); var VALUE_WITH_RULESETS = VALUE.concat({ begin: '{', end: '}', contains: RULES }); var MIXIN_GUARD_MODE = { beginKeywords: 'when', endsWithParent: true, contains: [{beginKeywords: 'and not'}].concat(VALUE) // using this form to override VALUE’s 'function' match }; /* Rule-Level Modes */ var RULE_MODE = { begin: INTERP_IDENT_RE + '\\s*:', returnBegin: true, end: '[;}]', relevance: 0, contains: [ { className: 'attribute', begin: INTERP_IDENT_RE, end: ':', excludeEnd: true, starts: { endsWithParent: true, illegal: '[<=$]', relevance: 0, contains: VALUE } } ] }; var AT_RULE_MODE = { className: 'keyword', begin: '@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b', starts: {end: '[;{}]', returnEnd: true, contains: VALUE, relevance: 0} }; // variable definitions and calls var VAR_RULE_MODE = { className: 'variable', variants: [ // using more strict pattern for higher relevance to increase chances of Less detection. // this is *the only* Less specific statement used in most of the sources, so... // (we’ll still often loose to the css-parser unless there's '//' comment, // simply because 1 variable just can't beat 99 properties :) {begin: '@' + IDENT_RE + '\\s*:', relevance: 15}, {begin: '@' + IDENT_RE} ], starts: {end: '[;}]', returnEnd: true, contains: VALUE_WITH_RULESETS} }; var SELECTOR_MODE = { // first parse unambiguous selectors (i.e. those not starting with tag) // then fall into the scary lookahead-discriminator variant. 
// this mode also handles mixin definitions and calls variants: [{ begin: '[\\.#:&\\[>]', end: '[;{}]' // mixin calls end with ';' }, { begin: INTERP_IDENT_RE, end: '{' }], returnBegin: true, returnEnd: true, illegal: '[<=\'$"]', relevance: 0, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, MIXIN_GUARD_MODE, IDENT_MODE('keyword', 'all\\b'), IDENT_MODE('variable', '@{' + IDENT_RE + '}'), // otherwise it’s identified as tag IDENT_MODE('selector-tag', INTERP_IDENT_RE + '%?', 0), // '%' for more consistent coloring of @keyframes "tags" IDENT_MODE('selector-id', '#' + INTERP_IDENT_RE), IDENT_MODE('selector-class', '\\.' + INTERP_IDENT_RE, 0), IDENT_MODE('selector-tag', '&', 0), {className: 'selector-attr', begin: '\\[', end: '\\]'}, {className: 'selector-pseudo', begin: /:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/}, {begin: '\\(', end: '\\)', contains: VALUE_WITH_RULESETS}, // argument list of parametric mixins {begin: '!important'} // eat !important after mixin call or it will be colored as tag ] }; RULES.push( hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, AT_RULE_MODE, VAR_RULE_MODE, RULE_MODE, SELECTOR_MODE ); return { case_insensitive: true, illegal: '[=>\'/<($"]', contains: RULES }; }; /***/ }, /* 259 */ /***/ function(module, exports) { module.exports = function(hljs) { var LISP_IDENT_RE = '[a-zA-Z_\\-\\+\\*\\/\\<\\=\\>\\&\\#][a-zA-Z0-9_\\-\\+\\*\\/\\<\\=\\>\\&\\#!]*'; var MEC_RE = '\\|[^]*?\\|'; var LISP_SIMPLE_NUMBER_RE = '(\\-|\\+)?\\d+(\\.\\d+|\\/\\d+)?((d|e|f|l|s|D|E|F|L|S)(\\+|\\-)?\\d+)?'; var SHEBANG = { className: 'meta', begin: '^#!', end: '$' }; var LITERAL = { className: 'literal', begin: '\\b(t{1}|nil)\\b' }; var NUMBER = { className: 'number', variants: [ {begin: LISP_SIMPLE_NUMBER_RE, relevance: 0}, {begin: '#(b|B)[0-1]+(/[0-1]+)?'}, {begin: '#(o|O)[0-7]+(/[0-7]+)?'}, {begin: '#(x|X)[0-9a-fA-F]+(/[0-9a-fA-F]+)?'}, {begin: '#(c|C)\\(' + LISP_SIMPLE_NUMBER_RE + ' +' + LISP_SIMPLE_NUMBER_RE, end: '\\)'} ] }; var STRING = hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}); var COMMENT = hljs.COMMENT( ';', '$', { relevance: 0 } ); var VARIABLE = { begin: '\\*', end: '\\*' }; var KEYWORD = { className: 'symbol', begin: '[:&]' + LISP_IDENT_RE }; var IDENT = { begin: LISP_IDENT_RE, relevance: 0 }; var MEC = { begin: MEC_RE }; var QUOTED_LIST = { begin: '\\(', end: '\\)', contains: ['self', LITERAL, STRING, NUMBER, IDENT] }; var QUOTED = { contains: [NUMBER, STRING, VARIABLE, KEYWORD, QUOTED_LIST, IDENT], variants: [ { begin: '[\'`]\\(', end: '\\)' }, { begin: '\\(quote ', end: '\\)', keywords: {name: 'quote'} }, { begin: '\'' + MEC_RE } ] }; var QUOTED_ATOM = { variants: [ {begin: '\'' + LISP_IDENT_RE}, {begin: '#\'' + LISP_IDENT_RE + '(::' + LISP_IDENT_RE + ')*'} ] }; var LIST = { begin: '\\(\\s*', end: '\\)' }; var BODY = { endsWithParent: true, relevance: 0 }; LIST.contains = [ { className: 'name', variants: [ {begin: LISP_IDENT_RE}, {begin: MEC_RE} ] }, BODY ]; BODY.contains = [QUOTED, QUOTED_ATOM, LIST, LITERAL, NUMBER, STRING, COMMENT, VARIABLE, KEYWORD, MEC, IDENT]; return { illegal: /\S/, contains: [ NUMBER, SHEBANG, LITERAL, STRING, COMMENT, QUOTED, QUOTED_ATOM, LIST, IDENT ] }; }; /***/ }, /* 260 */ /***/ function(module, exports) { module.exports = function(hljs) { var VARIABLE = { begin: '\\b[gtps][A-Z]+[A-Za-z0-9_\\-]*\\b|\\$_[A-Z]+', relevance: 0 }; var COMMENT_MODES = [ hljs.C_BLOCK_COMMENT_MODE, hljs.HASH_COMMENT_MODE, hljs.COMMENT('--', '$'), hljs.COMMENT('[^:]//', '$') ]; var TITLE1 = hljs.inherit(hljs.TITLE_MODE, { variants: [ {begin: 
'\\b_*rig[A-Z]+[A-Za-z0-9_\\-]*'}, {begin: '\\b_[a-z0-9\\-]+'} ] }); var TITLE2 = hljs.inherit(hljs.TITLE_MODE, {begin: '\\b([A-Za-z0-9_\\-]+)\\b'}); return { case_insensitive: false, keywords: { keyword: '$_COOKIE $_FILES $_GET $_GET_BINARY $_GET_RAW $_POST $_POST_BINARY $_POST_RAW $_SESSION $_SERVER ' + 'codepoint codepoints segment segments codeunit codeunits sentence sentences trueWord trueWords paragraph ' + 'after byte bytes english the until http forever descending using line real8 with seventh ' + 'for stdout finally element word words fourth before black ninth sixth characters chars stderr ' + 'uInt1 uInt1s uInt2 uInt2s stdin string lines relative rel any fifth items from middle mid ' + 'at else of catch then third it file milliseconds seconds second secs sec int1 int1s int4 ' + 'int4s internet int2 int2s normal text item last long detailed effective uInt4 uInt4s repeat ' + 'end repeat URL in try into switch to words https token binfile each tenth as ticks tick ' + 'system real4 by dateItems without char character ascending eighth whole dateTime numeric short ' + 'first ftp integer abbreviated abbr abbrev private case while if ' + 'div mod wrap and or bitAnd bitNot bitOr bitXor among not in a an within ' + 'contains ends with begins the keys of keys', literal: 'SIX TEN FORMFEED NINE ZERO NONE SPACE FOUR FALSE COLON CRLF PI COMMA ENDOFFILE EOF EIGHT FIVE ' + 'QUOTE EMPTY ONE TRUE RETURN CR LINEFEED RIGHT BACKSLASH NULL SEVEN TAB THREE TWO ' + 'six ten formfeed nine zero none space four false colon crlf pi comma endoffile eof eight five ' + 'quote empty one true return cr linefeed right backslash null seven tab three two ' + 'RIVERSION RISTATE FILE_READ_MODE FILE_WRITE_MODE FILE_WRITE_MODE DIR_WRITE_MODE FILE_READ_UMASK ' + 'FILE_WRITE_UMASK DIR_READ_UMASK DIR_WRITE_UMASK', built_in: 'put abs acos aliasReference annuity arrayDecode arrayEncode asin atan atan2 average avg avgDev base64Decode ' + 'base64Encode baseConvert binaryDecode binaryEncode byteOffset byteToNum cachedURL cachedURLs charToNum ' + 'cipherNames codepointOffset codepointProperty codepointToNum codeunitOffset commandNames compound compress ' + 'constantNames cos date dateFormat decompress directories ' + 'diskSpace DNSServers exp exp1 exp2 exp10 extents files flushEvents folders format functionNames geometricMean global ' + 'globals hasMemory harmonicMean hostAddress hostAddressToName hostName hostNameToAddress isNumber ISOToMac itemOffset ' + 'keys len length libURLErrorData libUrlFormData libURLftpCommand libURLLastHTTPHeaders libURLLastRHHeaders ' + 'libUrlMultipartFormAddPart libUrlMultipartFormData libURLVersion lineOffset ln ln1 localNames log log2 log10 ' + 'longFilePath lower macToISO matchChunk matchText matrixMultiply max md5Digest median merge millisec ' + 'millisecs millisecond milliseconds min monthNames nativeCharToNum normalizeText num number numToByte numToChar ' + 'numToCodepoint numToNativeChar offset open openfiles openProcesses openProcessIDs openSockets ' + 'paragraphOffset paramCount param params peerAddress pendingMessages platform popStdDev populationStandardDeviation ' + 'populationVariance popVariance processID random randomBytes replaceText result revCreateXMLTree revCreateXMLTreeFromFile ' + 'revCurrentRecord revCurrentRecordIsFirst revCurrentRecordIsLast revDatabaseColumnCount revDatabaseColumnIsNull ' + 'revDatabaseColumnLengths revDatabaseColumnNames revDatabaseColumnNamed revDatabaseColumnNumbered ' + 'revDatabaseColumnTypes revDatabaseConnectResult revDatabaseCursors revDatabaseID 
revDatabaseTableNames ' + 'revDatabaseType revDataFromQuery revdb_closeCursor revdb_columnbynumber revdb_columncount revdb_columnisnull ' + 'revdb_columnlengths revdb_columnnames revdb_columntypes revdb_commit revdb_connect revdb_connections ' + 'revdb_connectionerr revdb_currentrecord revdb_cursorconnection revdb_cursorerr revdb_cursors revdb_dbtype ' + 'revdb_disconnect revdb_execute revdb_iseof revdb_isbof revdb_movefirst revdb_movelast revdb_movenext ' + 'revdb_moveprev revdb_query revdb_querylist revdb_recordcount revdb_rollback revdb_tablenames ' + 'revGetDatabaseDriverPath revNumberOfRecords revOpenDatabase revOpenDatabases revQueryDatabase ' + 'revQueryDatabaseBlob revQueryResult revQueryIsAtStart revQueryIsAtEnd revUnixFromMacPath revXMLAttribute ' + 'revXMLAttributes revXMLAttributeValues revXMLChildContents revXMLChildNames revXMLCreateTreeFromFileWithNamespaces ' + 'revXMLCreateTreeWithNamespaces revXMLDataFromXPathQuery revXMLEvaluateXPath revXMLFirstChild revXMLMatchingNode ' + 'revXMLNextSibling revXMLNodeContents revXMLNumberOfChildren revXMLParent revXMLPreviousSibling ' + 'revXMLRootNode revXMLRPC_CreateRequest revXMLRPC_Documents revXMLRPC_Error ' + 'revXMLRPC_GetHost revXMLRPC_GetMethod revXMLRPC_GetParam revXMLText revXMLRPC_Execute ' + 'revXMLRPC_GetParamCount revXMLRPC_GetParamNode revXMLRPC_GetParamType revXMLRPC_GetPath revXMLRPC_GetPort ' + 'revXMLRPC_GetProtocol revXMLRPC_GetRequest revXMLRPC_GetResponse revXMLRPC_GetSocket revXMLTree ' + 'revXMLTrees revXMLValidateDTD revZipDescribeItem revZipEnumerateItems revZipOpenArchives round sampVariance ' + 'sec secs seconds sentenceOffset sha1Digest shell shortFilePath sin specialFolderPath sqrt standardDeviation statRound ' + 'stdDev sum sysError systemVersion tan tempName textDecode textEncode tick ticks time to tokenOffset toLower toUpper ' + 'transpose truewordOffset trunc uniDecode uniEncode upper URLDecode URLEncode URLStatus uuid value variableNames ' + 'variance version waitDepth weekdayNames wordOffset xsltApplyStylesheet xsltApplyStylesheetFromFile xsltLoadStylesheet ' + 'xsltLoadStylesheetFromFile add breakpoint cancel clear local variable file word line folder directory URL close socket process ' + 'combine constant convert create new alias folder directory decrypt delete variable word line folder ' + 'directory URL dispatch divide do encrypt filter get include intersect kill libURLDownloadToFile ' + 'libURLFollowHttpRedirects libURLftpUpload libURLftpUploadFile libURLresetAll libUrlSetAuthCallback ' + 'libURLSetCustomHTTPHeaders libUrlSetExpect100 libURLSetFTPListCommand libURLSetFTPMode libURLSetFTPStopTime ' + 'libURLSetStatusCallback load multiply socket prepare process post seek rel relative read from process rename ' + 'replace require resetAll resolve revAddXMLNode revAppendXML revCloseCursor revCloseDatabase revCommitDatabase ' + 'revCopyFile revCopyFolder revCopyXMLNode revDeleteFolder revDeleteXMLNode revDeleteAllXMLTrees ' + 'revDeleteXMLTree revExecuteSQL revGoURL revInsertXMLNode revMoveFolder revMoveToFirstRecord revMoveToLastRecord ' + 'revMoveToNextRecord revMoveToPreviousRecord revMoveToRecord revMoveXMLNode revPutIntoXMLNode revRollBackDatabase ' + 'revSetDatabaseDriverPath revSetXMLAttribute revXMLRPC_AddParam revXMLRPC_DeleteAllDocuments revXMLAddDTD ' + 'revXMLRPC_Free revXMLRPC_FreeAll revXMLRPC_DeleteDocument revXMLRPC_DeleteParam revXMLRPC_SetHost ' + 'revXMLRPC_SetMethod revXMLRPC_SetPort revXMLRPC_SetProtocol revXMLRPC_SetSocket revZipAddItemWithData ' + 'revZipAddItemWithFile 
revZipAddUncompressedItemWithData revZipAddUncompressedItemWithFile revZipCancel ' + 'revZipCloseArchive revZipDeleteItem revZipExtractItemToFile revZipExtractItemToVariable revZipSetProgressCallback ' + 'revZipRenameItem revZipReplaceItemWithData revZipReplaceItemWithFile revZipOpenArchive send set sort split start stop ' + 'subtract union unload wait write' }, contains: [ VARIABLE, { className: 'keyword', begin: '\\bend\\sif\\b' }, { className: 'function', beginKeywords: 'function', end: '$', contains: [ VARIABLE, TITLE2, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE, TITLE1 ] }, { className: 'function', begin: '\\bend\\s+', end: '$', keywords: 'end', contains: [ TITLE2, TITLE1 ], relevance: 0 }, { beginKeywords: 'command on', end: '$', contains: [ VARIABLE, TITLE2, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE, TITLE1 ] }, { className: 'meta', variants: [ { begin: '<\\?(rev|lc|livecode)', relevance: 10 }, { begin: '<\\?' }, { begin: '\\?>' } ] }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE, TITLE1 ].concat(COMMENT_MODES), illegal: ';$|^\\[|^=|&|{' }; }; /***/ }, /* 261 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: // JS keywords 'in if for while finally new do return else break catch instanceof throw try this ' + 'switch continue typeof delete debugger case default function var with ' + // LiveScript keywords 'then unless until loop of by when and or is isnt not it that otherwise from to til fallthrough super ' + 'case default function var void const let enum export import native ' + '__hasProp __extends __slice __bind __indexOf', literal: // JS literals 'true false null undefined ' + // LiveScript literals 'yes no on off it that void', built_in: 'npm require console print module global window document' }; var JS_IDENT_RE = '[A-Za-z$_](?:\-[0-9A-Za-z$_]|[0-9A-Za-z$_])*'; var TITLE = hljs.inherit(hljs.TITLE_MODE, {begin: JS_IDENT_RE}); var SUBST = { className: 'subst', begin: /#\{/, end: /}/, keywords: KEYWORDS }; var SUBST_SIMPLE = { className: 'subst', begin: /#[A-Za-z$_]/, end: /(?:\-[0-9A-Za-z$_]|[0-9A-Za-z$_])*/, keywords: KEYWORDS }; var EXPRESSIONS = [ hljs.BINARY_NUMBER_MODE, { className: 'number', begin: '(\\b0[xX][a-fA-F0-9_]+)|(\\b\\d(\\d|_\\d)*(\\.(\\d(\\d|_\\d)*)?)?(_*[eE]([-+]\\d(_\\d|\\d)*)?)?[_a-z]*)', relevance: 0, starts: {end: '(\\s*/)?', relevance: 0} // a number tries to eat the following slash to prevent treating it as a regexp }, { className: 'string', variants: [ { begin: /'''/, end: /'''/, contains: [hljs.BACKSLASH_ESCAPE] }, { begin: /'/, end: /'/, contains: [hljs.BACKSLASH_ESCAPE] }, { begin: /"""/, end: /"""/, contains: [hljs.BACKSLASH_ESCAPE, SUBST, SUBST_SIMPLE] }, { begin: /"/, end: /"/, contains: [hljs.BACKSLASH_ESCAPE, SUBST, SUBST_SIMPLE] }, { begin: /\\/, end: /(\s|$)/, excludeEnd: true } ] }, { className: 'regexp', variants: [ { begin: '//', end: '//[gim]*', contains: [SUBST, hljs.HASH_COMMENT_MODE] }, { // regex can't start with space to parse x / 2 / 3 as two divisions // regex can't start with *, and it supports an "illegal" in the main mode begin: /\/(?![ *])(\\\/|.)*?\/[gim]*(?=\W|$)/ } ] }, { begin: '@' + JS_IDENT_RE }, { begin: '``', end: '``', excludeBegin: true, excludeEnd: true, subLanguage: 'javascript' } ]; SUBST.contains = EXPRESSIONS; var PARAMS = { className: 'params', begin: '\\(', returnBegin: true, /* We need another contained nameless mode to not 
have every nested pair of parens to be called "params" */ contains: [ { begin: /\(/, end: /\)/, keywords: KEYWORDS, contains: ['self'].concat(EXPRESSIONS) } ] }; return { aliases: ['ls'], keywords: KEYWORDS, illegal: /\/\*/, contains: EXPRESSIONS.concat([ hljs.COMMENT('\\/\\*', '\\*\\/'), hljs.HASH_COMMENT_MODE, { className: 'function', contains: [TITLE, PARAMS], returnBegin: true, variants: [ { begin: '(' + JS_IDENT_RE + '\\s*(?:=|:=)\\s*)?(\\(.*\\))?\\s*\\B\\->\\*?', end: '\\->\\*?' }, { begin: '(' + JS_IDENT_RE + '\\s*(?:=|:=)\\s*)?!?(\\(.*\\))?\\s*\\B[-~]{1,2}>\\*?', end: '[-~]{1,2}>\\*?' }, { begin: '(' + JS_IDENT_RE + '\\s*(?:=|:=)\\s*)?(\\(.*\\))?\\s*\\B!?[-~]{1,2}>\\*?', end: '!?[-~]{1,2}>\\*?' } ] }, { className: 'class', beginKeywords: 'class', end: '$', illegal: /[:="\[\]]/, contains: [ { beginKeywords: 'extends', endsWithParent: true, illegal: /[:="\[\]]/, contains: [TITLE] }, TITLE ] }, { begin: JS_IDENT_RE + ':', end: ':', returnBegin: true, returnEnd: true, relevance: 0 } ]) }; }; /***/ }, /* 262 */ /***/ function(module, exports) { module.exports = function(hljs) { var identifier = '([-a-zA-Z$._][\\w\\-$.]*)'; return { //lexemes: '[.%]?' + hljs.IDENT_RE, keywords: 'begin end true false declare define global ' + 'constant private linker_private internal ' + 'available_externally linkonce linkonce_odr weak ' + 'weak_odr appending dllimport dllexport common ' + 'default hidden protected extern_weak external ' + 'thread_local zeroinitializer undef null to tail ' + 'target triple datalayout volatile nuw nsw nnan ' + 'ninf nsz arcp fast exact inbounds align ' + 'addrspace section alias module asm sideeffect ' + 'gc dbg linker_private_weak attributes blockaddress ' + 'initialexec localdynamic localexec prefix unnamed_addr ' + 'ccc fastcc coldcc x86_stdcallcc x86_fastcallcc ' + 'arm_apcscc arm_aapcscc arm_aapcs_vfpcc ptx_device ' + 'ptx_kernel intel_ocl_bicc msp430_intrcc spir_func ' + 'spir_kernel x86_64_sysvcc x86_64_win64cc x86_thiscallcc ' + 'cc c signext zeroext inreg sret nounwind ' + 'noreturn noalias nocapture byval nest readnone ' + 'readonly inlinehint noinline alwaysinline optsize ssp ' + 'sspreq noredzone noimplicitfloat naked builtin cold ' + 'nobuiltin noduplicate nonlazybind optnone returns_twice ' + 'sanitize_address sanitize_memory sanitize_thread sspstrong ' + 'uwtable returned type opaque eq ne slt sgt ' + 'sle sge ult ugt ule uge oeq one olt ogt ' + 'ole oge ord uno ueq une x acq_rel acquire ' + 'alignstack atomic catch cleanup filter inteldialect ' + 'max min monotonic nand personality release seq_cst ' + 'singlethread umax umin unordered xchg add fadd ' + 'sub fsub mul fmul udiv sdiv fdiv urem srem ' + 'frem shl lshr ashr and or xor icmp fcmp ' + 'phi call trunc zext sext fptrunc fpext uitofp ' + 'sitofp fptoui fptosi inttoptr ptrtoint bitcast ' + 'addrspacecast select va_arg ret br switch invoke ' + 'unwind unreachable indirectbr landingpad resume ' + 'malloc alloca free load store getelementptr ' + 'extractelement insertelement shufflevector getresult ' + 'extractvalue insertvalue atomicrmw cmpxchg fence ' + 'argmemonly double', contains: [ { className: 'keyword', begin: 'i\\d+' }, hljs.COMMENT( ';', '\\n', {relevance: 0} ), // Double quote string hljs.QUOTE_STRING_MODE, { className: 'string', variants: [ // Double-quoted string { begin: '"', end: '[^\\\\]"' }, ], relevance: 0 }, { className: 'title', variants: [ { begin: '@' + identifier }, { begin: '@\\d+' }, { begin: '!' 
+ identifier }, { begin: '!\\d+' + identifier } ] }, { className: 'symbol', variants: [ { begin: '%' + identifier }, { begin: '%\\d+' }, { begin: '#\\d+' }, ] }, { className: 'number', variants: [ { begin: '0[xX][a-fA-F0-9]+' }, { begin: '-?\\d+(?:[.]\\d+)?(?:[eE][-+]?\\d+(?:[.]\\d+)?)?' } ], relevance: 0 }, ] }; }; /***/ }, /* 263 */ /***/ function(module, exports) { module.exports = function(hljs) { var LSL_STRING_ESCAPE_CHARS = { className: 'subst', begin: /\\[tn"\\]/ }; var LSL_STRINGS = { className: 'string', begin: '"', end: '"', contains: [ LSL_STRING_ESCAPE_CHARS ] }; var LSL_NUMBERS = { className: 'number', begin: hljs.C_NUMBER_RE }; var LSL_CONSTANTS = { className: 'literal', variants: [ { begin: '\\b(?:PI|TWO_PI|PI_BY_TWO|DEG_TO_RAD|RAD_TO_DEG|SQRT2)\\b' }, { begin: '\\b(?:XP_ERROR_(?:EXPERIENCES_DISABLED|EXPERIENCE_(?:DISABLED|SUSPENDED)|INVALID_(?:EXPERIENCE|PARAMETERS)|KEY_NOT_FOUND|MATURITY_EXCEEDED|NONE|NOT_(?:FOUND|PERMITTED(?:_LAND)?)|NO_EXPERIENCE|QUOTA_EXCEEDED|RETRY_UPDATE|STORAGE_EXCEPTION|STORE_DISABLED|THROTTLED|UNKNOWN_ERROR)|JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASS(?:IVE|_(?:ALWAYS|IF_NOT_HANDLED|NEVER))|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:CLICK_ACTION|HOVER_HEIGHT|LAST_OWNER_ID|(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_(?:COUNT|EQUIVALENCE)|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|REZZER_KEY|ROO?T|VELOCITY|OMEGA|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|(?:BODY_SHAPE|PATHFINDING)_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|TOTAL_INVENTORY_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?)|[LR]HAND_RING1|TAIL_(?:BASE|TIP)|[LR]WING|FACE_(?:JAW|[LR]EAR|[LR]EYE|TOUNGE)|GROIN|HIND_[LR]FOOT)|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RA
TE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:ALPHA_MODE(?:_(?:BLEND|EMISSIVE|MASK|NONE))?|NORMAL|SPECULAR|TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[ABCD]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DY
NAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\\b' }, { begin: '\\b(?:FALSE|TRUE)\\b' }, { begin: '\\b(?:ZERO_ROTATION)\\b' }, { begin: '\\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\\b' }, { begin: '\\b(?:ZERO_VECTOR|TOUCH_INVALID_(?:TEXCOORD|VECTOR))\\b' } ] }; var LSL_FUNCTIONS = { className: 'built_in', begin: '\\b(?:ll(?:AgentInExperience|(?:Create|DataSize|Delete|KeyCount|Keys|Read|Update)KeyValue|GetExperience(?:Details|ErrorMessage)|ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached(?:List)?|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|Request(?:Experience)?Permissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Reque
st(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\\b' }; return { illegal: ':', contains: [ LSL_STRINGS, { className: 'comment', variants: [ hljs.COMMENT('//', '$'), hljs.COMMENT('/\\*', '\\*/') ] }, LSL_NUMBERS, { className: 'section', variants: [ { begin: '\\b(?:state|default)\\b' }, { begin: '\\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|experience_permissions(?:_denied)?|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\\b' } ] }, LSL_FUNCTIONS, LSL_CONSTANTS, { className: 'type', begin: '\\b(?:integer|float|string|key|vector|quaternion|rotation|list)\\b' } ] }; }; /***/ }, /* 264 */ /***/ function(module, exports) { module.exports = function(hljs) { var OPENING_LONG_BRACKET = '\\[=*\\['; var CLOSING_LONG_BRACKET = '\\]=*\\]'; var LONG_BRACKETS = { begin: OPENING_LONG_BRACKET, end: CLOSING_LONG_BRACKET, contains: ['self'] }; var COMMENTS = [ hljs.COMMENT('--(?!' 
+ OPENING_LONG_BRACKET + ')', '$'), hljs.COMMENT( '--' + OPENING_LONG_BRACKET, CLOSING_LONG_BRACKET, { contains: [LONG_BRACKETS], relevance: 10 } ) ]; return { lexemes: hljs.UNDERSCORE_IDENT_RE, keywords: { keyword: 'and break do else elseif end false for if in local nil not or repeat return then ' + 'true until while', built_in: '_G _VERSION assert collectgarbage dofile error getfenv getmetatable ipairs load ' + 'loadfile loadstring module next pairs pcall print rawequal rawget rawset require ' + 'select setfenv setmetatable tonumber tostring type unpack xpcall coroutine debug ' + 'io math os package string table' }, contains: COMMENTS.concat([ { className: 'function', beginKeywords: 'function', end: '\\)', contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: '([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*'}), { className: 'params', begin: '\\(', endsWithParent: true, contains: COMMENTS } ].concat(COMMENTS) }, hljs.C_NUMBER_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { className: 'string', begin: OPENING_LONG_BRACKET, end: CLOSING_LONG_BRACKET, contains: [LONG_BRACKETS], relevance: 5 } ]) }; }; /***/ }, /* 265 */ /***/ function(module, exports) { module.exports = function(hljs) { var VARIABLE = { className: 'variable', begin: /\$\(/, end: /\)/, contains: [hljs.BACKSLASH_ESCAPE] }; return { aliases: ['mk', 'mak'], contains: [ hljs.HASH_COMMENT_MODE, { begin: /^\w+\s*\W*=/, returnBegin: true, relevance: 0, starts: { end: /\s*\W*=/, excludeEnd: true, starts: { end: /$/, relevance: 0, contains: [ VARIABLE ] } } }, { className: 'section', begin: /^[\w]+:\s*$/ }, { className: 'meta', begin: /^\.PHONY:/, end: /$/, keywords: {'meta-keyword': '.PHONY'}, lexemes: /[\.\w]+/ }, { begin: /^\t+/, end: /$/, relevance: 0, contains: [ hljs.QUOTE_STRING_MODE, VARIABLE ] } ] }; }; /***/ }, /* 266 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['mma'], lexemes: '(\\$|\\b)' + hljs.IDENT_RE + '\\b', keywords: 'AbelianGroup Abort AbortKernels AbortProtect Above Abs Absolute AbsoluteCorrelation AbsoluteCorrelationFunction AbsoluteCurrentValue AbsoluteDashing AbsoluteFileName AbsoluteOptions AbsolutePointSize AbsoluteThickness AbsoluteTime AbsoluteTiming AccountingForm Accumulate Accuracy AccuracyGoal ActionDelay ActionMenu ActionMenuBox ActionMenuBoxOptions Active ActiveItem ActiveStyle AcyclicGraphQ AddOnHelpPath AddTo AdjacencyGraph AdjacencyList AdjacencyMatrix AdjustmentBox AdjustmentBoxOptions AdjustTimeSeriesForecast AffineTransform After AiryAi AiryAiPrime AiryAiZero AiryBi AiryBiPrime AiryBiZero AlgebraicIntegerQ AlgebraicNumber AlgebraicNumberDenominator AlgebraicNumberNorm AlgebraicNumberPolynomial AlgebraicNumberTrace AlgebraicRules AlgebraicRulesData Algebraics AlgebraicUnitQ Alignment AlignmentMarker AlignmentPoint All AllowedDimensions AllowGroupClose AllowInlineCells AllowKernelInitialization AllowReverseGroupClose AllowScriptLevelChange AlphaChannel AlternatingGroup AlternativeHypothesis Alternatives AmbientLight Analytic AnchoredSearch And AndersonDarlingTest AngerJ AngleBracket AngularGauge Animate AnimationCycleOffset AnimationCycleRepetitions AnimationDirection AnimationDisplayTime AnimationRate AnimationRepetitions AnimationRunning Animator AnimatorBox AnimatorBoxOptions AnimatorElements Annotation Annuity AnnuityDue Antialiasing Antisymmetric Apart ApartSquareFree Appearance AppearanceElements AppellF1 Append AppendTo Apply ArcCos ArcCosh ArcCot ArcCoth ArcCsc ArcCsch ArcSec ArcSech ArcSin ArcSinDistribution ArcSinh ArcTan ArcTanh Arg 
ArgMax ArgMin ArgumentCountQ ARIMAProcess ArithmeticGeometricMean ARMAProcess ARProcess Array ArrayComponents ArrayDepth ArrayFlatten ArrayPad ArrayPlot ArrayQ ArrayReshape ArrayRules Arrays Arrow Arrow3DBox ArrowBox Arrowheads AspectRatio AspectRatioFixed Assert Assuming Assumptions AstronomicalData Asynchronous AsynchronousTaskObject AsynchronousTasks AtomQ Attributes AugmentedSymmetricPolynomial AutoAction AutoDelete AutoEvaluateEvents AutoGeneratedPackage AutoIndent AutoIndentSpacings AutoItalicWords AutoloadPath AutoMatch Automatic AutomaticImageSize AutoMultiplicationSymbol AutoNumberFormatting AutoOpenNotebooks AutoOpenPalettes AutorunSequencing AutoScaling AutoScroll AutoSpacing AutoStyleOptions AutoStyleWords Axes AxesEdge AxesLabel AxesOrigin AxesStyle Axis ' + 'BabyMonsterGroupB Back Background BackgroundTasksSettings Backslash Backsubstitution Backward Band BandpassFilter BandstopFilter BarabasiAlbertGraphDistribution BarChart BarChart3D BarLegend BarlowProschanImportance BarnesG BarOrigin BarSpacing BartlettHannWindow BartlettWindow BaseForm Baseline BaselinePosition BaseStyle BatesDistribution BattleLemarieWavelet Because BeckmannDistribution Beep Before Begin BeginDialogPacket BeginFrontEndInteractionPacket BeginPackage BellB BellY Below BenfordDistribution BeniniDistribution BenktanderGibratDistribution BenktanderWeibullDistribution BernoulliB BernoulliDistribution BernoulliGraphDistribution BernoulliProcess BernsteinBasis BesselFilterModel BesselI BesselJ BesselJZero BesselK BesselY BesselYZero Beta BetaBinomialDistribution BetaDistribution BetaNegativeBinomialDistribution BetaPrimeDistribution BetaRegularized BetweennessCentrality BezierCurve BezierCurve3DBox BezierCurve3DBoxOptions BezierCurveBox BezierCurveBoxOptions BezierFunction BilateralFilter Binarize BinaryFormat BinaryImageQ BinaryRead BinaryReadList BinaryWrite BinCounts BinLists Binomial BinomialDistribution BinomialProcess BinormalDistribution BiorthogonalSplineWavelet BipartiteGraphQ BirnbaumImportance BirnbaumSaundersDistribution BitAnd BitClear BitGet BitLength BitNot BitOr BitSet BitShiftLeft BitShiftRight BitXor Black BlackmanHarrisWindow BlackmanNuttallWindow BlackmanWindow Blank BlankForm BlankNullSequence BlankSequence Blend Block BlockRandom BlomqvistBeta BlomqvistBetaTest Blue Blur BodePlot BohmanWindow Bold Bookmarks Boole BooleanConsecutiveFunction BooleanConvert BooleanCountingFunction BooleanFunction BooleanGraph BooleanMaxterms BooleanMinimize BooleanMinterms Booleans BooleanTable BooleanVariables BorderDimensions BorelTannerDistribution Bottom BottomHatTransform BoundaryStyle Bounds Box BoxBaselineShift BoxData BoxDimensions Boxed Boxes BoxForm BoxFormFormatTypes BoxFrame BoxID BoxMargins BoxMatrix BoxRatios BoxRotation BoxRotationPoint BoxStyle BoxWhiskerChart Bra BracketingBar BraKet BrayCurtisDistance BreadthFirstScan Break Brown BrownForsytheTest BrownianBridgeProcess BrowserCategory BSplineBasis BSplineCurve BSplineCurve3DBox BSplineCurveBox BSplineCurveBoxOptions BSplineFunction BSplineSurface BSplineSurface3DBox BubbleChart BubbleChart3D BubbleScale BubbleSizes BulletGauge BusinessDayQ ButterflyGraph ButterworthFilterModel Button ButtonBar ButtonBox ButtonBoxOptions ButtonCell ButtonContents ButtonData ButtonEvaluator ButtonExpandable ButtonFrame ButtonFunction ButtonMargins ButtonMinHeight ButtonNote ButtonNotebook ButtonSource ButtonStyle ButtonStyleMenuListing Byte ByteCount ByteOrdering ' + 'C CachedValue CacheGraphics CalendarData CalendarType CallPacket CanberraDistance Cancel 
CancelButton CandlestickChart Cap CapForm CapitalDifferentialD CardinalBSplineBasis CarmichaelLambda Cases Cashflow Casoratian Catalan CatalanNumber Catch CauchyDistribution CauchyWindow CayleyGraph CDF CDFDeploy CDFInformation CDFWavelet Ceiling Cell CellAutoOverwrite CellBaseline CellBoundingBox CellBracketOptions CellChangeTimes CellContents CellContext CellDingbat CellDynamicExpression CellEditDuplicate CellElementsBoundingBox CellElementSpacings CellEpilog CellEvaluationDuplicate CellEvaluationFunction CellEventActions CellFrame CellFrameColor CellFrameLabelMargins CellFrameLabels CellFrameMargins CellGroup CellGroupData CellGrouping CellGroupingRules CellHorizontalScrolling CellID CellLabel CellLabelAutoDelete CellLabelMargins CellLabelPositioning CellMargins CellObject CellOpen CellPrint CellProlog Cells CellSize CellStyle CellTags CellularAutomaton CensoredDistribution Censoring Center CenterDot CentralMoment CentralMomentGeneratingFunction CForm ChampernowneNumber ChanVeseBinarize Character CharacterEncoding CharacterEncodingsPath CharacteristicFunction CharacteristicPolynomial CharacterRange Characters ChartBaseStyle ChartElementData ChartElementDataFunction ChartElementFunction ChartElements ChartLabels ChartLayout ChartLegends ChartStyle Chebyshev1FilterModel Chebyshev2FilterModel ChebyshevDistance ChebyshevT ChebyshevU Check CheckAbort CheckAll Checkbox CheckboxBar CheckboxBox CheckboxBoxOptions ChemicalData ChessboardDistance ChiDistribution ChineseRemainder ChiSquareDistribution ChoiceButtons ChoiceDialog CholeskyDecomposition Chop Circle CircleBox CircleDot CircleMinus CirclePlus CircleTimes CirculantGraph CityData Clear ClearAll ClearAttributes ClearSystemCache ClebschGordan ClickPane Clip ClipboardNotebook ClipFill ClippingStyle ClipPlanes ClipRange Clock ClockGauge ClockwiseContourIntegral Close Closed CloseKernels ClosenessCentrality Closing ClosingAutoSave ClosingEvent ClusteringComponents CMYKColor Coarse Coefficient CoefficientArrays CoefficientDomain CoefficientList CoefficientRules CoifletWavelet Collect Colon ColonForm ColorCombine ColorConvert ColorData ColorDataFunction ColorFunction ColorFunctionScaling Colorize ColorNegate ColorOutput ColorProfileData ColorQuantize ColorReplace ColorRules ColorSelectorSettings ColorSeparate ColorSetter ColorSetterBox ColorSetterBoxOptions ColorSlider ColorSpace Column ColumnAlignments ColumnBackgrounds ColumnForm ColumnLines ColumnsEqual ColumnSpacings ColumnWidths CommonDefaultFormatTypes Commonest CommonestFilter CommonUnits CommunityBoundaryStyle CommunityGraphPlot CommunityLabels CommunityRegionStyle CompatibleUnitQ CompilationOptions CompilationTarget Compile Compiled CompiledFunction Complement CompleteGraph CompleteGraphQ CompleteKaryTree CompletionsListPacket Complex Complexes ComplexExpand ComplexInfinity ComplexityFunction ComponentMeasurements ' + 'ComponentwiseContextMenu Compose ComposeList ComposeSeries Composition CompoundExpression CompoundPoissonDistribution CompoundPoissonProcess CompoundRenewalProcess Compress CompressedData Condition ConditionalExpression Conditioned Cone ConeBox ConfidenceLevel ConfidenceRange ConfidenceTransform ConfigurationPath Congruent Conjugate ConjugateTranspose Conjunction Connect ConnectedComponents ConnectedGraphQ ConnesWindow ConoverTest ConsoleMessage ConsoleMessagePacket ConsolePrint Constant ConstantArray Constants ConstrainedMax ConstrainedMin ContentPadding ContentsBoundingBox ContentSelectable ContentSize Context ContextMenu Contexts ContextToFilename ContextToFileName 
Continuation Continue ContinuedFraction ContinuedFractionK ContinuousAction ContinuousMarkovProcess ContinuousTimeModelQ ContinuousWaveletData ContinuousWaveletTransform ContourDetect ContourGraphics ContourIntegral ContourLabels ContourLines ContourPlot ContourPlot3D Contours ContourShading ContourSmoothing ContourStyle ContraharmonicMean Control ControlActive ControlAlignment ControllabilityGramian ControllabilityMatrix ControllableDecomposition ControllableModelQ ControllerDuration ControllerInformation ControllerInformationData ControllerLinking ControllerManipulate ControllerMethod ControllerPath ControllerState ControlPlacement ControlsRendering ControlType Convergents ConversionOptions ConversionRules ConvertToBitmapPacket ConvertToPostScript ConvertToPostScriptPacket Convolve ConwayGroupCo1 ConwayGroupCo2 ConwayGroupCo3 CoordinateChartData CoordinatesToolOptions CoordinateTransform CoordinateTransformData CoprimeQ Coproduct CopulaDistribution Copyable CopyDirectory CopyFile CopyTag CopyToClipboard CornerFilter CornerNeighbors Correlation CorrelationDistance CorrelationFunction CorrelationTest Cos Cosh CoshIntegral CosineDistance CosineWindow CosIntegral Cot Coth Count CounterAssignments CounterBox CounterBoxOptions CounterClockwiseContourIntegral CounterEvaluator CounterFunction CounterIncrements CounterStyle CounterStyleMenuListing CountRoots CountryData Covariance CovarianceEstimatorFunction CovarianceFunction CoxianDistribution CoxIngersollRossProcess CoxModel CoxModelFit CramerVonMisesTest CreateArchive CreateDialog CreateDirectory CreateDocument CreateIntermediateDirectories CreatePalette CreatePalettePacket CreateScheduledTask CreateTemporary CreateWindow CriticalityFailureImportance CriticalitySuccessImportance CriticalSection Cross CrossingDetect CrossMatrix Csc Csch CubeRoot Cubics Cuboid CuboidBox Cumulant CumulantGeneratingFunction Cup CupCap Curl CurlyDoubleQuote CurlyQuote CurrentImage CurrentlySpeakingPacket CurrentValue CurvatureFlowFilter CurveClosed Cyan CycleGraph CycleIndexPolynomial Cycles CyclicGroup Cyclotomic Cylinder CylinderBox CylindricalDecomposition ' + 'D DagumDistribution DamerauLevenshteinDistance DampingFactor Darker Dashed Dashing DataCompression DataDistribution DataRange DataReversed Date DateDelimiters DateDifference DateFunction DateList DateListLogPlot DateListPlot DatePattern DatePlus DateRange DateString DateTicksFormat DaubechiesWavelet DavisDistribution DawsonF DayCount DayCountConvention DayMatchQ DayName DayPlus DayRange DayRound DeBruijnGraph Debug DebugTag Decimal DeclareKnownSymbols DeclarePackage Decompose Decrement DedekindEta Default DefaultAxesStyle DefaultBaseStyle DefaultBoxStyle DefaultButton DefaultColor DefaultControlPlacement DefaultDuplicateCellStyle DefaultDuration DefaultElement DefaultFaceGridsStyle DefaultFieldHintStyle DefaultFont DefaultFontProperties DefaultFormatType DefaultFormatTypeForStyle DefaultFrameStyle DefaultFrameTicksStyle DefaultGridLinesStyle DefaultInlineFormatType DefaultInputFormatType DefaultLabelStyle DefaultMenuStyle DefaultNaturalLanguage DefaultNewCellStyle DefaultNewInlineCellStyle DefaultNotebook DefaultOptions DefaultOutputFormatType DefaultStyle DefaultStyleDefinitions DefaultTextFormatType DefaultTextInlineFormatType DefaultTicksStyle DefaultTooltipStyle DefaultValues Defer DefineExternal DefineInputStreamMethod DefineOutputStreamMethod Definition Degree DegreeCentrality DegreeGraphDistribution DegreeLexicographic DegreeReverseLexicographic Deinitialization Del Deletable Delete 
DeleteBorderComponents DeleteCases DeleteContents DeleteDirectory DeleteDuplicates DeleteFile DeleteSmallComponents DeleteWithContents DeletionWarning Delimiter DelimiterFlashTime DelimiterMatching Delimiters Denominator DensityGraphics DensityHistogram DensityPlot DependentVariables Deploy Deployed Depth DepthFirstScan Derivative DerivativeFilter DescriptorStateSpace DesignMatrix Det DGaussianWavelet DiacriticalPositioning Diagonal DiagonalMatrix Dialog DialogIndent DialogInput DialogLevel DialogNotebook DialogProlog DialogReturn DialogSymbols Diamond DiamondMatrix DiceDissimilarity DictionaryLookup DifferenceDelta DifferenceOrder DifferenceRoot DifferenceRootReduce Differences DifferentialD DifferentialRoot DifferentialRootReduce DifferentiatorFilter DigitBlock DigitBlockMinimum DigitCharacter DigitCount DigitQ DihedralGroup Dilation Dimensions DiracComb DiracDelta DirectedEdge DirectedEdges DirectedGraph DirectedGraphQ DirectedInfinity Direction Directive Directory DirectoryName DirectoryQ DirectoryStack DirichletCharacter DirichletConvolve DirichletDistribution DirichletL DirichletTransform DirichletWindow DisableConsolePrintPacket DiscreteChirpZTransform DiscreteConvolve DiscreteDelta DiscreteHadamardTransform DiscreteIndicator DiscreteLQEstimatorGains DiscreteLQRegulatorGains DiscreteLyapunovSolve DiscreteMarkovProcess DiscretePlot DiscretePlot3D DiscreteRatio DiscreteRiccatiSolve DiscreteShift DiscreteTimeModelQ DiscreteUniformDistribution DiscreteVariables DiscreteWaveletData DiscreteWaveletPacketTransform ' + 'DiscreteWaveletTransform Discriminant Disjunction Disk DiskBox DiskMatrix Dispatch DispersionEstimatorFunction Display DisplayAllSteps DisplayEndPacket DisplayFlushImagePacket DisplayForm DisplayFunction DisplayPacket DisplayRules DisplaySetSizePacket DisplayString DisplayTemporary DisplayWith DisplayWithRef DisplayWithVariable DistanceFunction DistanceTransform Distribute Distributed DistributedContexts DistributeDefinitions DistributionChart DistributionDomain DistributionFitTest DistributionParameterAssumptions DistributionParameterQ Dithering Div Divergence Divide DivideBy Dividers Divisible Divisors DivisorSigma DivisorSum DMSList DMSString Do DockedCells DocumentNotebook DominantColors DOSTextFormat Dot DotDashed DotEqual Dotted DoubleBracketingBar DoubleContourIntegral DoubleDownArrow DoubleLeftArrow DoubleLeftRightArrow DoubleLeftTee DoubleLongLeftArrow DoubleLongLeftRightArrow DoubleLongRightArrow DoubleRightArrow DoubleRightTee DoubleUpArrow DoubleUpDownArrow DoubleVerticalBar DoublyInfinite Down DownArrow DownArrowBar DownArrowUpArrow DownLeftRightVector DownLeftTeeVector DownLeftVector DownLeftVectorBar DownRightTeeVector DownRightVector DownRightVectorBar Downsample DownTee DownTeeArrow DownValues DragAndDrop DrawEdges DrawFrontFaces DrawHighlighted Drop DSolve Dt DualLinearProgramming DualSystemsModel DumpGet DumpSave DuplicateFreeQ Dynamic DynamicBox DynamicBoxOptions DynamicEvaluationTimeout DynamicLocation DynamicModule DynamicModuleBox DynamicModuleBoxOptions DynamicModuleParent DynamicModuleValues DynamicName DynamicNamespace DynamicReference DynamicSetting DynamicUpdating DynamicWrapper DynamicWrapperBox DynamicWrapperBoxOptions ' + 'E EccentricityCentrality EdgeAdd EdgeBetweennessCentrality EdgeCapacity EdgeCapForm EdgeColor EdgeConnectivity EdgeCost EdgeCount EdgeCoverQ EdgeDashing EdgeDelete EdgeDetect EdgeForm EdgeIndex EdgeJoinForm EdgeLabeling EdgeLabels EdgeLabelStyle EdgeList EdgeOpacity EdgeQ EdgeRenderingFunction EdgeRules EdgeShapeFunction 
EdgeStyle EdgeThickness EdgeWeight Editable EditButtonSettings EditCellTagsSettings EditDistance EffectiveInterest Eigensystem Eigenvalues EigenvectorCentrality Eigenvectors Element ElementData Eliminate EliminationOrder EllipticE EllipticExp EllipticExpPrime EllipticF EllipticFilterModel EllipticK EllipticLog EllipticNomeQ EllipticPi EllipticReducedHalfPeriods EllipticTheta EllipticThetaPrime EmitSound EmphasizeSyntaxErrors EmpiricalDistribution Empty EmptyGraphQ EnableConsolePrintPacket Enabled Encode End EndAdd EndDialogPacket EndFrontEndInteractionPacket EndOfFile EndOfLine EndOfString EndPackage EngineeringForm Enter EnterExpressionPacket EnterTextPacket Entropy EntropyFilter Environment Epilog Equal EqualColumns EqualRows EqualTilde EquatedTo Equilibrium EquirippleFilterKernel Equivalent Erf Erfc Erfi ErlangB ErlangC ErlangDistribution Erosion ErrorBox ErrorBoxOptions ErrorNorm ErrorPacket ErrorsDialogSettings EstimatedDistribution EstimatedProcess EstimatorGains EstimatorRegulator EuclideanDistance EulerE EulerGamma EulerianGraphQ EulerPhi Evaluatable Evaluate Evaluated EvaluatePacket EvaluationCell EvaluationCompletionAction EvaluationElements EvaluationMode EvaluationMonitor EvaluationNotebook EvaluationObject EvaluationOrder Evaluator EvaluatorNames EvenQ EventData EventEvaluator EventHandler EventHandlerTag EventLabels ExactBlackmanWindow ExactNumberQ ExactRootIsolation ExampleData Except ExcludedForms ExcludePods Exclusions ExclusionsStyle Exists Exit ExitDialog Exp Expand ExpandAll ExpandDenominator ExpandFileName ExpandNumerator Expectation ExpectationE ExpectedValue ExpGammaDistribution ExpIntegralE ExpIntegralEi Exponent ExponentFunction ExponentialDistribution ExponentialFamily ExponentialGeneratingFunction ExponentialMovingAverage ExponentialPowerDistribution ExponentPosition ExponentStep Export ExportAutoReplacements ExportPacket ExportString Expression ExpressionCell ExpressionPacket ExpToTrig ExtendedGCD Extension ExtentElementFunction ExtentMarkers ExtentSize ExternalCall ExternalDataCharacterEncoding Extract ExtractArchive ExtremeValueDistribution ' + 'FaceForm FaceGrids FaceGridsStyle Factor FactorComplete Factorial Factorial2 FactorialMoment FactorialMomentGeneratingFunction FactorialPower FactorInteger FactorList FactorSquareFree FactorSquareFreeList FactorTerms FactorTermsList Fail FailureDistribution False FARIMAProcess FEDisableConsolePrintPacket FeedbackSector FeedbackSectorStyle FeedbackType FEEnableConsolePrintPacket Fibonacci FieldHint FieldHintStyle FieldMasked FieldSize File FileBaseName FileByteCount FileDate FileExistsQ FileExtension FileFormat FileHash FileInformation FileName FileNameDepth FileNameDialogSettings FileNameDrop FileNameJoin FileNames FileNameSetter FileNameSplit FileNameTake FilePrint FileType FilledCurve FilledCurveBox Filling FillingStyle FillingTransform FilterRules FinancialBond FinancialData FinancialDerivative FinancialIndicator Find FindArgMax FindArgMin FindClique FindClusters FindCurvePath FindDistributionParameters FindDivisions FindEdgeCover FindEdgeCut FindEulerianCycle FindFaces FindFile FindFit FindGeneratingFunction FindGeoLocation FindGeometricTransform FindGraphCommunities FindGraphIsomorphism FindGraphPartition FindHamiltonianCycle FindIndependentEdgeSet FindIndependentVertexSet FindInstance FindIntegerNullVector FindKClan FindKClique FindKClub FindKPlex FindLibrary FindLinearRecurrence FindList FindMaximum FindMaximumFlow FindMaxValue FindMinimum FindMinimumCostFlow FindMinimumCut FindMinValue FindPermutation 
FindPostmanTour FindProcessParameters FindRoot FindSequenceFunction FindSettings FindShortestPath FindShortestTour FindThreshold FindVertexCover FindVertexCut Fine FinishDynamic FiniteAbelianGroupCount FiniteGroupCount FiniteGroupData First FirstPassageTimeDistribution FischerGroupFi22 FischerGroupFi23 FischerGroupFi24Prime FisherHypergeometricDistribution FisherRatioTest FisherZDistribution Fit FitAll FittedModel FixedPoint FixedPointList FlashSelection Flat Flatten FlattenAt FlatTopWindow FlipView Floor FlushPrintOutputPacket Fold FoldList Font FontColor FontFamily FontForm FontName FontOpacity FontPostScriptName FontProperties FontReencoding FontSize FontSlant FontSubstitutions FontTracking FontVariations FontWeight For ForAll Format FormatRules FormatType FormatTypeAutoConvert FormatValues FormBox FormBoxOptions FortranForm Forward ForwardBackward Fourier FourierCoefficient FourierCosCoefficient FourierCosSeries FourierCosTransform FourierDCT FourierDCTFilter FourierDCTMatrix FourierDST FourierDSTMatrix FourierMatrix FourierParameters FourierSequenceTransform FourierSeries FourierSinCoefficient FourierSinSeries FourierSinTransform FourierTransform FourierTrigSeries FractionalBrownianMotionProcess FractionalPart FractionBox FractionBoxOptions FractionLine Frame FrameBox FrameBoxOptions Framed FrameInset FrameLabel Frameless FrameMargins FrameStyle FrameTicks FrameTicksStyle FRatioDistribution FrechetDistribution FreeQ FrequencySamplingFilterKernel FresnelC FresnelS Friday FrobeniusNumber FrobeniusSolve ' + 'FromCharacterCode FromCoefficientRules FromContinuedFraction FromDate FromDigits FromDMS Front FrontEndDynamicExpression FrontEndEventActions FrontEndExecute FrontEndObject FrontEndResource FrontEndResourceString FrontEndStackSize FrontEndToken FrontEndTokenExecute FrontEndValueCache FrontEndVersion FrontFaceColor FrontFaceOpacity Full FullAxes FullDefinition FullForm FullGraphics FullOptions FullSimplify Function FunctionExpand FunctionInterpolation FunctionSpace FussellVeselyImportance ' + 'GaborFilter GaborMatrix GaborWavelet GainMargins GainPhaseMargins Gamma GammaDistribution GammaRegularized GapPenalty Gather GatherBy GaugeFaceElementFunction GaugeFaceStyle GaugeFrameElementFunction GaugeFrameSize GaugeFrameStyle GaugeLabels GaugeMarkers GaugeStyle GaussianFilter GaussianIntegers GaussianMatrix GaussianWindow GCD GegenbauerC General GeneralizedLinearModelFit GenerateConditions GeneratedCell GeneratedParameters GeneratingFunction Generic GenericCylindricalDecomposition GenomeData GenomeLookup GeodesicClosing GeodesicDilation GeodesicErosion GeodesicOpening GeoDestination GeodesyData GeoDirection GeoDistance GeoGridPosition GeometricBrownianMotionProcess GeometricDistribution GeometricMean GeometricMeanFilter GeometricTransformation GeometricTransformation3DBox GeometricTransformation3DBoxOptions GeometricTransformationBox GeometricTransformationBoxOptions GeoPosition GeoPositionENU GeoPositionXYZ GeoProjectionData GestureHandler GestureHandlerTag Get GetBoundingBoxSizePacket GetContext GetEnvironment GetFileName GetFrontEndOptionsDataPacket GetLinebreakInformationPacket GetMenusPacket GetPageBreakInformationPacket Glaisher GlobalClusteringCoefficient GlobalPreferences GlobalSession Glow GoldenRatio GompertzMakehamDistribution GoodmanKruskalGamma GoodmanKruskalGammaTest Goto Grad Gradient GradientFilter GradientOrientationFilter Graph GraphAssortativity GraphCenter GraphComplement GraphData GraphDensity GraphDiameter GraphDifference GraphDisjointUnion ' + 'GraphDistance 
GraphDistanceMatrix GraphElementData GraphEmbedding GraphHighlight GraphHighlightStyle GraphHub Graphics Graphics3D Graphics3DBox Graphics3DBoxOptions GraphicsArray GraphicsBaseline GraphicsBox GraphicsBoxOptions GraphicsColor GraphicsColumn GraphicsComplex GraphicsComplex3DBox GraphicsComplex3DBoxOptions GraphicsComplexBox GraphicsComplexBoxOptions GraphicsContents GraphicsData GraphicsGrid GraphicsGridBox GraphicsGroup GraphicsGroup3DBox GraphicsGroup3DBoxOptions GraphicsGroupBox GraphicsGroupBoxOptions GraphicsGrouping GraphicsHighlightColor GraphicsRow GraphicsSpacing GraphicsStyle GraphIntersection GraphLayout GraphLinkEfficiency GraphPeriphery GraphPlot GraphPlot3D GraphPower GraphPropertyDistribution GraphQ GraphRadius GraphReciprocity GraphRoot GraphStyle GraphUnion Gray GrayLevel GreatCircleDistance Greater GreaterEqual GreaterEqualLess GreaterFullEqual GreaterGreater GreaterLess GreaterSlantEqual GreaterTilde Green Grid GridBaseline GridBox GridBoxAlignment GridBoxBackground GridBoxDividers GridBoxFrame GridBoxItemSize GridBoxItemStyle GridBoxOptions GridBoxSpacings GridCreationSettings GridDefaultElement GridElementStyleOptions GridFrame GridFrameMargins GridGraph GridLines GridLinesStyle GroebnerBasis GroupActionBase GroupCentralizer GroupElementFromWord GroupElementPosition GroupElementQ GroupElements GroupElementToWord GroupGenerators GroupMultiplicationTable GroupOrbits GroupOrder GroupPageBreakWithin GroupSetwiseStabilizer GroupStabilizer GroupStabilizerChain Gudermannian GumbelDistribution ' + 'HaarWavelet HadamardMatrix HalfNormalDistribution HamiltonianGraphQ HammingDistance HammingWindow HankelH1 HankelH2 HankelMatrix HannPoissonWindow HannWindow HaradaNortonGroupHN HararyGraph HarmonicMean HarmonicMeanFilter HarmonicNumber Hash HashTable Haversine HazardFunction Head HeadCompose Heads HeavisideLambda HeavisidePi HeavisideTheta HeldGroupHe HeldPart HelpBrowserLookup HelpBrowserNotebook HelpBrowserSettings HermiteDecomposition HermiteH HermitianMatrixQ HessenbergDecomposition Hessian HexadecimalCharacter Hexahedron HexahedronBox HexahedronBoxOptions HiddenSurface HighlightGraph HighlightImage HighpassFilter HigmanSimsGroupHS HilbertFilter HilbertMatrix Histogram Histogram3D HistogramDistribution HistogramList HistogramTransform HistogramTransformInterpolation HitMissTransform HITSCentrality HodgeDual HoeffdingD HoeffdingDTest Hold HoldAll HoldAllComplete HoldComplete HoldFirst HoldForm HoldPattern HoldRest HolidayCalendar HomeDirectory HomePage Horizontal HorizontalForm HorizontalGauge HorizontalScrollPosition HornerForm HotellingTSquareDistribution HoytDistribution HTMLSave Hue HumpDownHump HumpEqual HurwitzLerchPhi HurwitzZeta HyperbolicDistribution HypercubeGraph HyperexponentialDistribution Hyperfactorial Hypergeometric0F1 Hypergeometric0F1Regularized Hypergeometric1F1 Hypergeometric1F1Regularized Hypergeometric2F1 Hypergeometric2F1Regularized HypergeometricDistribution HypergeometricPFQ HypergeometricPFQRegularized HypergeometricU Hyperlink HyperlinkCreationSettings Hyphenation HyphenationOptions HypoexponentialDistribution HypothesisTestData ' + 'I Identity IdentityMatrix If IgnoreCase Im Image Image3D Image3DSlices ImageAccumulate ImageAdd ImageAdjust ImageAlign ImageApply ImageAspectRatio ImageAssemble ImageCache ImageCacheValid ImageCapture ImageChannels ImageClip ImageColorSpace ImageCompose ImageConvolve ImageCooccurrence ImageCorners ImageCorrelate ImageCorrespondingPoints ImageCrop ImageData ImageDataPacket ImageDeconvolve ImageDemosaic ImageDifference 
ImageDimensions ImageDistance ImageEffect ImageFeatureTrack ImageFileApply ImageFileFilter ImageFileScan ImageFilter ImageForestingComponents ImageForwardTransformation ImageHistogram ImageKeypoints ImageLevels ImageLines ImageMargins ImageMarkers ImageMeasurements ImageMultiply ImageOffset ImagePad ImagePadding ImagePartition ImagePeriodogram ImagePerspectiveTransformation ImageQ ImageRangeCache ImageReflect ImageRegion ImageResize ImageResolution ImageRotate ImageRotated ImageScaled ImageScan ImageSize ImageSizeAction ImageSizeCache ImageSizeMultipliers ImageSizeRaw ImageSubtract ImageTake ImageTransformation ImageTrim ImageType ImageValue ImageValuePositions Implies Import ImportAutoReplacements ImportString ImprovementImportance In IncidenceGraph IncidenceList IncidenceMatrix IncludeConstantBasis IncludeFileExtension IncludePods IncludeSingularTerm Increment Indent IndentingNewlineSpacings IndentMaxFraction IndependenceTest IndependentEdgeSetQ IndependentUnit IndependentVertexSetQ Indeterminate IndexCreationOptions Indexed IndexGraph IndexTag Inequality InexactNumberQ InexactNumbers Infinity Infix Information Inherited InheritScope Initialization InitializationCell InitializationCellEvaluation InitializationCellWarning InlineCounterAssignments InlineCounterIncrements InlineRules Inner Inpaint Input InputAliases InputAssumptions InputAutoReplacements InputField InputFieldBox InputFieldBoxOptions InputForm InputGrouping InputNamePacket InputNotebook InputPacket InputSettings InputStream InputString InputStringPacket InputToBoxFormPacket Insert InsertionPointObject InsertResults Inset Inset3DBox Inset3DBoxOptions InsetBox InsetBoxOptions Install InstallService InString Integer IntegerDigits IntegerExponent IntegerLength IntegerPart IntegerPartitions IntegerQ Integers IntegerString Integral Integrate Interactive InteractiveTradingChart Interlaced Interleaving InternallyBalancedDecomposition InterpolatingFunction InterpolatingPolynomial Interpolation InterpolationOrder InterpolationPoints InterpolationPrecision Interpretation InterpretationBox InterpretationBoxOptions InterpretationFunction ' + 'InterpretTemplate InterquartileRange Interrupt InterruptSettings Intersection Interval IntervalIntersection IntervalMemberQ IntervalUnion Inverse InverseBetaRegularized InverseCDF InverseChiSquareDistribution InverseContinuousWaveletTransform InverseDistanceTransform InverseEllipticNomeQ InverseErf InverseErfc InverseFourier InverseFourierCosTransform InverseFourierSequenceTransform InverseFourierSinTransform InverseFourierTransform InverseFunction InverseFunctions InverseGammaDistribution InverseGammaRegularized InverseGaussianDistribution InverseGudermannian InverseHaversine InverseJacobiCD InverseJacobiCN InverseJacobiCS InverseJacobiDC InverseJacobiDN InverseJacobiDS InverseJacobiNC InverseJacobiND InverseJacobiNS InverseJacobiSC InverseJacobiSD InverseJacobiSN InverseLaplaceTransform InversePermutation InverseRadon InverseSeries InverseSurvivalFunction InverseWaveletTransform InverseWeierstrassP InverseZTransform Invisible InvisibleApplication InvisibleTimes IrreduciblePolynomialQ IsolatingInterval IsomorphicGraphQ IsotopeData Italic Item ItemBox ItemBoxOptions ItemSize ItemStyle ItoProcess ' + 'JaccardDissimilarity JacobiAmplitude Jacobian JacobiCD JacobiCN JacobiCS JacobiDC JacobiDN JacobiDS JacobiNC JacobiND JacobiNS JacobiP JacobiSC JacobiSD JacobiSN JacobiSymbol JacobiZeta JankoGroupJ1 JankoGroupJ2 JankoGroupJ3 JankoGroupJ4 JarqueBeraALMTest JohnsonDistribution Join Joined JoinedCurve 
JoinedCurveBox JoinForm JordanDecomposition JordanModelDecomposition ' + 'K KagiChart KaiserBesselWindow KaiserWindow KalmanEstimator KalmanFilter KarhunenLoeveDecomposition KaryTree KatzCentrality KCoreComponents KDistribution KelvinBei KelvinBer KelvinKei KelvinKer KendallTau KendallTauTest KernelExecute KernelMixtureDistribution KernelObject Kernels Ket Khinchin KirchhoffGraph KirchhoffMatrix KleinInvariantJ KnightTourGraph KnotData KnownUnitQ KolmogorovSmirnovTest KroneckerDelta KroneckerModelDecomposition KroneckerProduct KroneckerSymbol KuiperTest KumaraswamyDistribution Kurtosis KuwaharaFilter ' + 'Label Labeled LabeledSlider LabelingFunction LabelStyle LaguerreL LambdaComponents LambertW LanczosWindow LandauDistribution Language LanguageCategory LaplaceDistribution LaplaceTransform Laplacian LaplacianFilter LaplacianGaussianFilter Large Larger Last Latitude LatitudeLongitude LatticeData LatticeReduce Launch LaunchKernels LayeredGraphPlot LayerSizeFunction LayoutInformation LCM LeafCount LeapYearQ LeastSquares LeastSquaresFilterKernel Left LeftArrow LeftArrowBar LeftArrowRightArrow LeftDownTeeVector LeftDownVector LeftDownVectorBar LeftRightArrow LeftRightVector LeftTee LeftTeeArrow LeftTeeVector LeftTriangle LeftTriangleBar LeftTriangleEqual LeftUpDownVector LeftUpTeeVector LeftUpVector LeftUpVectorBar LeftVector LeftVectorBar LegendAppearance Legended LegendFunction LegendLabel LegendLayout LegendMargins LegendMarkers LegendMarkerSize LegendreP LegendreQ LegendreType Length LengthWhile LerchPhi Less LessEqual LessEqualGreater LessFullEqual LessGreater LessLess LessSlantEqual LessTilde LetterCharacter LetterQ Level LeveneTest LeviCivitaTensor LevyDistribution Lexicographic LibraryFunction LibraryFunctionError LibraryFunctionInformation LibraryFunctionLoad LibraryFunctionUnload LibraryLoad LibraryUnload LicenseID LiftingFilterData LiftingWaveletTransform LightBlue LightBrown LightCyan Lighter LightGray LightGreen Lighting LightingAngle LightMagenta LightOrange LightPink LightPurple LightRed LightSources LightYellow Likelihood Limit LimitsPositioning LimitsPositioningTokens LindleyDistribution Line Line3DBox LinearFilter LinearFractionalTransform LinearModelFit LinearOffsetFunction LinearProgramming LinearRecurrence LinearSolve LinearSolveFunction LineBox LineBreak LinebreakAdjustments LineBreakChart LineBreakWithin LineColor LineForm LineGraph LineIndent LineIndentMaxFraction LineIntegralConvolutionPlot LineIntegralConvolutionScale LineLegend LineOpacity LineSpacing LineWrapParts LinkActivate LinkClose LinkConnect LinkConnectedQ LinkCreate LinkError LinkFlush LinkFunction LinkHost LinkInterrupt LinkLaunch LinkMode LinkObject LinkOpen LinkOptions LinkPatterns LinkProtocol LinkRead LinkReadHeld LinkReadyQ Links LinkWrite LinkWriteHeld LiouvilleLambda List Listable ListAnimate ListContourPlot ListContourPlot3D ListConvolve ListCorrelate ListCurvePathPlot ListDeconvolve ListDensityPlot Listen ListFourierSequenceTransform ListInterpolation ListLineIntegralConvolutionPlot ListLinePlot ListLogLinearPlot ListLogLogPlot ListLogPlot ListPicker ListPickerBox ListPickerBoxBackground ListPickerBoxOptions ListPlay ListPlot ListPlot3D ListPointPlot3D ListPolarPlot ListQ ListStreamDensityPlot ListStreamPlot ListSurfacePlot3D ListVectorDensityPlot ListVectorPlot ListVectorPlot3D ListZTransform Literal LiteralSearch LocalClusteringCoefficient LocalizeVariables LocationEquivalenceTest LocationTest Locator LocatorAutoCreate LocatorBox LocatorBoxOptions LocatorCentering LocatorPane LocatorPaneBox 
LocatorPaneBoxOptions ' + 'LocatorRegion Locked Log Log10 Log2 LogBarnesG LogGamma LogGammaDistribution LogicalExpand LogIntegral LogisticDistribution LogitModelFit LogLikelihood LogLinearPlot LogLogisticDistribution LogLogPlot LogMultinormalDistribution LogNormalDistribution LogPlot LogRankTest LogSeriesDistribution LongEqual Longest LongestAscendingSequence LongestCommonSequence LongestCommonSequencePositions LongestCommonSubsequence LongestCommonSubsequencePositions LongestMatch LongForm Longitude LongLeftArrow LongLeftRightArrow LongRightArrow Loopback LoopFreeGraphQ LowerCaseQ LowerLeftArrow LowerRightArrow LowerTriangularize LowpassFilter LQEstimatorGains LQGRegulator LQOutputRegulatorGains LQRegulatorGains LUBackSubstitution LucasL LuccioSamiComponents LUDecomposition LyapunovSolve LyonsGroupLy ' + 'MachineID MachineName MachineNumberQ MachinePrecision MacintoshSystemPageSetup Magenta Magnification Magnify MainSolve MaintainDynamicCaches Majority MakeBoxes MakeExpression MakeRules MangoldtLambda ManhattanDistance Manipulate Manipulator MannWhitneyTest MantissaExponent Manual Map MapAll MapAt MapIndexed MAProcess MapThread MarcumQ MardiaCombinedTest MardiaKurtosisTest MardiaSkewnessTest MarginalDistribution MarkovProcessProperties Masking MatchingDissimilarity MatchLocalNameQ MatchLocalNames MatchQ Material MathematicaNotation MathieuC MathieuCharacteristicA MathieuCharacteristicB MathieuCharacteristicExponent MathieuCPrime MathieuGroupM11 MathieuGroupM12 MathieuGroupM22 MathieuGroupM23 MathieuGroupM24 MathieuS MathieuSPrime MathMLForm MathMLText Matrices MatrixExp MatrixForm MatrixFunction MatrixLog MatrixPlot MatrixPower MatrixQ MatrixRank Max MaxBend MaxDetect MaxExtraBandwidths MaxExtraConditions MaxFeatures MaxFilter Maximize MaxIterations MaxMemoryUsed MaxMixtureKernels MaxPlotPoints MaxPoints MaxRecursion MaxStableDistribution MaxStepFraction MaxSteps MaxStepSize MaxValue MaxwellDistribution McLaughlinGroupMcL Mean MeanClusteringCoefficient MeanDegreeConnectivity MeanDeviation MeanFilter MeanGraphDistance MeanNeighborDegree MeanShift MeanShiftFilter Median MedianDeviation MedianFilter Medium MeijerG MeixnerDistribution MemberQ MemoryConstrained MemoryInUse Menu MenuAppearance MenuCommandKey MenuEvaluator MenuItem MenuPacket MenuSortingValue MenuStyle MenuView MergeDifferences Mesh MeshFunctions MeshRange MeshShading MeshStyle Message MessageDialog MessageList MessageName MessageOptions MessagePacket Messages MessagesNotebook MetaCharacters MetaInformation Method MethodOptions MexicanHatWavelet MeyerWavelet Min MinDetect MinFilter MinimalPolynomial MinimalStateSpaceModel Minimize Minors MinRecursion MinSize MinStableDistribution Minus MinusPlus MinValue Missing MissingDataMethod MittagLefflerE MixedRadix MixedRadixQuantity MixtureDistribution Mod Modal Mode Modular ModularLambda Module Modulus MoebiusMu Moment Momentary MomentConvert MomentEvaluate MomentGeneratingFunction Monday Monitor MonomialList MonomialOrder MonsterGroupM MorletWavelet MorphologicalBinarize MorphologicalBranchPoints MorphologicalComponents MorphologicalEulerNumber MorphologicalGraph MorphologicalPerimeter MorphologicalTransform Most MouseAnnotation MouseAppearance MouseAppearanceTag MouseButtons Mouseover MousePointerNote MousePosition MovingAverage MovingMedian MoyalDistribution MultiedgeStyle MultilaunchWarning MultiLetterItalics MultiLetterStyle MultilineFunction Multinomial MultinomialDistribution MultinormalDistribution MultiplicativeOrder Multiplicity Multiselection 
MultivariateHypergeometricDistribution MultivariatePoissonDistribution MultivariateTDistribution ' + 'N NakagamiDistribution NameQ Names NamespaceBox Nand NArgMax NArgMin NBernoulliB NCache NDSolve NDSolveValue Nearest NearestFunction NeedCurrentFrontEndPackagePacket NeedCurrentFrontEndSymbolsPacket NeedlemanWunschSimilarity Needs Negative NegativeBinomialDistribution NegativeMultinomialDistribution NeighborhoodGraph Nest NestedGreaterGreater NestedLessLess NestedScriptRules NestList NestWhile NestWhileList NevilleThetaC NevilleThetaD NevilleThetaN NevilleThetaS NewPrimitiveStyle NExpectation Next NextPrime NHoldAll NHoldFirst NHoldRest NicholsGridLines NicholsPlot NIntegrate NMaximize NMaxValue NMinimize NMinValue NominalVariables NonAssociative NoncentralBetaDistribution NoncentralChiSquareDistribution NoncentralFRatioDistribution NoncentralStudentTDistribution NonCommutativeMultiply NonConstants None NonlinearModelFit NonlocalMeansFilter NonNegative NonPositive Nor NorlundB Norm Normal NormalDistribution NormalGrouping Normalize NormalizedSquaredEuclideanDistance NormalsFunction NormFunction Not NotCongruent NotCupCap NotDoubleVerticalBar Notebook NotebookApply NotebookAutoSave NotebookClose NotebookConvertSettings NotebookCreate NotebookCreateReturnObject NotebookDefault NotebookDelete NotebookDirectory NotebookDynamicExpression NotebookEvaluate NotebookEventActions NotebookFileName NotebookFind NotebookFindReturnObject NotebookGet NotebookGetLayoutInformationPacket NotebookGetMisspellingsPacket NotebookInformation NotebookInterfaceObject NotebookLocate NotebookObject NotebookOpen NotebookOpenReturnObject NotebookPath NotebookPrint NotebookPut NotebookPutReturnObject NotebookRead NotebookResetGeneratedCells Notebooks NotebookSave NotebookSaveAs NotebookSelection NotebookSetupLayoutInformationPacket NotebooksMenu NotebookWrite NotElement NotEqualTilde NotExists NotGreater NotGreaterEqual NotGreaterFullEqual NotGreaterGreater NotGreaterLess NotGreaterSlantEqual NotGreaterTilde NotHumpDownHump NotHumpEqual NotLeftTriangle NotLeftTriangleBar NotLeftTriangleEqual NotLess NotLessEqual NotLessFullEqual NotLessGreater NotLessLess NotLessSlantEqual NotLessTilde NotNestedGreaterGreater NotNestedLessLess NotPrecedes NotPrecedesEqual NotPrecedesSlantEqual NotPrecedesTilde NotReverseElement NotRightTriangle NotRightTriangleBar NotRightTriangleEqual NotSquareSubset NotSquareSubsetEqual NotSquareSuperset NotSquareSupersetEqual NotSubset NotSubsetEqual NotSucceeds NotSucceedsEqual NotSucceedsSlantEqual NotSucceedsTilde NotSuperset NotSupersetEqual NotTilde NotTildeEqual NotTildeFullEqual NotTildeTilde NotVerticalBar NProbability NProduct NProductFactors NRoots NSolve NSum NSumTerms Null NullRecords NullSpace NullWords Number NumberFieldClassNumber NumberFieldDiscriminant NumberFieldFundamentalUnits NumberFieldIntegralBasis NumberFieldNormRepresentatives NumberFieldRegulator NumberFieldRootsOfUnity NumberFieldSignature NumberForm NumberFormat NumberMarks NumberMultiplier NumberPadding NumberPoint NumberQ NumberSeparator ' + 'NumberSigns NumberString Numerator NumericFunction NumericQ NuttallWindow NValues NyquistGridLines NyquistPlot ' + 'O ObservabilityGramian ObservabilityMatrix ObservableDecomposition ObservableModelQ OddQ Off Offset OLEData On ONanGroupON OneIdentity Opacity Open OpenAppend Opener OpenerBox OpenerBoxOptions OpenerView OpenFunctionInspectorPacket Opening OpenRead OpenSpecialOptions OpenTemporary OpenWrite Operate OperatingSystem OptimumFlowData Optional OptionInspectorSettings 
OptionQ Options OptionsPacket OptionsPattern OptionValue OptionValueBox OptionValueBoxOptions Or Orange Order OrderDistribution OrderedQ Ordering Orderless OrnsteinUhlenbeckProcess Orthogonalize Out Outer OutputAutoOverwrite OutputControllabilityMatrix OutputControllableModelQ OutputForm OutputFormData OutputGrouping OutputMathEditExpression OutputNamePacket OutputResponse OutputSizeLimit OutputStream Over OverBar OverDot Overflow OverHat Overlaps Overlay OverlayBox OverlayBoxOptions Overscript OverscriptBox OverscriptBoxOptions OverTilde OverVector OwenT OwnValues ' + 'PackingMethod PaddedForm Padding PadeApproximant PadLeft PadRight PageBreakAbove PageBreakBelow PageBreakWithin PageFooterLines PageFooters PageHeaderLines PageHeaders PageHeight PageRankCentrality PageWidth PairedBarChart PairedHistogram PairedSmoothHistogram PairedTTest PairedZTest PaletteNotebook PalettePath Pane PaneBox PaneBoxOptions Panel PanelBox PanelBoxOptions Paneled PaneSelector PaneSelectorBox PaneSelectorBoxOptions PaperWidth ParabolicCylinderD ParagraphIndent ParagraphSpacing ParallelArray ParallelCombine ParallelDo ParallelEvaluate Parallelization Parallelize ParallelMap ParallelNeeds ParallelProduct ParallelSubmit ParallelSum ParallelTable ParallelTry Parameter ParameterEstimator ParameterMixtureDistribution ParameterVariables ParametricFunction ParametricNDSolve ParametricNDSolveValue ParametricPlot ParametricPlot3D ParentConnect ParentDirectory ParentForm Parenthesize ParentList ParetoDistribution Part PartialCorrelationFunction PartialD ParticleData Partition PartitionsP PartitionsQ ParzenWindow PascalDistribution PassEventsDown PassEventsUp Paste PasteBoxFormInlineCells PasteButton Path PathGraph PathGraphQ Pattern PatternSequence PatternTest PauliMatrix PaulWavelet Pause PausedTime PDF PearsonChiSquareTest PearsonCorrelationTest PearsonDistribution PerformanceGoal PeriodicInterpolation Periodogram PeriodogramArray PermutationCycles PermutationCyclesQ PermutationGroup PermutationLength PermutationList PermutationListQ PermutationMax PermutationMin PermutationOrder PermutationPower PermutationProduct PermutationReplace Permutations PermutationSupport Permute PeronaMalikFilter Perpendicular PERTDistribution PetersenGraph PhaseMargins Pi Pick PIDData PIDDerivativeFilter PIDFeedforward PIDTune Piecewise PiecewiseExpand PieChart PieChart3D PillaiTrace PillaiTraceTest Pink Pivoting PixelConstrained PixelValue PixelValuePositions Placed Placeholder PlaceholderReplace Plain PlanarGraphQ Play PlayRange Plot Plot3D Plot3Matrix PlotDivision PlotJoined PlotLabel PlotLayout PlotLegends PlotMarkers PlotPoints PlotRange PlotRangeClipping PlotRangePadding PlotRegion PlotStyle Plus PlusMinus Pochhammer PodStates PodWidth Point Point3DBox PointBox PointFigureChart PointForm PointLegend PointSize PoissonConsulDistribution PoissonDistribution PoissonProcess PoissonWindow PolarAxes PolarAxesOrigin PolarGridLines PolarPlot PolarTicks PoleZeroMarkers PolyaAeppliDistribution PolyGamma Polygon Polygon3DBox Polygon3DBoxOptions PolygonBox PolygonBoxOptions PolygonHoleScale PolygonIntersections PolygonScale PolyhedronData PolyLog PolynomialExtendedGCD PolynomialForm PolynomialGCD PolynomialLCM PolynomialMod PolynomialQ PolynomialQuotient PolynomialQuotientRemainder PolynomialReduce PolynomialRemainder Polynomials PopupMenu PopupMenuBox PopupMenuBoxOptions PopupView PopupWindow Position Positive PositiveDefiniteMatrixQ PossibleZeroQ Postfix PostScript Power PowerDistribution PowerExpand PowerMod PowerModList ' + 
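/* The symbol list continues below; these are Mathematica (Wolfram Language)
   built-in names concatenated into the keyword string of this highlight.js
   language definition. */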
'PowerSpectralDensity PowersRepresentations PowerSymmetricPolynomial Precedence PrecedenceForm Precedes PrecedesEqual PrecedesSlantEqual PrecedesTilde Precision PrecisionGoal PreDecrement PredictionRoot PreemptProtect PreferencesPath Prefix PreIncrement Prepend PrependTo PreserveImageOptions Previous PriceGraphDistribution PrimaryPlaceholder Prime PrimeNu PrimeOmega PrimePi PrimePowerQ PrimeQ Primes PrimeZetaP PrimitiveRoot PrincipalComponents PrincipalValue Print PrintAction PrintForm PrintingCopies PrintingOptions PrintingPageRange PrintingStartingPageNumber PrintingStyleEnvironment PrintPrecision PrintTemporary Prism PrismBox PrismBoxOptions PrivateCellOptions PrivateEvaluationOptions PrivateFontOptions PrivateFrontEndOptions PrivateNotebookOptions PrivatePaths Probability ProbabilityDistribution ProbabilityPlot ProbabilityPr ProbabilityScalePlot ProbitModelFit ProcessEstimator ProcessParameterAssumptions ProcessParameterQ ProcessStateDomain ProcessTimeDomain Product ProductDistribution ProductLog ProgressIndicator ProgressIndicatorBox ProgressIndicatorBoxOptions Projection Prolog PromptForm Properties Property PropertyList PropertyValue Proportion Proportional Protect Protected ProteinData Pruning PseudoInverse Purple Put PutAppend Pyramid PyramidBox PyramidBoxOptions ' + 'QBinomial QFactorial QGamma QHypergeometricPFQ QPochhammer QPolyGamma QRDecomposition QuadraticIrrationalQ Quantile QuantilePlot Quantity QuantityForm QuantityMagnitude QuantityQ QuantityUnit Quartics QuartileDeviation Quartiles QuartileSkewness QueueingNetworkProcess QueueingProcess QueueProperties Quiet Quit Quotient QuotientRemainder ' + 'RadialityCentrality RadicalBox RadicalBoxOptions RadioButton RadioButtonBar RadioButtonBox RadioButtonBoxOptions Radon RamanujanTau RamanujanTauL RamanujanTauTheta RamanujanTauZ Random RandomChoice RandomComplex RandomFunction RandomGraph RandomImage RandomInteger RandomPermutation RandomPrime RandomReal RandomSample RandomSeed RandomVariate RandomWalkProcess Range RangeFilter RangeSpecification RankedMax RankedMin Raster Raster3D Raster3DBox Raster3DBoxOptions RasterArray RasterBox RasterBoxOptions Rasterize RasterSize Rational RationalFunctions Rationalize Rationals Ratios Raw RawArray RawBoxes RawData RawMedium RayleighDistribution Re Read ReadList ReadProtected Real RealBlockDiagonalForm RealDigits RealExponent Reals Reap Record RecordLists RecordSeparators Rectangle RectangleBox RectangleBoxOptions RectangleChart RectangleChart3D RecurrenceFilter RecurrenceTable RecurringDigitsForm Red Reduce RefBox ReferenceLineStyle ReferenceMarkers ReferenceMarkerStyle Refine ReflectionMatrix ReflectionTransform Refresh RefreshRate RegionBinarize RegionFunction RegionPlot RegionPlot3D RegularExpression Regularization Reinstall Release ReleaseHold ReliabilityDistribution ReliefImage ReliefPlot Remove RemoveAlphaChannel RemoveAsynchronousTask Removed RemoveInputStreamMethod RemoveOutputStreamMethod RemoveProperty RemoveScheduledTask RenameDirectory RenameFile RenderAll RenderingOptions RenewalProcess RenkoChart Repeated RepeatedNull RepeatedString Replace ReplaceAll ReplaceHeldPart ReplaceImageValue ReplaceList ReplacePart ReplacePixelValue ReplaceRepeated Resampling Rescale RescalingTransform ResetDirectory ResetMenusPacket ResetScheduledTask Residue Resolve Rest Resultant ResumePacket Return ReturnExpressionPacket ReturnInputFormPacket ReturnPacket ReturnTextPacket Reverse ReverseBiorthogonalSplineWavelet ReverseElement ReverseEquilibrium ReverseGraph ReverseUpEquilibrium RevolutionAxis 
RevolutionPlot3D RGBColor RiccatiSolve RiceDistribution RidgeFilter RiemannR RiemannSiegelTheta RiemannSiegelZ Riffle Right RightArrow RightArrowBar RightArrowLeftArrow RightCosetRepresentative RightDownTeeVector RightDownVector RightDownVectorBar RightTee RightTeeArrow RightTeeVector RightTriangle RightTriangleBar RightTriangleEqual RightUpDownVector RightUpTeeVector RightUpVector RightUpVectorBar RightVector RightVectorBar RiskAchievementImportance RiskReductionImportance RogersTanimotoDissimilarity Root RootApproximant RootIntervals RootLocusPlot RootMeanSquare RootOfUnityQ RootReduce Roots RootSum Rotate RotateLabel RotateLeft RotateRight RotationAction RotationBox RotationBoxOptions RotationMatrix RotationTransform Round RoundImplies RoundingRadius Row RowAlignments RowBackgrounds RowBox RowHeights RowLines RowMinHeight RowReduce RowsEqual RowSpacings RSolve RudvalisGroupRu Rule RuleCondition RuleDelayed RuleForm RulerUnits Run RunScheduledTask RunThrough RuntimeAttributes RuntimeOptions RussellRaoDissimilarity ' + 'SameQ SameTest SampleDepth SampledSoundFunction SampledSoundList SampleRate SamplingPeriod SARIMAProcess SARMAProcess SatisfiabilityCount SatisfiabilityInstances SatisfiableQ Saturday Save Saveable SaveAutoDelete SaveDefinitions SawtoothWave Scale Scaled ScaleDivisions ScaledMousePosition ScaleOrigin ScalePadding ScaleRanges ScaleRangeStyle ScalingFunctions ScalingMatrix ScalingTransform Scan ScheduledTaskActiveQ ScheduledTaskData ScheduledTaskObject ScheduledTasks SchurDecomposition ScientificForm ScreenRectangle ScreenStyleEnvironment ScriptBaselineShifts ScriptLevel ScriptMinSize ScriptRules ScriptSizeMultipliers Scrollbars ScrollingOptions ScrollPosition Sec Sech SechDistribution SectionGrouping SectorChart SectorChart3D SectorOrigin SectorSpacing SeedRandom Select Selectable SelectComponents SelectedCells SelectedNotebook Selection SelectionAnimate SelectionCell SelectionCellCreateCell SelectionCellDefaultStyle SelectionCellParentStyle SelectionCreateCell SelectionDebuggerTag SelectionDuplicateCell SelectionEvaluate SelectionEvaluateCreateCell SelectionMove SelectionPlaceholder SelectionSetStyle SelectWithContents SelfLoops SelfLoopStyle SemialgebraicComponentInstances SendMail Sequence SequenceAlignment SequenceForm SequenceHold SequenceLimit Series SeriesCoefficient SeriesData SessionTime Set SetAccuracy SetAlphaChannel SetAttributes Setbacks SetBoxFormNamesPacket SetDelayed SetDirectory SetEnvironment SetEvaluationNotebook SetFileDate SetFileLoadingContext SetNotebookStatusLine SetOptions SetOptionsPacket SetPrecision SetProperty SetSelectedNotebook SetSharedFunction SetSharedVariable SetSpeechParametersPacket SetStreamPosition SetSystemOptions Setter SetterBar SetterBox SetterBoxOptions Setting SetValue Shading Shallow ShannonWavelet ShapiroWilkTest Share Sharpen ShearingMatrix ShearingTransform ShenCastanMatrix Short ShortDownArrow Shortest ShortestMatch ShortestPathFunction ShortLeftArrow ShortRightArrow ShortUpArrow Show ShowAutoStyles ShowCellBracket ShowCellLabel ShowCellTags ShowClosedCellArea ShowContents ShowControls ShowCursorTracker ShowGroupOpenCloseIcon ShowGroupOpener ShowInvisibleCharacters ShowPageBreaks ShowPredictiveInterface ShowSelection ShowShortBoxForm ShowSpecialCharacters ShowStringCharacters ShowSyntaxStyles ShrinkingDelay ShrinkWrapBoundingBox SiegelTheta SiegelTukeyTest Sign Signature SignedRankTest SignificanceLevel SignPadding SignTest SimilarityRules SimpleGraph SimpleGraphQ Simplify Sin Sinc SinghMaddalaDistribution SingleEvaluation 
SingleLetterItalics SingleLetterStyle SingularValueDecomposition SingularValueList SingularValuePlot SingularValues Sinh SinhIntegral SinIntegral SixJSymbol Skeleton SkeletonTransform SkellamDistribution Skewness SkewNormalDistribution Skip SliceDistribution Slider Slider2D Slider2DBox Slider2DBoxOptions SliderBox SliderBoxOptions SlideView Slot SlotSequence Small SmallCircle Smaller SmithDelayCompensator SmithWatermanSimilarity ' + 'SmoothDensityHistogram SmoothHistogram SmoothHistogram3D SmoothKernelDistribution SocialMediaData Socket SokalSneathDissimilarity Solve SolveAlways SolveDelayed Sort SortBy Sound SoundAndGraphics SoundNote SoundVolume Sow Space SpaceForm Spacer Spacings Span SpanAdjustments SpanCharacterRounding SpanFromAbove SpanFromBoth SpanFromLeft SpanLineThickness SpanMaxSize SpanMinSize SpanningCharacters SpanSymmetric SparseArray SpatialGraphDistribution Speak SpeakTextPacket SpearmanRankTest SpearmanRho Spectrogram SpectrogramArray Specularity SpellingCorrection SpellingDictionaries SpellingDictionariesPath SpellingOptions SpellingSuggestionsPacket Sphere SphereBox SphericalBesselJ SphericalBesselY SphericalHankelH1 SphericalHankelH2 SphericalHarmonicY SphericalPlot3D SphericalRegion SpheroidalEigenvalue SpheroidalJoiningFactor SpheroidalPS SpheroidalPSPrime SpheroidalQS SpheroidalQSPrime SpheroidalRadialFactor SpheroidalS1 SpheroidalS1Prime SpheroidalS2 SpheroidalS2Prime Splice SplicedDistribution SplineClosed SplineDegree SplineKnots SplineWeights Split SplitBy SpokenString Sqrt SqrtBox SqrtBoxOptions Square SquaredEuclideanDistance SquareFreeQ SquareIntersection SquaresR SquareSubset SquareSubsetEqual SquareSuperset SquareSupersetEqual SquareUnion SquareWave StabilityMargins StabilityMarginsStyle StableDistribution Stack StackBegin StackComplete StackInhibit StandardDeviation StandardDeviationFilter StandardForm Standardize StandbyDistribution Star StarGraph StartAsynchronousTask StartingStepSize StartOfLine StartOfString StartScheduledTask StartupSound StateDimensions StateFeedbackGains StateOutputEstimator StateResponse StateSpaceModel StateSpaceRealization StateSpaceTransform StationaryDistribution StationaryWaveletPacketTransform StationaryWaveletTransform StatusArea StatusCentrality StepMonitor StieltjesGamma StirlingS1 StirlingS2 StopAsynchronousTask StopScheduledTask StrataVariables StratonovichProcess StreamColorFunction StreamColorFunctionScaling StreamDensityPlot StreamPlot StreamPoints StreamPosition Streams StreamScale StreamStyle String StringBreak StringByteCount StringCases StringCount StringDrop StringExpression StringForm StringFormat StringFreeQ StringInsert StringJoin StringLength StringMatchQ StringPosition StringQ StringReplace StringReplaceList StringReplacePart StringReverse StringRotateLeft StringRotateRight StringSkeleton StringSplit StringTake StringToStream StringTrim StripBoxes StripOnInput StripWrapperBoxes StrokeForm StructuralImportance StructuredArray StructuredSelection StruveH StruveL Stub StudentTDistribution Style StyleBox StyleBoxAutoDelete StyleBoxOptions StyleData StyleDefinitions StyleForm StyleKeyMapping StyleMenuListing StyleNameDialogSettings StyleNames StylePrint StyleSheetPath Subfactorial Subgraph SubMinus SubPlus SubresultantPolynomialRemainders ' + 'SubresultantPolynomials Subresultants Subscript SubscriptBox SubscriptBoxOptions Subscripted Subset SubsetEqual Subsets SubStar Subsuperscript SubsuperscriptBox SubsuperscriptBoxOptions Subtract SubtractFrom SubValues Succeeds SucceedsEqual SucceedsSlantEqual SucceedsTilde 
SuchThat Sum SumConvergence Sunday SuperDagger SuperMinus SuperPlus Superscript SuperscriptBox SuperscriptBoxOptions Superset SupersetEqual SuperStar Surd SurdForm SurfaceColor SurfaceGraphics SurvivalDistribution SurvivalFunction SurvivalModel SurvivalModelFit SuspendPacket SuzukiDistribution SuzukiGroupSuz SwatchLegend Switch Symbol SymbolName SymletWavelet Symmetric SymmetricGroup SymmetricMatrixQ SymmetricPolynomial SymmetricReduction Symmetrize SymmetrizedArray SymmetrizedArrayRules SymmetrizedDependentComponents SymmetrizedIndependentComponents SymmetrizedReplacePart SynchronousInitialization SynchronousUpdating Syntax SyntaxForm SyntaxInformation SyntaxLength SyntaxPacket SyntaxQ SystemDialogInput SystemException SystemHelpPath SystemInformation SystemInformationData SystemOpen SystemOptions SystemsModelDelay SystemsModelDelayApproximate SystemsModelDelete SystemsModelDimensions SystemsModelExtract SystemsModelFeedbackConnect SystemsModelLabels SystemsModelOrder SystemsModelParallelConnect SystemsModelSeriesConnect SystemsModelStateFeedbackConnect SystemStub ' + 'Tab TabFilling Table TableAlignments TableDepth TableDirections TableForm TableHeadings TableSpacing TableView TableViewBox TabSpacings TabView TabViewBox TabViewBoxOptions TagBox TagBoxNote TagBoxOptions TaggingRules TagSet TagSetDelayed TagStyle TagUnset Take TakeWhile Tally Tan Tanh TargetFunctions TargetUnits TautologyQ TelegraphProcess TemplateBox TemplateBoxOptions TemplateSlotSequence TemporalData Temporary TemporaryVariable TensorContract TensorDimensions TensorExpand TensorProduct TensorQ TensorRank TensorReduce TensorSymmetry TensorTranspose TensorWedge Tetrahedron TetrahedronBox TetrahedronBoxOptions TeXForm TeXSave Text Text3DBox Text3DBoxOptions TextAlignment TextBand TextBoundingBox TextBox TextCell TextClipboardType TextData TextForm TextJustification TextLine TextPacket TextParagraph TextRecognize TextRendering TextStyle Texture TextureCoordinateFunction TextureCoordinateScaling Therefore ThermometerGauge Thick Thickness Thin Thinning ThisLink ThompsonGroupTh Thread ThreeJSymbol Threshold Through Throw Thumbnail Thursday Ticks TicksStyle Tilde TildeEqual TildeFullEqual TildeTilde TimeConstrained TimeConstraint Times TimesBy TimeSeriesForecast TimeSeriesInvertibility TimeUsed TimeValue TimeZone Timing Tiny TitleGrouping TitsGroupT ToBoxes ToCharacterCode ToColor ToContinuousTimeModel ToDate ToDiscreteTimeModel ToeplitzMatrix ToExpression ToFileName Together Toggle ToggleFalse Toggler TogglerBar TogglerBox TogglerBoxOptions ToHeldExpression ToInvertibleTimeSeries TokenWords Tolerance ToLowerCase ToNumberField TooBig Tooltip TooltipBox TooltipBoxOptions TooltipDelay TooltipStyle Top TopHatTransform TopologicalSort ToRadicals ToRules ToString Total TotalHeight TotalVariationFilter TotalWidth TouchscreenAutoZoom TouchscreenControlPlacement ToUpperCase Tr Trace TraceAbove TraceAction TraceBackward TraceDepth TraceDialog TraceForward TraceInternal TraceLevel TraceOff TraceOn TraceOriginal TracePrint TraceScan TrackedSymbols TradingChart TraditionalForm TraditionalFunctionNotation TraditionalNotation TraditionalOrder TransferFunctionCancel TransferFunctionExpand TransferFunctionFactor TransferFunctionModel TransferFunctionPoles TransferFunctionTransform TransferFunctionZeros TransformationFunction TransformationFunctions TransformationMatrix TransformedDistribution TransformedField Translate TranslationTransform TransparentColor Transpose TreeForm TreeGraph TreeGraphQ TreePlot TrendStyle TriangleWave 
TriangularDistribution Trig TrigExpand TrigFactor TrigFactorList Trigger TrigReduce TrigToExp TrimmedMean True TrueQ TruncatedDistribution TsallisQExponentialDistribution TsallisQGaussianDistribution TTest Tube TubeBezierCurveBox TubeBezierCurveBoxOptions TubeBox TubeBSplineCurveBox TubeBSplineCurveBoxOptions Tuesday TukeyLambdaDistribution TukeyWindow Tuples TuranGraph TuringMachine ' + 'Transparent ' + 'UnateQ Uncompress Undefined UnderBar Underflow Underlined Underoverscript UnderoverscriptBox UnderoverscriptBoxOptions Underscript UnderscriptBox UnderscriptBoxOptions UndirectedEdge UndirectedGraph UndirectedGraphQ UndocumentedTestFEParserPacket UndocumentedTestGetSelectionPacket Unequal Unevaluated UniformDistribution UniformGraphDistribution UniformSumDistribution Uninstall Union UnionPlus Unique UnitBox UnitConvert UnitDimensions Unitize UnitRootTest UnitSimplify UnitStep UnitTriangle UnitVector Unprotect UnsameQ UnsavedVariables Unset UnsetShared UntrackedVariables Up UpArrow UpArrowBar UpArrowDownArrow Update UpdateDynamicObjects UpdateDynamicObjectsSynchronous UpdateInterval UpDownArrow UpEquilibrium UpperCaseQ UpperLeftArrow UpperRightArrow UpperTriangularize Upsample UpSet UpSetDelayed UpTee UpTeeArrow UpValues URL URLFetch URLFetchAsynchronous URLSave URLSaveAsynchronous UseGraphicsRange Using UsingFrontEnd ' + 'V2Get ValidationLength Value ValueBox ValueBoxOptions ValueForm ValueQ ValuesData Variables Variance VarianceEquivalenceTest VarianceEstimatorFunction VarianceGammaDistribution VarianceTest VectorAngle VectorColorFunction VectorColorFunctionScaling VectorDensityPlot VectorGlyphData VectorPlot VectorPlot3D VectorPoints VectorQ Vectors VectorScale VectorStyle Vee Verbatim Verbose VerboseConvertToPostScriptPacket VerifyConvergence VerifySolutions VerifyTestAssumptions Version VersionNumber VertexAdd VertexCapacity VertexColors VertexComponent VertexConnectivity VertexCoordinateRules VertexCoordinates VertexCorrelationSimilarity VertexCosineSimilarity VertexCount VertexCoverQ VertexDataCoordinates VertexDegree VertexDelete VertexDiceSimilarity VertexEccentricity VertexInComponent VertexInDegree VertexIndex VertexJaccardSimilarity VertexLabeling VertexLabels VertexLabelStyle VertexList VertexNormals VertexOutComponent VertexOutDegree VertexQ VertexRenderingFunction VertexReplace VertexShape VertexShapeFunction VertexSize VertexStyle VertexTextureCoordinates VertexWeight Vertical VerticalBar VerticalForm VerticalGauge VerticalSeparator VerticalSlider VerticalTilde ViewAngle ViewCenter ViewMatrix ViewPoint ViewPointSelectorSettings ViewPort ViewRange ViewVector ViewVertical VirtualGroupData Visible VisibleCell VoigtDistribution VonMisesDistribution ' + 'WaitAll WaitAsynchronousTask WaitNext WaitUntil WakebyDistribution WalleniusHypergeometricDistribution WaringYuleDistribution WatershedComponents WatsonUSquareTest WattsStrogatzGraphDistribution WaveletBestBasis WaveletFilterCoefficients WaveletImagePlot WaveletListPlot WaveletMapIndexed WaveletMatrixPlot WaveletPhi WaveletPsi WaveletScale WaveletScalogram WaveletThreshold WeaklyConnectedComponents WeaklyConnectedGraphQ WeakStationarity WeatherData WeberE Wedge Wednesday WeibullDistribution WeierstrassHalfPeriods WeierstrassInvariants WeierstrassP WeierstrassPPrime WeierstrassSigma WeierstrassZeta WeightedAdjacencyGraph WeightedAdjacencyMatrix WeightedData WeightedGraphQ Weights WelchWindow WheelGraph WhenEvent Which While White Whitespace WhitespaceCharacter WhittakerM WhittakerW WienerFilter WienerProcess WignerD 
WignerSemicircleDistribution WilksW WilksWTest WindowClickSelect WindowElements WindowFloating WindowFrame WindowFrameElements WindowMargins WindowMovable WindowOpacity WindowSelected WindowSize WindowStatusArea WindowTitle WindowToolbars WindowWidth With WolframAlpha WolframAlphaDate WolframAlphaQuantity WolframAlphaResult Word WordBoundary WordCharacter WordData WordSearch WordSeparators WorkingPrecision Write WriteString Wronskian ' + 'XMLElement XMLObject Xnor Xor ' + 'Yellow YuleDissimilarity ' + 'ZernikeR ZeroSymmetric ZeroTest ZeroWidthTimes Zeta ZetaZero ZipfDistribution ZTest ZTransform ' + '$Aborted $ActivationGroupID $ActivationKey $ActivationUserRegistered $AddOnsDirectory $AssertFunction $Assumptions $AsynchronousTask $BaseDirectory $BatchInput $BatchOutput $BoxForms $ByteOrdering $Canceled $CharacterEncoding $CharacterEncodings $CommandLine $CompilationTarget $ConditionHold $ConfiguredKernels $Context $ContextPath $ControlActiveSetting $CreationDate $CurrentLink $DateStringFormat $DefaultFont $DefaultFrontEnd $DefaultImagingDevice $DefaultPath $Display $DisplayFunction $DistributedContexts $DynamicEvaluation $Echo $Epilog $ExportFormats $Failed $FinancialDataSource $FormatType $FrontEnd $FrontEndSession $GeoLocation $HistoryLength $HomeDirectory $HTTPCookies $IgnoreEOF $ImagingDevices $ImportFormats $InitialDirectory $Input $InputFileName $InputStreamMethods $Inspector $InstallationDate $InstallationDirectory $InterfaceEnvironment $IterationLimit $KernelCount $KernelID $Language $LaunchDirectory $LibraryPath $LicenseExpirationDate $LicenseID $LicenseProcesses $LicenseServer $LicenseSubprocesses $LicenseType $Line $Linked $LinkSupported $LoadedFiles $MachineAddresses $MachineDomain $MachineDomains $MachineEpsilon $MachineID $MachineName $MachinePrecision $MachineType $MaxExtraPrecision $MaxLicenseProcesses $MaxLicenseSubprocesses $MaxMachineNumber $MaxNumber $MaxPiecewiseCases $MaxPrecision $MaxRootDegree $MessageGroups $MessageList $MessagePrePrint $Messages $MinMachineNumber $MinNumber $MinorReleaseNumber $MinPrecision $ModuleNumber $NetworkLicense $NewMessage $NewSymbol $Notebooks $NumberMarks $Off $OperatingSystem $Output $OutputForms $OutputSizeLimit $OutputStreamMethods $Packages $ParentLink $ParentProcessID $PasswordFile $PatchLevelID $Path $PathnameSeparator $PerformanceGoal $PipeSupported $Post $Pre $PreferencesDirectory $PrePrint $PreRead $PrintForms $PrintLiteral $ProcessID $ProcessorCount $ProcessorType $ProductInformation $ProgramName $RandomState $RecursionLimit $ReleaseNumber $RootDirectory $ScheduledTask $ScriptCommandLine $SessionID $SetParentLink $SharedFunctions $SharedVariables $SoundDisplay $SoundDisplayFunction $SuppressInputFormHeads $SynchronousEvaluation $SyntaxHandler $System $SystemCharacterEncoding $SystemID $SystemWordLength $TemporaryDirectory $TemporaryPrefix $TextStyle $TimedOut $TimeUnit $TimeZone $TopDirectory $TraceOff $TraceOn $TracePattern $TracePostAction $TracePreAction $Urgent $UserAddOnsDirectory $UserBaseDirectory $UserDocumentsDirectory $UserName $Version $VersionNumber', contains: [ { className: 'comment', begin: /\(\*/, end: /\*\)/ }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, { begin: /\{/, end: /\}/, illegal: /:/ } ] }; }; /***/ }, /* 267 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMON_CONTAINS = [ hljs.C_NUMBER_MODE, { className: 'string', begin: '\'', end: '\'', contains: [hljs.BACKSLASH_ESCAPE, {begin: '\'\''}] } ]; var TRANSPOSE = { relevance: 0, contains: [ { begin: 
/'['\.]*/ } ] }; return { keywords: { keyword: 'break case catch classdef continue else elseif end enumerated events for function ' + 'global if methods otherwise parfor persistent properties return spmd switch try while', built_in: 'sin sind sinh asin asind asinh cos cosd cosh acos acosd acosh tan tand tanh atan ' + 'atand atan2 atanh sec secd sech asec asecd asech csc cscd csch acsc acscd acsch cot ' + 'cotd coth acot acotd acoth hypot exp expm1 log log1p log10 log2 pow2 realpow reallog ' + 'realsqrt sqrt nthroot nextpow2 abs angle complex conj imag real unwrap isreal ' + 'cplxpair fix floor ceil round mod rem sign airy besselj bessely besselh besseli ' + 'besselk beta betainc betaln ellipj ellipke erf erfc erfcx erfinv expint gamma ' + 'gammainc gammaln psi legendre cross dot factor isprime primes gcd lcm rat rats perms ' + 'nchoosek factorial cart2sph cart2pol pol2cart sph2cart hsv2rgb rgb2hsv zeros ones ' + 'eye repmat rand randn linspace logspace freqspace meshgrid accumarray size length ' + 'ndims numel disp isempty isequal isequalwithequalnans cat reshape diag blkdiag tril ' + 'triu fliplr flipud flipdim rot90 find sub2ind ind2sub bsxfun ndgrid permute ipermute ' + 'shiftdim circshift squeeze isscalar isvector ans eps realmax realmin pi i inf nan ' + 'isnan isinf isfinite j why compan gallery hadamard hankel hilb invhilb magic pascal ' + 'rosser toeplitz vander wilkinson' }, illegal: '(//|"|#|/\\*|\\s+/\\w+)', contains: [ { className: 'function', beginKeywords: 'function', end: '$', contains: [ hljs.UNDERSCORE_TITLE_MODE, { className: 'params', variants: [ {begin: '\\(', end: '\\)'}, {begin: '\\[', end: '\\]'} ] } ] }, { begin: /[a-zA-Z_][a-zA-Z_0-9]*'['\.]*/, returnBegin: true, relevance: 0, contains: [ {begin: /[a-zA-Z_][a-zA-Z_0-9]*/, relevance: 0}, TRANSPOSE.contains[0] ] }, { begin: '\\[', end: '\\]', contains: COMMON_CONTAINS, relevance: 0, starts: TRANSPOSE }, { begin: '\\{', end: /}/, contains: COMMON_CONTAINS, relevance: 0, starts: TRANSPOSE }, { // transpose operators at the end of a function call begin: /\)/, relevance: 0, starts: TRANSPOSE }, hljs.COMMENT('^\\s*\\%\\{\\s*$', '^\\s*\\%\\}\\s*$'), hljs.COMMENT('\\%', '$') ].concat(COMMON_CONTAINS) }; }; /***/ }, /* 268 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = 'if then else elseif for thru do while unless step in and or not'; var LITERALS = 'true false unknown inf minf ind und %e %i %pi %phi %gamma'; var BUILTIN_FUNCTIONS = ' abasep abs absint absolute_real_time acos acosh acot acoth acsc acsch activate' + ' addcol add_edge add_edges addmatrices addrow add_vertex add_vertices adjacency_matrix' + ' adjoin adjoint af agd airy airy_ai airy_bi airy_dai airy_dbi algsys alg_type' + ' alias allroots alphacharp alphanumericp amortization %and annuity_fv' + ' annuity_pv antid antidiff AntiDifference append appendfile apply apply1 apply2' + ' applyb1 apropos args arit_amortization arithmetic arithsum array arrayapply' + ' arrayinfo arraymake arraysetapply ascii asec asech asin asinh askinteger' + ' asksign assoc assoc_legendre_p assoc_legendre_q assume assume_external_byte_order' + ' asympa at atan atan2 atanh atensimp atom atvalue augcoefmatrix augmented_lagrangian_method' + ' av average_degree backtrace bars barsplot barsplot_description base64 base64_decode' + ' bashindices batch batchload bc2 bdvac belln benefit_cost bern bernpoly bernstein_approx' + ' bernstein_expand bernstein_poly bessel bessel_i bessel_j bessel_k bessel_simplify' + ' bessel_y beta beta_incomplete 
beta_incomplete_generalized beta_incomplete_regularized' + ' bezout bfallroots bffac bf_find_root bf_fmin_cobyla bfhzeta bfloat bfloatp' + ' bfpsi bfpsi0 bfzeta biconnected_components bimetric binomial bipartition' + ' block blockmatrixp bode_gain bode_phase bothcoef box boxplot boxplot_description' + ' break bug_report build_info|10 buildq build_sample burn cabs canform canten' + ' cardinality carg cartan cartesian_product catch cauchy_matrix cbffac cdf_bernoulli' + ' cdf_beta cdf_binomial cdf_cauchy cdf_chi2 cdf_continuous_uniform cdf_discrete_uniform' + ' cdf_exp cdf_f cdf_gamma cdf_general_finite_discrete cdf_geometric cdf_gumbel' + ' cdf_hypergeometric cdf_laplace cdf_logistic cdf_lognormal cdf_negative_binomial' + ' cdf_noncentral_chi2 cdf_noncentral_student_t cdf_normal cdf_pareto cdf_poisson' + ' cdf_rank_sum cdf_rayleigh cdf_signed_rank cdf_student_t cdf_weibull cdisplay' + ' ceiling central_moment cequal cequalignore cf cfdisrep cfexpand cgeodesic' + ' cgreaterp cgreaterpignore changename changevar chaosgame charat charfun charfun2' + ' charlist charp charpoly chdir chebyshev_t chebyshev_u checkdiv check_overlaps' + ' chinese cholesky christof chromatic_index chromatic_number cint circulant_graph' + ' clear_edge_weight clear_rules clear_vertex_label clebsch_gordan clebsch_graph' + ' clessp clesspignore close closefile cmetric coeff coefmatrix cograd col collapse' + ' collectterms columnop columnspace columnswap columnvector combination combine' + ' comp2pui compare compfile compile compile_file complement_graph complete_bipartite_graph' + ' complete_graph complex_number_p components compose_functions concan concat' + ' conjugate conmetderiv connected_components connect_vertices cons constant' + ' constantp constituent constvalue cont2part content continuous_freq contortion' + ' contour_plot contract contract_edge contragrad contrib_ode convert coord' + ' copy copy_file copy_graph copylist copymatrix cor cos cosh cot coth cov cov1' + ' covdiff covect covers crc24sum create_graph create_list csc csch csetup cspline' + ' ctaylor ct_coordsys ctransform ctranspose cube_graph cuboctahedron_graph' + ' cunlisp cv cycle_digraph cycle_graph cylindrical days360 dblint deactivate' + ' declare declare_constvalue declare_dimensions declare_fundamental_dimensions' + ' declare_fundamental_units declare_qty declare_translated declare_unit_conversion' + ' declare_units declare_weights decsym defcon define define_alt_display define_variable' + ' defint defmatch defrule defstruct deftaylor degree_sequence del delete deleten' + ' delta demo demoivre denom depends derivdegree derivlist describe desolve' + ' determinant dfloat dgauss_a dgauss_b dgeev dgemm dgeqrf dgesv dgesvd diag' + ' diagmatrix diag_matrix diagmatrixp diameter diff digitcharp dimacs_export' + ' dimacs_import dimension dimensionless dimensions dimensions_as_list direct' + ' directory discrete_freq disjoin disjointp disolate disp dispcon dispform' + ' dispfun dispJordan display disprule dispterms distrib divide divisors divsum' + ' dkummer_m dkummer_u dlange dodecahedron_graph dotproduct dotsimp dpart' + ' draw draw2d draw3d drawdf draw_file draw_graph dscalar echelon edge_coloring' + ' edge_connectivity edges eigens_by_jacobi eigenvalues eigenvectors eighth' + ' einstein eivals eivects elapsed_real_time elapsed_run_time ele2comp ele2polynome' + ' ele2pui elem elementp elevation_grid elim elim_allbut eliminate eliminate_using' + ' ellipse elliptic_e elliptic_ec elliptic_eu elliptic_f elliptic_kc elliptic_pi' + ' ematrix empty_graph 
emptyp endcons entermatrix entertensor entier equal equalp' + ' equiv_classes erf erfc erf_generalized erfi errcatch error errormsg errors' + ' euler ev eval_string evenp every evolution evolution2d evundiff example exp' + ' expand expandwrt expandwrt_factored expint expintegral_chi expintegral_ci' + ' expintegral_e expintegral_e1 expintegral_ei expintegral_e_simplify expintegral_li' + ' expintegral_shi expintegral_si explicit explose exponentialize express expt' + ' exsec extdiff extract_linear_equations extremal_subset ezgcd %f f90 facsum' + ' factcomb factor factorfacsum factorial factorout factorsum facts fast_central_elements' + ' fast_linsolve fasttimes featurep fernfale fft fib fibtophi fifth filename_merge' + ' file_search file_type fillarray findde find_root find_root_abs find_root_error' + ' find_root_rel first fix flatten flength float floatnump floor flower_snark' + ' flush flush1deriv flushd flushnd flush_output fmin_cobyla forget fortran' + ' fourcos fourexpand fourier fourier_elim fourint fourintcos fourintsin foursimp' + ' foursin fourth fposition frame_bracket freeof freshline fresnel_c fresnel_s' + ' from_adjacency_matrix frucht_graph full_listify fullmap fullmapl fullratsimp' + ' fullratsubst fullsetify funcsolve fundamental_dimensions fundamental_units' + ' fundef funmake funp fv g0 g1 gamma gamma_greek gamma_incomplete gamma_incomplete_generalized' + ' gamma_incomplete_regularized gauss gauss_a gauss_b gaussprob gcd gcdex gcdivide' + ' gcfac gcfactor gd generalized_lambert_w genfact gen_laguerre genmatrix gensym' + ' geo_amortization geo_annuity_fv geo_annuity_pv geomap geometric geometric_mean' + ' geosum get getcurrentdirectory get_edge_weight getenv get_lu_factors get_output_stream_string' + ' get_pixel get_plot_option get_tex_environment get_tex_environment_default' + ' get_vertex_label gfactor gfactorsum ggf girth global_variances gn gnuplot_close' + ' gnuplot_replot gnuplot_reset gnuplot_restart gnuplot_start go Gosper GosperSum' + ' gr2d gr3d gradef gramschmidt graph6_decode graph6_encode graph6_export graph6_import' + ' graph_center graph_charpoly graph_eigenvalues graph_flow graph_order graph_periphery' + ' graph_product graph_size graph_union great_rhombicosidodecahedron_graph great_rhombicuboctahedron_graph' + ' grid_graph grind grobner_basis grotzch_graph hamilton_cycle hamilton_path' + ' hankel hankel_1 hankel_2 harmonic harmonic_mean hav heawood_graph hermite' + ' hessian hgfred hilbertmap hilbert_matrix hipow histogram histogram_description' + ' hodge horner hypergeometric i0 i1 %ibes ic1 ic2 ic_convert ichr1 ichr2 icosahedron_graph' + ' icosidodecahedron_graph icurvature ident identfor identity idiff idim idummy' + ' ieqn %if ifactors iframes ifs igcdex igeodesic_coords ilt image imagpart' + ' imetric implicit implicit_derivative implicit_plot indexed_tensor indices' + ' induced_subgraph inferencep inference_result infix info_display init_atensor' + ' init_ctensor in_neighbors innerproduct inpart inprod inrt integerp integer_partitions' + ' integrate intersect intersection intervalp intopois intosum invariant1 invariant2' + ' inverse_fft inverse_jacobi_cd inverse_jacobi_cn inverse_jacobi_cs inverse_jacobi_dc' + ' inverse_jacobi_dn inverse_jacobi_ds inverse_jacobi_nc inverse_jacobi_nd inverse_jacobi_ns' + ' inverse_jacobi_sc inverse_jacobi_sd inverse_jacobi_sn invert invert_by_adjoint' + ' invert_by_lu inv_mod irr is is_biconnected is_bipartite is_connected is_digraph' + ' is_edge_in_graph is_graph is_graph_or_digraph ishow is_isomorphic isolate' + ' 
isomorphism is_planar isqrt isreal_p is_sconnected is_tree is_vertex_in_graph' + ' items_inference %j j0 j1 jacobi jacobian jacobi_cd jacobi_cn jacobi_cs jacobi_dc' + ' jacobi_dn jacobi_ds jacobi_nc jacobi_nd jacobi_ns jacobi_p jacobi_sc jacobi_sd' + ' jacobi_sn JF jn join jordan julia julia_set julia_sin %k kdels kdelta kill' + ' killcontext kostka kron_delta kronecker_product kummer_m kummer_u kurtosis' + ' kurtosis_bernoulli kurtosis_beta kurtosis_binomial kurtosis_chi2 kurtosis_continuous_uniform' + ' kurtosis_discrete_uniform kurtosis_exp kurtosis_f kurtosis_gamma kurtosis_general_finite_discrete' + ' kurtosis_geometric kurtosis_gumbel kurtosis_hypergeometric kurtosis_laplace' + ' kurtosis_logistic kurtosis_lognormal kurtosis_negative_binomial kurtosis_noncentral_chi2' + ' kurtosis_noncentral_student_t kurtosis_normal kurtosis_pareto kurtosis_poisson' + ' kurtosis_rayleigh kurtosis_student_t kurtosis_weibull label labels lagrange' + ' laguerre lambda lambert_w laplace laplacian_matrix last lbfgs lc2kdt lcharp' + ' lc_l lcm lc_u ldefint ldisp ldisplay legendre_p legendre_q leinstein length' + ' let letrules letsimp levi_civita lfreeof lgtreillis lhs li liediff limit' + ' Lindstedt linear linearinterpol linear_program linear_regression line_graph' + ' linsolve listarray list_correlations listify list_matrix_entries list_nc_monomials' + ' listoftens listofvars listp lmax lmin load loadfile local locate_matrix_entry' + ' log logcontract log_gamma lopow lorentz_gauge lowercasep lpart lratsubst' + ' lreduce lriemann lsquares_estimates lsquares_estimates_approximate lsquares_estimates_exact' + ' lsquares_mse lsquares_residual_mse lsquares_residuals lsum ltreillis lu_backsub' + ' lucas lu_factor %m macroexpand macroexpand1 make_array makebox makefact makegamma' + ' make_graph make_level_picture makelist makeOrders make_poly_continent make_poly_country' + ' make_polygon make_random_state make_rgb_picture makeset make_string_input_stream' + ' make_string_output_stream make_transform mandelbrot mandelbrot_set map mapatom' + ' maplist matchdeclare matchfix mat_cond mat_fullunblocker mat_function mathml_display' + ' mat_norm matrix matrixmap matrixp matrix_size mattrace mat_trace mat_unblocker' + ' max max_clique max_degree max_flow maximize_lp max_independent_set max_matching' + ' maybe md5sum mean mean_bernoulli mean_beta mean_binomial mean_chi2 mean_continuous_uniform' + ' mean_deviation mean_discrete_uniform mean_exp mean_f mean_gamma mean_general_finite_discrete' + ' mean_geometric mean_gumbel mean_hypergeometric mean_laplace mean_logistic' + ' mean_lognormal mean_negative_binomial mean_noncentral_chi2 mean_noncentral_student_t' + ' mean_normal mean_pareto mean_poisson mean_rayleigh mean_student_t mean_weibull' + ' median median_deviation member mesh metricexpandall mgf1_sha1 min min_degree' + ' min_edge_cut minfactorial minimalPoly minimize_lp minimum_spanning_tree minor' + ' minpack_lsquares minpack_solve min_vertex_cover min_vertex_cut mkdir mnewton' + ' mod mode_declare mode_identity ModeMatrix moebius mon2schur mono monomial_dimensions' + ' multibernstein_poly multi_display_for_texinfo multi_elem multinomial multinomial_coeff' + ' multi_orbit multiplot_mode multi_pui multsym multthru mycielski_graph nary' + ' natural_unit nc_degree ncexpt ncharpoly negative_picture neighbors new newcontext' + ' newdet new_graph newline newton new_variable next_prime nicedummies niceindices' + ' ninth nofix nonarray noncentral_moment nonmetricity nonnegintegerp nonscalarp' + ' nonzeroandfreeof notequal 
nounify nptetrad npv nroots nterms ntermst' + ' nthroot nullity nullspace num numbered_boundaries numberp number_to_octets' + ' num_distinct_partitions numerval numfactor num_partitions nusum nzeta nzetai' + ' nzetar octets_to_number octets_to_oid odd_girth oddp ode2 ode_check odelin' + ' oid_to_octets op opena opena_binary openr openr_binary openw openw_binary' + ' operatorp opsubst optimize %or orbit orbits ordergreat ordergreatp orderless' + ' orderlessp orthogonal_complement orthopoly_recur orthopoly_weight outermap' + ' out_neighbors outofpois pade parabolic_cylinder_d parametric parametric_surface' + ' parg parGosper parse_string parse_timedate part part2cont partfrac partition' + ' partition_set partpol path_digraph path_graph pathname_directory pathname_name' + ' pathname_type pdf_bernoulli pdf_beta pdf_binomial pdf_cauchy pdf_chi2 pdf_continuous_uniform' + ' pdf_discrete_uniform pdf_exp pdf_f pdf_gamma pdf_general_finite_discrete' + ' pdf_geometric pdf_gumbel pdf_hypergeometric pdf_laplace pdf_logistic pdf_lognormal' + ' pdf_negative_binomial pdf_noncentral_chi2 pdf_noncentral_student_t pdf_normal' + ' pdf_pareto pdf_poisson pdf_rank_sum pdf_rayleigh pdf_signed_rank pdf_student_t' + ' pdf_weibull pearson_skewness permanent permut permutation permutations petersen_graph' + ' petrov pickapart picture_equalp picturep piechart piechart_description planar_embedding' + ' playback plog plot2d plot3d plotdf ploteq plsquares pochhammer points poisdiff' + ' poisexpt poisint poismap poisplus poissimp poissubst poistimes poistrim polar' + ' polarform polartorect polar_to_xy poly_add poly_buchberger poly_buchberger_criterion' + ' poly_colon_ideal poly_content polydecomp poly_depends_p poly_elimination_ideal' + ' poly_exact_divide poly_expand poly_expt poly_gcd polygon poly_grobner poly_grobner_equal' + ' poly_grobner_member poly_grobner_subsetp poly_ideal_intersection poly_ideal_polysaturation' + ' poly_ideal_polysaturation1 poly_ideal_saturation poly_ideal_saturation1 poly_lcm' + ' poly_minimization polymod poly_multiply polynome2ele polynomialp poly_normal_form' + ' poly_normalize poly_normalize_list poly_polysaturation_extension poly_primitive_part' + ' poly_pseudo_divide poly_reduced_grobner poly_reduction poly_saturation_extension' + ' poly_s_polynomial poly_subtract polytocompanion pop postfix potential power_mod' + ' powerseries powerset prefix prev_prime primep primes principal_components' + ' print printf printfile print_graph printpois printprops prodrac product properties' + ' propvars psi psubst ptriangularize pui pui2comp pui2ele pui2polynome pui_direct' + ' puireduc push put pv qput qrange qty quad_control quad_qag quad_qagi quad_qagp' + ' quad_qags quad_qawc quad_qawf quad_qawo quad_qaws quadrilateral quantile' + ' quantile_bernoulli quantile_beta quantile_binomial quantile_cauchy quantile_chi2' + ' quantile_continuous_uniform quantile_discrete_uniform quantile_exp quantile_f' + ' quantile_gamma quantile_general_finite_discrete quantile_geometric quantile_gumbel' + ' quantile_hypergeometric quantile_laplace quantile_logistic quantile_lognormal' + ' quantile_negative_binomial quantile_noncentral_chi2 quantile_noncentral_student_t' + ' quantile_normal quantile_pareto quantile_poisson quantile_rayleigh quantile_student_t' + ' quantile_weibull quartile_skewness quit qunit quotient racah_v racah_w radcan' + ' radius random random_bernoulli random_beta random_binomial random_bipartite_graph' + ' random_cauchy random_chi2 random_continuous_uniform random_digraph 
random_discrete_uniform' + ' random_exp random_f random_gamma random_general_finite_discrete random_geometric' + ' random_graph random_graph1 random_gumbel random_hypergeometric random_laplace' + ' random_logistic random_lognormal random_negative_binomial random_network' + ' random_noncentral_chi2 random_noncentral_student_t random_normal random_pareto' + ' random_permutation random_poisson random_rayleigh random_regular_graph random_student_t' + ' random_tournament random_tree random_weibull range rank rat ratcoef ratdenom' + ' ratdiff ratdisrep ratexpand ratinterpol rational rationalize ratnumer ratnump' + ' ratp ratsimp ratsubst ratvars ratweight read read_array read_binary_array' + ' read_binary_list read_binary_matrix readbyte readchar read_hashed_array readline' + ' read_list read_matrix read_nested_list readonly read_xpm real_imagpart_to_conjugate' + ' realpart realroots rearray rectangle rectform rectform_log_if_constant recttopolar' + ' rediff reduce_consts reduce_order region region_boundaries region_boundaries_plus' + ' rem remainder remarray rembox remcomps remcon remcoord remfun remfunction' + ' remlet remove remove_constvalue remove_dimensions remove_edge remove_fundamental_dimensions' + ' remove_fundamental_units remove_plot_option remove_vertex rempart remrule' + ' remsym remvalue rename rename_file reset reset_displays residue resolvante' + ' resolvante_alternee1 resolvante_bipartite resolvante_diedrale resolvante_klein' + ' resolvante_klein3 resolvante_produit_sym resolvante_unitaire resolvante_vierer' + ' rest resultant return reveal reverse revert revert2 rgb2level rhs ricci riemann' + ' rinvariant risch rk rmdir rncombine romberg room rootscontract round row' + ' rowop rowswap rreduce run_testsuite %s save saving scalarp scaled_bessel_i' + ' scaled_bessel_i0 scaled_bessel_i1 scalefactors scanmap scatterplot scatterplot_description' + ' scene schur2comp sconcat scopy scsimp scurvature sdowncase sec sech second' + ' sequal sequalignore set_alt_display setdifference set_draw_defaults set_edge_weight' + ' setelmx setequalp setify setp set_partitions set_plot_option set_prompt set_random_state' + ' set_tex_environment set_tex_environment_default setunits setup_autoload set_up_dot_simplifications' + ' set_vertex_label seventh sexplode sf sha1sum sha256sum shortest_path shortest_weighted_path' + ' show showcomps showratvars sierpinskiale sierpinskimap sign signum similaritytransform' + ' simp_inequality simplify_sum simplode simpmetderiv simtran sin sinh sinsert' + ' sinvertcase sixth skewness skewness_bernoulli skewness_beta skewness_binomial' + ' skewness_chi2 skewness_continuous_uniform skewness_discrete_uniform skewness_exp' + ' skewness_f skewness_gamma skewness_general_finite_discrete skewness_geometric' + ' skewness_gumbel skewness_hypergeometric skewness_laplace skewness_logistic' + ' skewness_lognormal skewness_negative_binomial skewness_noncentral_chi2 skewness_noncentral_student_t' + ' skewness_normal skewness_pareto skewness_poisson skewness_rayleigh skewness_student_t' + ' skewness_weibull slength smake small_rhombicosidodecahedron_graph small_rhombicuboctahedron_graph' + ' smax smin smismatch snowmap snub_cube_graph snub_dodecahedron_graph solve' + ' solve_rec solve_rec_rat some somrac sort sparse6_decode sparse6_encode sparse6_export' + ' sparse6_import specint spherical spherical_bessel_j spherical_bessel_y spherical_hankel1' + ' spherical_hankel2 spherical_harmonic spherical_to_xyz splice split sposition' + ' sprint sqfr sqrt sqrtdenest sremove sremovefirst 
sreverse ssearch ssort sstatus' + ' ssubst ssubstfirst staircase standardize standardize_inverse_trig starplot' + ' starplot_description status std std1 std_bernoulli std_beta std_binomial' + ' std_chi2 std_continuous_uniform std_discrete_uniform std_exp std_f std_gamma' + ' std_general_finite_discrete std_geometric std_gumbel std_hypergeometric std_laplace' + ' std_logistic std_lognormal std_negative_binomial std_noncentral_chi2 std_noncentral_student_t' + ' std_normal std_pareto std_poisson std_rayleigh std_student_t std_weibull' + ' stemplot stirling stirling1 stirling2 strim striml strimr string stringout' + ' stringp strong_components struve_h struve_l sublis sublist sublist_indices' + ' submatrix subsample subset subsetp subst substinpart subst_parallel substpart' + ' substring subvar subvarp sum sumcontract summand_to_rec supcase supcontext' + ' symbolp symmdifference symmetricp system take_channel take_inference tan' + ' tanh taylor taylorinfo taylorp taylor_simplifier taytorat tcl_output tcontract' + ' tellrat tellsimp tellsimpafter tentex tenth test_mean test_means_difference' + ' test_normality test_proportion test_proportions_difference test_rank_sum' + ' test_sign test_signed_rank test_variance test_variance_ratio tex tex1 tex_display' + ' texput %th third throw time timedate timer timer_info tldefint tlimit todd_coxeter' + ' toeplitz tokens to_lisp topological_sort to_poly to_poly_solve totaldisrep' + ' totalfourier totient tpartpol trace tracematrix trace_options transform_sample' + ' translate translate_file transpose treefale tree_reduce treillis treinat' + ' triangle triangularize trigexpand trigrat trigreduce trigsimp trunc truncate' + ' truncated_cube_graph truncated_dodecahedron_graph truncated_icosahedron_graph' + ' truncated_tetrahedron_graph tr_warnings_get tube tutte_graph ueivects uforget' + ' ultraspherical underlying_graph undiff union unique uniteigenvectors unitp' + ' units unit_step unitvector unorder unsum untellrat untimer' + ' untrace uppercasep uricci uriemann uvect vandermonde_matrix var var1 var_bernoulli' + ' var_beta var_binomial var_chi2 var_continuous_uniform var_discrete_uniform' + ' var_exp var_f var_gamma var_general_finite_discrete var_geometric var_gumbel' + ' var_hypergeometric var_laplace var_logistic var_lognormal var_negative_binomial' + ' var_noncentral_chi2 var_noncentral_student_t var_normal var_pareto var_poisson' + ' var_rayleigh var_student_t var_weibull vector vectorpotential vectorsimp' + ' verbify vers vertex_coloring vertex_connectivity vertex_degree vertex_distance' + ' vertex_eccentricity vertex_in_degree vertex_out_degree vertices vertices_to_cycle' + ' vertices_to_path %w weyl wheel_graph wiener_index wigner_3j wigner_6j' + ' wigner_9j with_stdout write_binary_data writebyte write_data writefile wronskian' + ' xreduce xthru %y Zeilberger zeroequiv zerofor zeromatrix zeromatrixp zeta' + ' zgeev zheev zlange zn_add_table zn_carmichael_lambda zn_characteristic_factors' + ' zn_determinant zn_factor_generators zn_invert_by_lu zn_log zn_mult_table' + ' absboxchar activecontexts adapt_depth additive adim aform algebraic' + ' algepsilon algexact aliases allbut all_dotsimp_denoms allocation allsym alphabetic' + ' animation antisymmetric arrays askexp assume_pos assume_pos_pred assumescalar' + ' asymbol atomgrad atrig1 axes axis_3d axis_bottom axis_left axis_right axis_top' + ' azimuth background background_color backsubst berlefact bernstein_explicit' + ' besselexpand beta_args_sum_to_integer beta_expand bftorat bftrunc bindtest' + ' 
border boundaries_array box boxchar breakup %c capping cauchysum cbrange' + ' cbtics center cflength cframe_flag cnonmet_flag color color_bar color_bar_tics' + ' colorbox columns commutative complex cone context contexts contour contour_levels' + ' cosnpiflag ctaypov ctaypt ctayswitch ctayvar ct_coords ctorsion_flag ctrgsimp' + ' cube current_let_rule_package cylinder data_file_name debugmode decreasing' + ' default_let_rule_package delay dependencies derivabbrev derivsubst detout' + ' diagmetric diff dim dimensions dispflag display2d|10 display_format_internal' + ' distribute_over doallmxops domain domxexpt domxmxops domxnctimes dontfactor' + ' doscmxops doscmxplus dot0nscsimp dot0simp dot1simp dotassoc dotconstrules' + ' dotdistrib dotexptsimp dotident dotscrules draw_graph_program draw_realpart' + ' edge_color edge_coloring edge_partition edge_type edge_width %edispflag' + ' elevation %emode endphi endtheta engineering_format_floats enhanced3d %enumer' + ' epsilon_lp erfflag erf_representation errormsg error_size error_syms error_type' + ' %e_to_numlog eval even evenfun evflag evfun ev_point expandwrt_denom expintexpand' + ' expintrep expon expop exptdispflag exptisolate exptsubst facexpand facsum_combine' + ' factlim factorflag factorial_expand factors_only fb feature features' + ' file_name file_output_append file_search_demo file_search_lisp file_search_maxima|10' + ' file_search_tests file_search_usage file_type_lisp file_type_maxima|10 fill_color' + ' fill_density filled_func fixed_vertices flipflag float2bf font font_size' + ' fortindent fortspaces fpprec fpprintprec functions gamma_expand gammalim' + ' gdet genindex gensumnum GGFCFMAX GGFINFINITY globalsolve gnuplot_command' + ' gnuplot_curve_styles gnuplot_curve_titles gnuplot_default_term_command gnuplot_dumb_term_command' + ' gnuplot_file_args gnuplot_file_name gnuplot_out_file gnuplot_pdf_term_command' + ' gnuplot_pm3d gnuplot_png_term_command gnuplot_postamble gnuplot_preamble' + ' gnuplot_ps_term_command gnuplot_svg_term_command gnuplot_term gnuplot_view_args' + ' Gosper_in_Zeilberger gradefs grid grid2d grind halfangles head_angle head_both' + ' head_length head_type height hypergeometric_representation %iargs ibase' + ' icc1 icc2 icounter idummyx ieqnprint ifb ifc1 ifc2 ifg ifgi ifr iframe_bracket_form' + ' ifri igeowedge_flag ikt1 ikt2 imaginary inchar increasing infeval' + ' infinity inflag infolists inm inmc1 inmc2 intanalysis integer integervalued' + ' integrate_use_rootsof integration_constant integration_constant_counter interpolate_color' + ' intfaclim ip_grid ip_grid_in irrational isolate_wrt_times iterations itr' + ' julia_parameter %k1 %k2 keepfloat key key_pos kinvariant kt label label_alignment' + ' label_orientation labels lassociative lbfgs_ncorrections lbfgs_nfeval_max' + ' leftjust legend letrat let_rule_packages lfg lg lhospitallim limsubst linear' + ' linear_solver linechar linel|10 linenum line_type linewidth line_width linsolve_params' + ' linsolvewarn lispdisp listarith listconstvars listdummyvars lmxchar load_pathname' + ' loadprint logabs logarc logcb logconcoeffp logexpand lognegint logsimp logx' + ' logx_secondary logy logy_secondary logz lriem m1pbranch macroexpansion macros' + ' mainvar manual_demo maperror mapprint matrix_element_add matrix_element_mult' + ' matrix_element_transpose maxapplydepth maxapplyheight maxima_tempdir|10 maxima_userdir|10' + ' maxnegex MAX_ORD maxposex maxpsifracdenom maxpsifracnum maxpsinegint maxpsiposint' + ' maxtayorder mesh_lines_color method mod_big_prime 
mode_check_errorp' + ' mode_checkp mode_check_warnp mod_test mod_threshold modular_linear_solver' + ' modulus multiplicative multiplicities myoptions nary negdistrib negsumdispflag' + ' newline newtonepsilon newtonmaxiter nextlayerfactor niceindicespref nm nmc' + ' noeval nolabels nonegative_lp noninteger nonscalar noun noundisp nouns np' + ' npi nticks ntrig numer numer_pbranch obase odd oddfun opacity opproperties' + ' opsubst optimprefix optionset orientation origin orthopoly_returns_intervals' + ' outative outchar packagefile palette partswitch pdf_file pfeformat phiresolution' + ' %piargs piece pivot_count_sx pivot_max_sx plot_format plot_options plot_realpart' + ' png_file pochhammer_max_index points pointsize point_size points_joined point_type' + ' poislim poisson poly_coefficient_ring poly_elimination_order polyfactor poly_grobner_algorithm' + ' poly_grobner_debug poly_monomial_order poly_primary_elimination_order poly_return_term_list' + ' poly_secondary_elimination_order poly_top_reduction_only posfun position' + ' powerdisp pred prederror primep_number_of_tests product_use_gamma program' + ' programmode promote_float_to_bigfloat prompt proportional_axes props psexpand' + ' ps_file radexpand radius radsubstflag rassociative ratalgdenom ratchristof' + ' ratdenomdivide rateinstein ratepsilon ratfac rational ratmx ratprint ratriemann' + ' ratsimpexpons ratvarswitch ratweights ratweyl ratwtlvl real realonly redraw' + ' refcheck resolution restart resultant ric riem rmxchar %rnum_list rombergabs' + ' rombergit rombergmin rombergtol rootsconmode rootsepsilon run_viewer same_xy' + ' same_xyz savedef savefactors scalar scalarmatrixp scale scale_lp setcheck' + ' setcheckbreak setval show_edge_color show_edges show_edge_type show_edge_width' + ' show_id show_label showtime show_vertex_color show_vertex_size show_vertex_type' + ' show_vertices show_weight simp simplified_output simplify_products simpproduct' + ' simpsum sinnpiflag solvedecomposes solveexplicit solvefactors solvenullwarn' + ' solveradcan solvetrigwarn space sparse sphere spring_embedding_depth sqrtdispflag' + ' stardisp startphi starttheta stats_numer stringdisp structures style sublis_apply_lambda' + ' subnumsimp sumexpand sumsplitfact surface surface_hide svg_file symmetric' + ' tab taylordepth taylor_logexpand taylor_order_coefficients taylor_truncate_polynomials' + ' tensorkill terminal testsuite_files thetaresolution timer_devalue title tlimswitch' + ' tr track transcompile transform transform_xy translate_fast_arrays transparent' + ' transrun tr_array_as_ref tr_bound_function_applyp tr_file_tty_messagesp tr_float_can_branch_complex' + ' tr_function_call_default trigexpandplus trigexpandtimes triginverses trigsign' + ' trivial_solutions tr_numer tr_optimize_max_loop tr_semicompile tr_state_vars' + ' tr_warn_bad_function_calls tr_warn_fexpr tr_warn_meval tr_warn_mode' + ' tr_warn_undeclared tr_warn_undefined_variable tstep ttyoff tube_extremes' + ' ufg ug %unitexpand unit_vectors uric uriem use_fast_arrays user_preamble' + ' usersetunits values vect_cross verbose vertex_color vertex_coloring vertex_partition' + ' vertex_size vertex_type view warnings weyl width windowname windowtitle wired_surface' + ' wireframe xaxis xaxis_color xaxis_secondary xaxis_type xaxis_width xlabel' + ' xlabel_secondary xlength xrange xrange_secondary xtics xtics_axis xtics_rotate' + ' xtics_rotate_secondary xtics_secondary xtics_secondary_axis xu_grid x_voxel' + ' xy_file xyplane xy_scale yaxis yaxis_color yaxis_secondary yaxis_type 
yaxis_width' + ' ylabel ylabel_secondary ylength yrange yrange_secondary ytics ytics_axis' + ' ytics_rotate ytics_rotate_secondary ytics_secondary ytics_secondary_axis' + ' yv_grid y_voxel yx_ratio zaxis zaxis_color zaxis_type zaxis_width zeroa zerob' + ' zerobern zeta%pi zlabel zlabel_rotate zlength zmin zn_primroot_limit zn_primroot_pretest'; var SYMBOLS = '_ __ %|0 %%|0'; return { lexemes: '[A-Za-z_%][0-9A-Za-z_%]*', keywords: { keyword: KEYWORDS, literal: LITERALS, built_in: BUILTIN_FUNCTIONS, symbol: SYMBOLS, }, contains: [ { className: 'comment', begin: '/\\*', end: '\\*/', contains: ['self'] }, hljs.QUOTE_STRING_MODE, { className: 'number', relevance: 0, variants: [ { // float number w/ exponent // hmm, I wonder if we ought to include other exponent markers? begin: '\\b(\\d+|\\d+\\.|\\.\\d+|\\d+\\.\\d+)[Ee][-+]?\\d+\\b', }, { // bigfloat number begin: '\\b(\\d+|\\d+\\.|\\.\\d+|\\d+\\.\\d+)[Bb][-+]?\\d+\\b', relevance: 10 }, { // float number w/out exponent // Doesn't seem to recognize floats which start with '.' begin: '\\b(\\.\\d+|\\d+\\.\\d+)\\b', }, { // integer in base up to 36 // Doesn't seem to recognize integers which end with '.' begin: '\\b(\\d+|0[0-9A-Za-z]+)\\.?\\b', } ] } ], illegal: /@/ } }; /***/ }, /* 269 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: 'int float string vector matrix if else switch case default while do for in break ' + 'continue global proc return about abs addAttr addAttributeEditorNodeHelp addDynamic ' + 'addNewShelfTab addPP addPanelCategory addPrefixToName advanceToNextDrivenKey ' + 'affectedNet affects aimConstraint air alias aliasAttr align alignCtx alignCurve ' + 'alignSurface allViewFit ambientLight angle angleBetween animCone animCurveEditor ' + 'animDisplay animView annotate appendStringArray applicationName applyAttrPreset ' + 'applyTake arcLenDimContext arcLengthDimension arclen arrayMapper art3dPaintCtx ' + 'artAttrCtx artAttrPaintVertexCtx artAttrSkinPaintCtx artAttrTool artBuildPaintMenu ' + 'artFluidAttrCtx artPuttyCtx artSelectCtx artSetPaintCtx artUserPaintCtx assignCommand ' + 'assignInputDevice assignViewportFactories attachCurve attachDeviceAttr attachSurface ' + 'attrColorSliderGrp attrCompatibility attrControlGrp attrEnumOptionMenu ' + 'attrEnumOptionMenuGrp attrFieldGrp attrFieldSliderGrp attrNavigationControlGrp ' + 'attrPresetEditWin attributeExists attributeInfo attributeMenu attributeQuery ' + 'autoKeyframe autoPlace bakeClip bakeFluidShading bakePartialHistory bakeResults ' + 'bakeSimulation basename basenameEx batchRender bessel bevel bevelPlus binMembership ' + 'bindSkin blend2 blendShape blendShapeEditor blendShapePanel blendTwoAttr blindDataType ' + 'boneLattice boundary boxDollyCtx boxZoomCtx bufferCurve buildBookmarkMenu ' + 'buildKeyframeMenu button buttonManip CBG cacheFile cacheFileCombine cacheFileMerge ' + 'cacheFileTrack camera cameraView canCreateManip canvas capitalizeString catch ' + 'catchQuiet ceil changeSubdivComponentDisplayLevel changeSubdivRegion channelBox ' + 'character characterMap characterOutlineEditor characterize chdir checkBox checkBoxGrp ' + 'checkDefaultRenderGlobals choice circle circularFillet clamp clear clearCache clip ' + 'clipEditor clipEditorCurrentTimeCtx clipSchedule clipSchedulerOutliner clipTrimBefore ' + 'closeCurve closeSurface cluster cmdFileOutput cmdScrollFieldExecuter ' + 'cmdScrollFieldReporter cmdShell coarsenSubdivSelectionList collision color ' + 'colorAtPoint colorEditor colorIndex colorIndexSliderGrp colorSliderButtonGrp ' + 
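/* The keyword string continues below; this module appears to be the highlight.js
   definition for MEL (Maya Embedded Language), judging by the Maya command names
   (addAttr, ikHandle, polyNurbs utilities, etc.). */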
'colorSliderGrp columnLayout commandEcho commandLine commandPort compactHairSystem ' + 'componentEditor compositingInterop computePolysetVolume condition cone confirmDialog ' + 'connectAttr connectControl connectDynamic connectJoint connectionInfo constrain ' + 'constrainValue constructionHistory container containsMultibyte contextInfo control ' + 'convertFromOldLayers convertIffToPsd convertLightmap convertSolidTx convertTessellation ' + 'convertUnit copyArray copyFlexor copyKey copySkinWeights cos cpButton cpCache ' + 'cpClothSet cpCollision cpConstraint cpConvClothToMesh cpForces cpGetSolverAttr cpPanel ' + 'cpProperty cpRigidCollisionFilter cpSeam cpSetEdit cpSetSolverAttr cpSolver ' + 'cpSolverTypes cpTool cpUpdateClothUVs createDisplayLayer createDrawCtx createEditor ' + 'createLayeredPsdFile createMotionField createNewShelf createNode createRenderLayer ' + 'createSubdivRegion cross crossProduct ctxAbort ctxCompletion ctxEditMode ctxTraverse ' + 'currentCtx currentTime currentTimeCtx currentUnit curve curveAddPtCtx ' + 'curveCVCtx curveEPCtx curveEditorCtx curveIntersect curveMoveEPCtx curveOnSurface ' + 'curveSketchCtx cutKey cycleCheck cylinder dagPose date defaultLightListCheckBox ' + 'defaultNavigation defineDataServer defineVirtualDevice deformer deg_to_rad delete ' + 'deleteAttr deleteShadingGroupsAndMaterials deleteShelfTab deleteUI deleteUnusedBrushes ' + 'delrandstr detachCurve detachDeviceAttr detachSurface deviceEditor devicePanel dgInfo ' + 'dgdirty dgeval dgtimer dimWhen directKeyCtx directionalLight dirmap dirname disable ' + 'disconnectAttr disconnectJoint diskCache displacementToPoly displayAffected ' + 'displayColor displayCull displayLevelOfDetail displayPref displayRGBColor ' + 'displaySmoothness displayStats displayString displaySurface distanceDimContext ' + 'distanceDimension doBlur dolly dollyCtx dopeSheetEditor dot dotProduct ' + 'doubleProfileBirailSurface drag dragAttrContext draggerContext dropoffLocator ' + 'duplicate duplicateCurve duplicateSurface dynCache dynControl dynExport dynExpression ' + 'dynGlobals dynPaintEditor dynParticleCtx dynPref dynRelEdPanel dynRelEditor ' + 'dynamicLoad editAttrLimits editDisplayLayerGlobals editDisplayLayerMembers ' + 'editRenderLayerAdjustment editRenderLayerGlobals editRenderLayerMembers editor ' + 'editorTemplate effector emit emitter enableDevice encodeString endString endsWith env ' + 'equivalent equivalentTol erf error eval evalDeferred evalEcho event ' + 'exactWorldBoundingBox exclusiveLightCheckBox exec executeForEachObject exists exp ' + 'expression expressionEditorListen extendCurve extendSurface extrude fcheck fclose feof ' + 'fflush fgetline fgetword file fileBrowserDialog fileDialog fileExtension fileInfo ' + 'filetest filletCurve filter filterCurve filterExpand filterStudioImport ' + 'findAllIntersections findAnimCurves findKeyframe findMenuItem findRelatedSkinCluster ' + 'finder firstParentOf fitBspline flexor floatEq floatField floatFieldGrp floatScrollBar ' + 'floatSlider floatSlider2 floatSliderButtonGrp floatSliderGrp floor flow fluidCacheInfo ' + 'fluidEmitter fluidVoxelInfo flushUndo fmod fontDialog fopen formLayout format fprint ' + 'frameLayout fread freeFormFillet frewind fromNativePath fwrite gamma gauss ' + 'geometryConstraint getApplicationVersionAsFloat getAttr getClassification ' + 'getDefaultBrush getFileList getFluidAttr getInputDeviceRange getMayaPanelTypes ' + 'getModifiers getPanel getParticleAttr getPluginResource getenv getpid glRender ' + 'glRenderEditor globalStitch gmatch goal 
gotoBindPose grabColor gradientControl ' + 'gradientControlNoAttr graphDollyCtx graphSelectContext graphTrackCtx gravity grid ' + 'gridLayout group groupObjectsByName HfAddAttractorToAS HfAssignAS HfBuildEqualMap ' + 'HfBuildFurFiles HfBuildFurImages HfCancelAFR HfConnectASToHF HfCreateAttractor ' + 'HfDeleteAS HfEditAS HfPerformCreateAS HfRemoveAttractorFromAS HfSelectAttached ' + 'HfSelectAttractors HfUnAssignAS hardenPointCurve hardware hardwareRenderPanel ' + 'headsUpDisplay headsUpMessage help helpLine hermite hide hilite hitTest hotBox hotkey ' + 'hotkeyCheck hsv_to_rgb hudButton hudSlider hudSliderButton hwReflectionMap hwRender ' + 'hwRenderLoad hyperGraph hyperPanel hyperShade hypot iconTextButton iconTextCheckBox ' + 'iconTextRadioButton iconTextRadioCollection iconTextScrollList iconTextStaticLabel ' + 'ikHandle ikHandleCtx ikHandleDisplayScale ikSolver ikSplineHandleCtx ikSystem ' + 'ikSystemInfo ikfkDisplayMethod illustratorCurves image imfPlugins inheritTransform ' + 'insertJoint insertJointCtx insertKeyCtx insertKnotCurve insertKnotSurface instance ' + 'instanceable instancer intField intFieldGrp intScrollBar intSlider intSliderGrp ' + 'interToUI internalVar intersect iprEngine isAnimCurve isConnected isDirty isParentOf ' + 'isSameObject isTrue isValidObjectName isValidString isValidUiName isolateSelect ' + 'itemFilter itemFilterAttr itemFilterRender itemFilterType joint jointCluster jointCtx ' + 'jointDisplayScale jointLattice keyTangent keyframe keyframeOutliner ' + 'keyframeRegionCurrentTimeCtx keyframeRegionDirectKeyCtx keyframeRegionDollyCtx ' + 'keyframeRegionInsertKeyCtx keyframeRegionMoveKeyCtx keyframeRegionScaleKeyCtx ' + 'keyframeRegionSelectKeyCtx keyframeRegionSetKeyCtx keyframeRegionTrackCtx ' + 'keyframeStats lassoContext lattice latticeDeformKeyCtx launch launchImageEditor ' + 'layerButton layeredShaderPort layeredTexturePort layout layoutDialog lightList ' + 'lightListEditor lightListPanel lightlink lineIntersection linearPrecision linstep ' + 'listAnimatable listAttr listCameras listConnections listDeviceAttachments listHistory ' + 'listInputDeviceAxes listInputDeviceButtons listInputDevices listMenuAnnotation ' + 'listNodeTypes listPanelCategories listRelatives listSets listTransforms ' + 'listUnselected listerEditor loadFluid loadNewShelf loadPlugin ' + 'loadPluginLanguageResources loadPrefObjects localizedPanelLabel lockNode loft log ' + 'longNameOf lookThru ls lsThroughFilter lsType lsUI Mayatomr mag makeIdentity makeLive ' + 'makePaintable makeRoll makeSingleSurface makeTubeOn makebot manipMoveContext ' + 'manipMoveLimitsCtx manipOptions manipRotateContext manipRotateLimitsCtx ' + 'manipScaleContext manipScaleLimitsCtx marker match max memory menu menuBarLayout ' + 'menuEditor menuItem menuItemToShelf menuSet menuSetPref messageLine min minimizeApp ' + 'mirrorJoint modelCurrentTimeCtx modelEditor modelPanel mouse movIn movOut move ' + 'moveIKtoFK moveKeyCtx moveVertexAlongDirection multiProfileBirailSurface mute ' + 'nParticle nameCommand nameField namespace namespaceInfo newPanelItems newton nodeCast ' + 'nodeIconButton nodeOutliner nodePreset nodeType noise nonLinear normalConstraint ' + 'normalize nurbsBoolean nurbsCopyUVSet nurbsCube nurbsEditUV nurbsPlane nurbsSelect ' + 'nurbsSquare nurbsToPoly nurbsToPolygonsPref nurbsToSubdiv nurbsToSubdivPref ' + 'nurbsUVSet nurbsViewDirectionVector objExists objectCenter objectLayer objectType ' + 'objectTypeUI obsoleteProc oceanNurbsPreviewPlane offsetCurve offsetCurveOnSurface ' + 'offsetSurface 
openGLExtension openMayaPref optionMenu optionMenuGrp optionVar orbit ' + 'orbitCtx orientConstraint outlinerEditor outlinerPanel overrideModifier ' + 'paintEffectsDisplay pairBlend palettePort paneLayout panel panelConfiguration ' + 'panelHistory paramDimContext paramDimension paramLocator parent parentConstraint ' + 'particle particleExists particleInstancer particleRenderInfo partition pasteKey ' + 'pathAnimation pause pclose percent performanceOptions pfxstrokes pickWalk picture ' + 'pixelMove planarSrf plane play playbackOptions playblast plugAttr plugNode pluginInfo ' + 'pluginResourceUtil pointConstraint pointCurveConstraint pointLight pointMatrixMult ' + 'pointOnCurve pointOnSurface pointPosition poleVectorConstraint polyAppend ' + 'polyAppendFacetCtx polyAppendVertex polyAutoProjection polyAverageNormal ' + 'polyAverageVertex polyBevel polyBlendColor polyBlindData polyBoolOp polyBridgeEdge ' + 'polyCacheMonitor polyCheck polyChipOff polyClipboard polyCloseBorder polyCollapseEdge ' + 'polyCollapseFacet polyColorBlindData polyColorDel polyColorPerVertex polyColorSet ' + 'polyCompare polyCone polyCopyUV polyCrease polyCreaseCtx polyCreateFacet ' + 'polyCreateFacetCtx polyCube polyCut polyCutCtx polyCylinder polyCylindricalProjection ' + 'polyDelEdge polyDelFacet polyDelVertex polyDuplicateAndConnect polyDuplicateEdge ' + 'polyEditUV polyEditUVShell polyEvaluate polyExtrudeEdge polyExtrudeFacet ' + 'polyExtrudeVertex polyFlipEdge polyFlipUV polyForceUV polyGeoSampler polyHelix ' + 'polyInfo polyInstallAction polyLayoutUV polyListComponentConversion polyMapCut ' + 'polyMapDel polyMapSew polyMapSewMove polyMergeEdge polyMergeEdgeCtx polyMergeFacet ' + 'polyMergeFacetCtx polyMergeUV polyMergeVertex polyMirrorFace polyMoveEdge ' + 'polyMoveFacet polyMoveFacetUV polyMoveUV polyMoveVertex polyNormal polyNormalPerVertex ' + 'polyNormalizeUV polyOptUvs polyOptions polyOutput polyPipe polyPlanarProjection ' + 'polyPlane polyPlatonicSolid polyPoke polyPrimitive polyPrism polyProjection ' + 'polyPyramid polyQuad polyQueryBlindData polyReduce polySelect polySelectConstraint ' + 'polySelectConstraintMonitor polySelectCtx polySelectEditCtx polySeparate ' + 'polySetToFaceNormal polySewEdge polyShortestPathCtx polySmooth polySoftEdge ' + 'polySphere polySphericalProjection polySplit polySplitCtx polySplitEdge polySplitRing ' + 'polySplitVertex polyStraightenUVBorder polySubdivideEdge polySubdivideFacet ' + 'polyToSubdiv polyTorus polyTransfer polyTriangulate polyUVSet polyUnite polyWedgeFace ' + 'popen popupMenu pose pow preloadRefEd print progressBar progressWindow projFileViewer ' + 'projectCurve projectTangent projectionContext projectionManip promptDialog propModCtx ' + 'propMove psdChannelOutliner psdEditTextureFile psdExport psdTextureFile putenv pwd ' + 'python querySubdiv quit rad_to_deg radial radioButton radioButtonGrp radioCollection ' + 'radioMenuItemCollection rampColorPort rand randomizeFollicles randstate rangeControl ' + 'readTake rebuildCurve rebuildSurface recordAttr recordDevice redo reference ' + 'referenceEdit referenceQuery refineSubdivSelectionList refresh refreshAE ' + 'registerPluginResource rehash reloadImage removeJoint removeMultiInstance ' + 'removePanelCategory rename renameAttr renameSelectionList renameUI render ' + 'renderGlobalsNode renderInfo renderLayerButton renderLayerParent ' + 'renderLayerPostProcess renderLayerUnparent renderManip renderPartition ' + 'renderQualityNode renderSettings renderThumbnailUpdate renderWindowEditor ' + 'renderWindowSelectContext 
renderer reorder reorderDeformers requires reroot ' + 'resampleFluid resetAE resetPfxToPolyCamera resetTool resolutionNode retarget ' + 'reverseCurve reverseSurface revolve rgb_to_hsv rigidBody rigidSolver roll rollCtx ' + 'rootOf rot rotate rotationInterpolation roundConstantRadius rowColumnLayout rowLayout ' + 'runTimeCommand runup sampleImage saveAllShelves saveAttrPreset saveFluid saveImage ' + 'saveInitialState saveMenu savePrefObjects savePrefs saveShelf saveToolSettings scale ' + 'scaleBrushBrightness scaleComponents scaleConstraint scaleKey scaleKeyCtx sceneEditor ' + 'sceneUIReplacement scmh scriptCtx scriptEditorInfo scriptJob scriptNode scriptTable ' + 'scriptToShelf scriptedPanel scriptedPanelType scrollField scrollLayout sculpt ' + 'searchPathArray seed selLoadSettings select selectContext selectCurveCV selectKey ' + 'selectKeyCtx selectKeyframeRegionCtx selectMode selectPref selectPriority selectType ' + 'selectedNodes selectionConnection separator setAttr setAttrEnumResource ' + 'setAttrMapping setAttrNiceNameResource setConstraintRestPosition ' + 'setDefaultShadingGroup setDrivenKeyframe setDynamic setEditCtx setEditor setFluidAttr ' + 'setFocus setInfinity setInputDeviceMapping setKeyCtx setKeyPath setKeyframe ' + 'setKeyframeBlendshapeTargetWts setMenuMode setNodeNiceNameResource setNodeTypeFlag ' + 'setParent setParticleAttr setPfxToPolyCamera setPluginResource setProject ' + 'setStampDensity setStartupMessage setState setToolTo setUITemplate setXformManip sets ' + 'shadingConnection shadingGeometryRelCtx shadingLightRelCtx shadingNetworkCompare ' + 'shadingNode shapeCompare shelfButton shelfLayout shelfTabLayout shellField ' + 'shortNameOf showHelp showHidden showManipCtx showSelectionInTitle ' + 'showShadingGroupAttrEditor showWindow sign simplify sin singleProfileBirailSurface ' + 'size sizeBytes skinCluster skinPercent smoothCurve smoothTangentSurface smoothstep ' + 'snap2to2 snapKey snapMode snapTogetherCtx snapshot soft softMod softModCtx sort sound ' + 'soundControl source spaceLocator sphere sphrand spotLight spotLightPreviewPort ' + 'spreadSheetEditor spring sqrt squareSurface srtContext stackTrace startString ' + 'startsWith stitchAndExplodeShell stitchSurface stitchSurfacePoints strcmp ' + 'stringArrayCatenate stringArrayContains stringArrayCount stringArrayInsertAtIndex ' + 'stringArrayIntersector stringArrayRemove stringArrayRemoveAtIndex ' + 'stringArrayRemoveDuplicates stringArrayRemoveExact stringArrayToString ' + 'stringToStringArray strip stripPrefixFromName stroke subdAutoProjection ' + 'subdCleanTopology subdCollapse subdDuplicateAndConnect subdEditUV ' + 'subdListComponentConversion subdMapCut subdMapSewMove subdMatchTopology subdMirror ' + 'subdToBlind subdToPoly subdTransferUVsToCache subdiv subdivCrease ' + 'subdivDisplaySmoothness substitute substituteAllString substituteGeometry substring ' + 'surface surfaceSampler surfaceShaderList swatchDisplayPort switchTable symbolButton ' + 'symbolCheckBox sysFile system tabLayout tan tangentConstraint texLatticeDeformContext ' + 'texManipContext texMoveContext texMoveUVShellContext texRotateContext texScaleContext ' + 'texSelectContext texSelectShortestPathCtx texSmudgeUVContext texWinToolCtx text ' + 'textCurves textField textFieldButtonGrp textFieldGrp textManip textScrollList ' + 'textToShelf textureDisplacePlane textureHairColor texturePlacementContext ' + 'textureWindow threadCount threePointArcCtx timeControl timePort timerX toNativePath ' + 'toggle toggleAxis toggleWindowVisibility tokenize 
tokenizeList tolerance tolower ' + 'toolButton toolCollection toolDropped toolHasOptions toolPropertyWindow torus toupper ' + 'trace track trackCtx transferAttributes transformCompare transformLimits translator ' + 'trim trunc truncateFluidCache truncateHairCache tumble tumbleCtx turbulence ' + 'twoPointArcCtx uiRes uiTemplate unassignInputDevice undo undoInfo ungroup uniform unit ' + 'unloadPlugin untangleUV untitledFileName untrim upAxis updateAE userCtx uvLink ' + 'uvSnapshot validateShelfName vectorize view2dToolCtx viewCamera viewClipPlane ' + 'viewFit viewHeadOn viewLookAt viewManip viewPlace viewSet visor volumeAxis vortex ' + 'waitCursor warning webBrowser webBrowserPrefs whatIs window windowPref wire ' + 'wireContext workspace wrinkle wrinkleContext writeTake xbmLangPathList xform', illegal: '</', contains: [ hljs.C_NUMBER_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { className: 'string', begin: '`', end: '`', contains: [hljs.BACKSLASH_ESCAPE] }, { // eats variables begin: '[\\$\\%\\@](\\^\\w\\b|#\\w+|[^\\s\\w{]|{\\w+}|\\w+)' }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }; }; /***/ }, /* 270 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: 'module use_module import_module include_module end_module initialise ' + 'mutable initialize finalize finalise interface implementation pred ' + 'mode func type inst solver any_pred any_func is semidet det nondet ' + 'multi erroneous failure cc_nondet cc_multi typeclass instance where ' + 'pragma promise external trace atomic or_else require_complete_switch ' + 'require_det require_semidet require_multi require_nondet ' + 'require_cc_multi require_cc_nondet require_erroneous require_failure', meta: // pragma 'inline no_inline type_spec source_file fact_table obsolete memo ' + 'loop_check minimal_model terminates does_not_terminate ' + 'check_termination promise_equivalent_clauses ' + // preprocessor 'foreign_proc foreign_decl foreign_code foreign_type ' + 'foreign_import_module foreign_export_enum foreign_export ' + 'foreign_enum may_call_mercury will_not_call_mercury thread_safe ' + 'not_thread_safe maybe_thread_safe promise_pure promise_semipure ' + 'tabled_for_io local untrailed trailed attach_to_io_state ' + 'can_pass_as_mercury_type stable will_not_throw_exception ' + 'may_modify_trail will_not_modify_trail may_duplicate ' + 'may_not_duplicate affects_liveness does_not_affect_liveness ' + 'doesnt_affect_liveness no_sharing unknown_sharing sharing', built_in: 'some all not if then else true fail false try catch catch_any ' + 'semidet_true semidet_false semidet_fail impure_true impure semipure' }; var COMMENT = hljs.COMMENT('%', '$'); var NUMCODE = { className: 'number', begin: "0'.\\|0[box][0-9a-fA-F]*" }; var ATOM = hljs.inherit(hljs.APOS_STRING_MODE, {relevance: 0}); var STRING = hljs.inherit(hljs.QUOTE_STRING_MODE, {relevance: 0}); var STRING_FMT = { className: 'subst', begin: '\\\\[abfnrtv]\\|\\\\x[0-9a-fA-F]*\\\\\\|%[-+# *.0-9]*[dioxXucsfeEgGp]', relevance: 0 }; STRING.contains.push(STRING_FMT); var IMPLICATION = { className: 'built_in', variants: [ {begin: '<=>'}, {begin: '<=', relevance: 0}, {begin: '=>', relevance: 0}, {begin: '/\\\\'}, {begin: '\\\\/'} ] }; var HEAD_BODY_CONJUNCTION = { className: 'built_in', variants: [ {begin: ':-\\|-->'}, {begin: '=', relevance: 0} ] }; return { aliases: ['m', 'moo'], keywords: KEYWORDS, contains: [ IMPLICATION, HEAD_BODY_CONJUNCTION, COMMENT, hljs.C_BLOCK_COMMENT_MODE, NUMCODE, hljs.NUMBER_MODE, ATOM, STRING, {begin: /:-/} // 
relevance booster ] }; }; /***/ }, /* 271 */ /***/ function(module, exports) { module.exports = function(hljs) { //local labels: %?[FB]?[AT]?\d{1,2}\w+ return { case_insensitive: true, aliases: ['mips'], lexemes: '\\.?' + hljs.IDENT_RE, keywords: { meta: //GNU preprocs '.2byte .4byte .align .ascii .asciz .balign .byte .code .data .else .end .endif .endm .endr .equ .err .exitm .extern .global .hword .if .ifdef .ifndef .include .irp .long .macro .rept .req .section .set .skip .space .text .word .ltorg ', built_in: '$0 $1 $2 $3 $4 $5 $6 $7 $8 $9 $10 $11 $12 $13 $14 $15 ' + // integer registers '$16 $17 $18 $19 $20 $21 $22 $23 $24 $25 $26 $27 $28 $29 $30 $31 ' + // integer registers 'zero at v0 v1 a0 a1 a2 a3 a4 a5 a6 a7 ' + // integer register aliases 't0 t1 t2 t3 t4 t5 t6 t7 t8 t9 s0 s1 s2 s3 s4 s5 s6 s7 s8 ' + // integer register aliases 'k0 k1 gp sp fp ra ' + // integer register aliases '$f0 $f1 $f2 $f2 $f4 $f5 $f6 $f7 $f8 $f9 $f10 $f11 $f12 $f13 $f14 $f15 ' + // floating-point registers '$f16 $f17 $f18 $f19 $f20 $f21 $f22 $f23 $f24 $f25 $f26 $f27 $f28 $f29 $f30 $f31 ' + // floating-point registers 'Context Random EntryLo0 EntryLo1 Context PageMask Wired EntryHi ' + // Coprocessor 0 registers 'HWREna BadVAddr Count Compare SR IntCtl SRSCtl SRSMap Cause EPC PRId ' + // Coprocessor 0 registers 'EBase Config Config1 Config2 Config3 LLAddr Debug DEPC DESAVE CacheErr ' + // Coprocessor 0 registers 'ECC ErrorEPC TagLo DataLo TagHi DataHi WatchLo WatchHi PerfCtl PerfCnt ' // Coprocessor 0 registers }, contains: [ { className: 'keyword', begin: '\\b('+ //mnemonics // 32-bit integer instructions 'addi?u?|andi?|b(al)?|beql?|bgez(al)?l?|bgtzl?|blezl?|bltz(al)?l?|' + 'bnel?|cl[oz]|divu?|ext|ins|j(al)?|jalr(\.hb)?|jr(\.hb)?|lbu?|lhu?|' + 'll|lui|lw[lr]?|maddu?|mfhi|mflo|movn|movz|move|msubu?|mthi|mtlo|mul|' + 'multu?|nop|nor|ori?|rotrv?|sb|sc|se[bh]|sh|sllv?|slti?u?|srav?|' + 'srlv?|subu?|sw[lr]?|xori?|wsbh|' + // floating-point instructions 'abs\.[sd]|add\.[sd]|alnv.ps|bc1[ft]l?|' + 'c\.(s?f|un|u?eq|[ou]lt|[ou]le|ngle?|seq|l[et]|ng[et])\.[sd]|' + '(ceil|floor|round|trunc)\.[lw]\.[sd]|cfc1|cvt\.d\.[lsw]|' + 'cvt\.l\.[dsw]|cvt\.ps\.s|cvt\.s\.[dlw]|cvt\.s\.p[lu]|cvt\.w\.[dls]|' + 'div\.[ds]|ldx?c1|luxc1|lwx?c1|madd\.[sd]|mfc1|mov[fntz]?\.[ds]|' + 'msub\.[sd]|mth?c1|mul\.[ds]|neg\.[ds]|nmadd\.[ds]|nmsub\.[ds]|' + 'p[lu][lu]\.ps|recip\.fmt|r?sqrt\.[ds]|sdx?c1|sub\.[ds]|suxc1|' + 'swx?c1|' + // system control instructions 'break|cache|d?eret|[de]i|ehb|mfc0|mtc0|pause|prefx?|rdhwr|' + 'rdpgpr|sdbbp|ssnop|synci?|syscall|teqi?|tgei?u?|tlb(p|r|w[ir])|' + 'tlti?u?|tnei?|wait|wrpgpr'+ ')', end: '\\s' }, hljs.COMMENT('[;#]', '$'), hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, { className: 'string', begin: '\'', end: '[^\\\\]\'', relevance: 0 }, { className: 'title', begin: '\\|', end: '\\|', illegal: '\\n', relevance: 0 }, { className: 'number', variants: [ {begin: '0x[0-9a-f]+'}, //hex {begin: '\\b-?\\d+'} //bare number ], relevance: 0 }, { className: 'symbol', variants: [ {begin: '^\\s*[a-z_\\.\\$][a-z0-9_\\.\\$]+:'}, //GNU MIPS syntax {begin: '^\\s*[0-9]+:'}, // numbered local labels {begin: '[0-9]+[bf]' } // number local label reference (backwards, forwards) ], relevance: 0 } ], illegal: '\/' }; }; /***/ }, /* 272 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: 'environ vocabularies notations constructors definitions ' + 'registrations theorems schemes requirements begin end definition ' + 'registration cluster existence pred func defpred deffunc theorem ' + 
'proof let take assume then thus hence ex for st holds consider ' + 'reconsider such that and in provided of as from be being by means ' + 'equals implies iff redefine define now not or attr is mode ' + 'suppose per cases set thesis contradiction scheme reserve struct ' + 'correctness compatibility coherence symmetry assymetry ' + 'reflexivity irreflexivity connectedness uniqueness commutativity ' + 'idempotence involutiveness projectivity', contains: [ hljs.COMMENT('::', '$') ] }; }; /***/ }, /* 273 */ /***/ function(module, exports) { module.exports = function(hljs) { var PERL_KEYWORDS = 'getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ' + 'ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime ' + 'readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qq' + 'fileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent ' + 'shutdown dump chomp connect getsockname die socketpair close flock exists index shmget' + 'sub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr ' + 'unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 ' + 'getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline ' + 'endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand ' + 'mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink ' + 'getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr ' + 'untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link ' + 'getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller ' + 'lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and ' + 'sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 ' + 'chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach ' + 'tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedir' + 'ioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe ' + 'atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when'; var SUBST = { className: 'subst', begin: '[$@]\\{', end: '\\}', keywords: PERL_KEYWORDS }; var METHOD = { begin: '->{', end: '}' // contains defined later }; var VAR = { variants: [ {begin: /\$\d/}, {begin: /[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/}, {begin: /[\$%@][^\s\w{]/, relevance: 0} ] }; var STRING_CONTAINS = [hljs.BACKSLASH_ESCAPE, SUBST, VAR]; var PERL_DEFAULT_CONTAINS = [ VAR, hljs.HASH_COMMENT_MODE, hljs.COMMENT( '^\\=\\w', '\\=cut', { endsWithParent: true } ), METHOD, { className: 'string', contains: STRING_CONTAINS, variants: [ { begin: 'q[qwxr]?\\s*\\(', end: '\\)', relevance: 5 }, { begin: 'q[qwxr]?\\s*\\[', end: '\\]', relevance: 5 }, { begin: 'q[qwxr]?\\s*\\{', end: '\\}', relevance: 5 }, { begin: 'q[qwxr]?\\s*\\|', end: '\\|', relevance: 5 }, { begin: 'q[qwxr]?\\s*\\<', end: '\\>', relevance: 5 }, { begin: 'qw\\s+q', end: 'q', relevance: 5 }, { begin: '\'', end: '\'', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '"', end: '"' }, { begin: '`', end: '`', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '{\\w+}', contains: [], relevance: 0 }, { begin: '\-?\\w+\\s*\\=\\>', contains: [], relevance: 0 } ] }, { className: 'number', begin: '(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b', relevance: 0 }, { // regexp container 
begin: '(\\/\\/|' + hljs.RE_STARTERS_RE + '|\\b(split|return|print|reverse|grep)\\b)\\s*', keywords: 'split return print reverse grep', relevance: 0, contains: [ hljs.HASH_COMMENT_MODE, { className: 'regexp', begin: '(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*', relevance: 10 }, { className: 'regexp', begin: '(m|qr)?/', end: '/[a-z]*', contains: [hljs.BACKSLASH_ESCAPE], relevance: 0 // allows empty "//" which is a common comment delimiter in other languages } ] }, { className: 'function', beginKeywords: 'sub', end: '(\\s*\\(.*?\\))?[;{]', excludeEnd: true, relevance: 5, contains: [hljs.TITLE_MODE] }, { begin: '-\\w\\b', relevance: 0 }, { begin: "^__DATA__$", end: "^__END__$", subLanguage: 'mojolicious', contains: [ { begin: "^@@.*", end: "$", className: "comment" } ] } ]; SUBST.contains = PERL_DEFAULT_CONTAINS; METHOD.contains = PERL_DEFAULT_CONTAINS; return { aliases: ['pl', 'pm'], lexemes: /[\w\.]+/, keywords: PERL_KEYWORDS, contains: PERL_DEFAULT_CONTAINS }; }; /***/ }, /* 274 */ /***/ function(module, exports) { module.exports = function(hljs) { return { subLanguage: 'xml', contains: [ { className: 'meta', begin: '^__(END|DATA)__$' }, // mojolicious line { begin: "^\\s*%{1,2}={0,2}", end: '$', subLanguage: 'perl' }, // mojolicious block { begin: "<%{1,2}={0,2}", end: "={0,1}%>", subLanguage: 'perl', excludeBegin: true, excludeEnd: true } ] }; }; /***/ }, /* 275 */ /***/ function(module, exports) { module.exports = function(hljs) { var NUMBER = { className: 'number', relevance: 0, variants: [ { begin: '[$][a-fA-F0-9]+' }, hljs.NUMBER_MODE ] }; return { case_insensitive: true, keywords: { keyword: 'public private property continue exit extern new try catch ' + 'eachin not abstract final select case default const local global field ' + 'end if then else elseif endif while wend repeat until forever for ' + 'to step next return module inline throw import', built_in: 'DebugLog DebugStop Error Print ACos ACosr ASin ASinr ATan ATan2 ATan2r ATanr Abs Abs Ceil ' + 'Clamp Clamp Cos Cosr Exp Floor Log Max Max Min Min Pow Sgn Sgn Sin Sinr Sqrt Tan Tanr Seed PI HALFPI TWOPI', literal: 'true false null and or shl shr mod' }, illegal: /\/\*/, contains: [ hljs.COMMENT('#rem', '#end'), hljs.COMMENT( "'", '$', { relevance: 0 } ), { className: 'function', beginKeywords: 'function method', end: '[(=:]|$', illegal: /\n/, contains: [ hljs.UNDERSCORE_TITLE_MODE ] }, { className: 'class', beginKeywords: 'class interface', end: '$', contains: [ { beginKeywords: 'extends implements' }, hljs.UNDERSCORE_TITLE_MODE ] }, { className: 'built_in', begin: '\\b(self|super)\\b' }, { className: 'meta', begin: '\\s*#', end: '$', keywords: {'meta-keyword': 'if else elseif endif end then'} }, { className: 'meta', begin: '^\\s*strict\\b' }, { beginKeywords: 'alias', end: '=', contains: [hljs.UNDERSCORE_TITLE_MODE] }, hljs.QUOTE_STRING_MODE, NUMBER ] } }; /***/ }, /* 276 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: // Moonscript keywords 'if then not for in while do return else elseif break continue switch and or ' + 'unless when class extends super local import export from using', literal: 'true false nil', built_in: '_G _VERSION assert collectgarbage dofile error getfenv getmetatable ipairs load ' + 'loadfile loadstring module next pairs pcall print rawequal rawget rawset require ' + 'select setfenv setmetatable tonumber tostring type unpack xpcall coroutine debug ' + 'io math os package string table' }; var JS_IDENT_RE = '[A-Za-z$_][0-9A-Za-z$_]*'; var SUBST = { className: 
'subst', begin: /#\{/, end: /}/, keywords: KEYWORDS }; var EXPRESSIONS = [ hljs.inherit(hljs.C_NUMBER_MODE, {starts: {end: '(\\s*/)?', relevance: 0}}), // a number tries to eat the following slash to prevent treating it as a regexp { className: 'string', variants: [ { begin: /'/, end: /'/, contains: [hljs.BACKSLASH_ESCAPE] }, { begin: /"/, end: /"/, contains: [hljs.BACKSLASH_ESCAPE, SUBST] } ] }, { className: 'built_in', begin: '@__' + hljs.IDENT_RE }, { begin: '@' + hljs.IDENT_RE // relevance booster on par with CoffeeScript }, { begin: hljs.IDENT_RE + '\\\\' + hljs.IDENT_RE // inst\method } ]; SUBST.contains = EXPRESSIONS; var TITLE = hljs.inherit(hljs.TITLE_MODE, {begin: JS_IDENT_RE}); var PARAMS_RE = '(\\(.*\\))?\\s*\\B[-=]>'; var PARAMS = { className: 'params', begin: '\\([^\\(]', returnBegin: true, /* We need another contained nameless mode to not have every nested pair of parens to be called "params" */ contains: [{ begin: /\(/, end: /\)/, keywords: KEYWORDS, contains: ['self'].concat(EXPRESSIONS) }] }; return { aliases: ['moon'], keywords: KEYWORDS, illegal: /\/\*/, contains: EXPRESSIONS.concat([ hljs.COMMENT('--', '$'), { className: 'function', // function: -> => begin: '^\\s*' + JS_IDENT_RE + '\\s*=\\s*' + PARAMS_RE, end: '[-=]>', returnBegin: true, contains: [TITLE, PARAMS] }, { begin: /[\(,:=]\s*/, // anonymous function start relevance: 0, contains: [ { className: 'function', begin: PARAMS_RE, end: '[-=]>', returnBegin: true, contains: [PARAMS] } ] }, { className: 'class', beginKeywords: 'class', end: '$', illegal: /[:="\[\]]/, contains: [ { beginKeywords: 'extends', endsWithParent: true, illegal: /[:="\[\]]/, contains: [TITLE] }, TITLE ] }, { className: 'name', // table begin: JS_IDENT_RE + ':', end: ':', returnBegin: true, returnEnd: true, relevance: 0 } ]) }; }; /***/ }, /* 277 */ /***/ function(module, exports) { module.exports = function(hljs) { var VAR = { className: 'variable', variants: [ {begin: /\$\d+/}, {begin: /\$\{/, end: /}/}, {begin: '[\\$\\@]' + hljs.UNDERSCORE_IDENT_RE} ] }; var DEFAULT = { endsWithParent: true, lexemes: '[a-z/_]+', keywords: { literal: 'on off yes no true false none blocked debug info notice warn error crit ' + 'select break last permanent redirect kqueue rtsig epoll poll /dev/poll' }, relevance: 0, illegal: '=>', contains: [ hljs.HASH_COMMENT_MODE, { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, VAR], variants: [ {begin: /"/, end: /"/}, {begin: /'/, end: /'/} ] }, // this swallows entire URLs to avoid detecting numbers within { begin: '([a-z]+):/', end: '\\s', endsWithParent: true, excludeEnd: true, contains: [VAR] }, { className: 'regexp', contains: [hljs.BACKSLASH_ESCAPE, VAR], variants: [ {begin: "\\s\\^", end: "\\s|{|;", returnEnd: true}, // regexp locations (~, ~*) {begin: "~\\*?\\s+", end: "\\s|{|;", returnEnd: true}, // *.example.com {begin: "\\*(\\.[a-z\\-]+)+"}, // sub.example.* {begin: "([a-z\\-]+\\.)+\\*"} ] }, // IP { className: 'number', begin: '\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b' }, // units { className: 'number', begin: '\\b\\d+[kKmMgGdshdwy]*\\b', relevance: 0 }, VAR ] }; return { aliases: ['nginxconf'], contains: [ hljs.HASH_COMMENT_MODE, { begin: hljs.UNDERSCORE_IDENT_RE + '\\s+{', returnBegin: true, end: '{', contains: [ { className: 'section', begin: hljs.UNDERSCORE_IDENT_RE } ], relevance: 0 }, { begin: hljs.UNDERSCORE_IDENT_RE + '\\s', end: ';|{', returnBegin: true, contains: [ { className: 'attribute', begin: hljs.UNDERSCORE_IDENT_RE, starts: DEFAULT } ], relevance: 0 } ], illegal: 
'[^\\s\\}]' }; }; /***/ }, /* 278 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['nim'], keywords: { keyword: 'addr and as asm bind block break case cast const continue converter ' + 'discard distinct div do elif else end enum except export finally ' + 'for from generic if import in include interface is isnot iterator ' + 'let macro method mixin mod nil not notin object of or out proc ptr ' + 'raise ref return shl shr static template try tuple type using var ' + 'when while with without xor yield', literal: 'shared guarded stdin stdout stderr result true false', built_in: 'int int8 int16 int32 int64 uint uint8 uint16 uint32 uint64 float ' + 'float32 float64 bool char string cstring pointer expr stmt void ' + 'auto any range array openarray varargs seq set clong culong cchar ' + 'cschar cshort cint csize clonglong cfloat cdouble clongdouble ' + 'cuchar cushort cuint culonglong cstringarray semistatic' }, contains: [ { className: 'meta', // Actually pragma begin: /{\./, end: /\.}/, relevance: 10 }, { className: 'string', begin: /[a-zA-Z]\w*"/, end: /"/, contains: [{begin: /""/}] }, { className: 'string', begin: /([a-zA-Z]\w*)?"""/, end: /"""/ }, hljs.QUOTE_STRING_MODE, { className: 'type', begin: /\b[A-Z]\w+\b/, relevance: 0 }, { className: 'number', relevance: 0, variants: [ {begin: /\b(0[xX][0-9a-fA-F][_0-9a-fA-F]*)('?[iIuU](8|16|32|64))?/}, {begin: /\b(0o[0-7][_0-7]*)('?[iIuUfF](8|16|32|64))?/}, {begin: /\b(0(b|B)[01][_01]*)('?[iIuUfF](8|16|32|64))?/}, {begin: /\b(\d[_\d]*)('?[iIuUfF](8|16|32|64))?/} ] }, hljs.HASH_COMMENT_MODE ] } }; /***/ }, /* 279 */ /***/ function(module, exports) { module.exports = function(hljs) { var NIX_KEYWORDS = { keyword: 'rec with let in inherit assert if else then', literal: 'true false or and null', built_in: 'import abort baseNameOf dirOf isNull builtins map removeAttrs throw ' + 'toString derivation' }; var ANTIQUOTE = { className: 'subst', begin: /\$\{/, end: /}/, keywords: NIX_KEYWORDS }; var ATTRS = { begin: /[a-zA-Z0-9-_]+(\s*=)/, returnBegin: true, relevance: 0, contains: [ { className: 'attr', begin: /\S+/ } ] }; var STRING = { className: 'string', contains: [ANTIQUOTE], variants: [ {begin: "''", end: "''"}, {begin: '"', end: '"'} ] }; var EXPRESSIONS = [ hljs.NUMBER_MODE, hljs.HASH_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, STRING, ATTRS ]; ANTIQUOTE.contains = EXPRESSIONS; return { aliases: ["nixos"], keywords: NIX_KEYWORDS, contains: EXPRESSIONS }; }; /***/ }, /* 280 */ /***/ function(module, exports) { module.exports = function(hljs) { var CONSTANTS = { className: 'variable', begin: /\$(ADMINTOOLS|APPDATA|CDBURN_AREA|CMDLINE|COMMONFILES32|COMMONFILES64|COMMONFILES|COOKIES|DESKTOP|DOCUMENTS|EXEDIR|EXEFILE|EXEPATH|FAVORITES|FONTS|HISTORY|HWNDPARENT|INSTDIR|INTERNET_CACHE|LANGUAGE|LOCALAPPDATA|MUSIC|NETHOOD|OUTDIR|PICTURES|PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES32|PROGRAMFILES64|PROGRAMFILES|QUICKLAUNCH|RECENT|RESOURCES_LOCALIZED|RESOURCES|SENDTO|SMPROGRAMS|SMSTARTUP|STARTMENU|SYSDIR|TEMP|TEMPLATES|VIDEOS|WINDIR)/ }; var DEFINES = { // ${defines} className: 'variable', begin: /\$+{[\w\.:-]+}/ }; var VARIABLES = { // $variables className: 'variable', begin: /\$+\w+/, illegal: /\(\){}/ }; var LANGUAGES = { // $(language_strings) className: 'variable', begin: /\$+\([\w\^\.:-]+\)/ }; var PARAMETERS = { // command parameters className: 'params', begin: 
'(ARCHIVE|FILE_ATTRIBUTE_ARCHIVE|FILE_ATTRIBUTE_NORMAL|FILE_ATTRIBUTE_OFFLINE|FILE_ATTRIBUTE_READONLY|FILE_ATTRIBUTE_SYSTEM|FILE_ATTRIBUTE_TEMPORARY|HKCR|HKCU|HKDD|HKEY_CLASSES_ROOT|HKEY_CURRENT_CONFIG|HKEY_CURRENT_USER|HKEY_DYN_DATA|HKEY_LOCAL_MACHINE|HKEY_PERFORMANCE_DATA|HKEY_USERS|HKLM|HKPD|HKU|IDABORT|IDCANCEL|IDIGNORE|IDNO|IDOK|IDRETRY|IDYES|MB_ABORTRETRYIGNORE|MB_DEFBUTTON1|MB_DEFBUTTON2|MB_DEFBUTTON3|MB_DEFBUTTON4|MB_ICONEXCLAMATION|MB_ICONINFORMATION|MB_ICONQUESTION|MB_ICONSTOP|MB_OK|MB_OKCANCEL|MB_RETRYCANCEL|MB_RIGHT|MB_RTLREADING|MB_SETFOREGROUND|MB_TOPMOST|MB_USERICON|MB_YESNO|NORMAL|OFFLINE|READONLY|SHCTX|SHELL_CONTEXT|SYSTEM|TEMPORARY)' }; var COMPILER = { // !compiler_flags className: 'keyword', begin: /\!(addincludedir|addplugindir|appendfile|cd|define|delfile|echo|else|endif|error|execute|finalize|getdllversionsystem|ifdef|ifmacrodef|ifmacrondef|ifndef|if|include|insertmacro|macroend|macro|makensis|packhdr|searchparse|searchreplace|tempfile|undef|verbose|warning)/ }; var METACHARS = { // $\n, $\r, $\t, $$ className: 'subst', begin: /\$(\\[nrt]|\$)/ }; var PLUGINS = { // plug::ins className: 'class', begin: /\w+\:\:\w+/ }; var STRING = { className: 'string', variants: [ { begin: '"', end: '"' }, { begin: '\'', end: '\'' }, { begin: '`', end: '`' } ], illegal: /\n/, contains: [ METACHARS, CONSTANTS, DEFINES, VARIABLES, LANGUAGES ] }; return { case_insensitive: false, keywords: { keyword: 'Abort AddBrandingImage AddSize AllowRootDirInstall AllowSkipFiles AutoCloseWindow BGFont BGGradient BrandingText BringToFront Call CallInstDLL Caption ChangeUI CheckBitmap ClearErrors CompletedText ComponentText CopyFiles CRCCheck CreateDirectory CreateFont CreateShortCut Delete DeleteINISec DeleteINIStr DeleteRegKey DeleteRegValue DetailPrint DetailsButtonText DirText DirVar DirVerify EnableWindow EnumRegKey EnumRegValue Exch Exec ExecShell ExecWait ExpandEnvStrings File FileBufSize FileClose FileErrorText FileOpen FileRead FileReadByte FileReadUTF16LE FileReadWord FileSeek FileWrite FileWriteByte FileWriteUTF16LE FileWriteWord FindClose FindFirst FindNext FindWindow FlushINI FunctionEnd GetCurInstType GetCurrentAddress GetDlgItem GetDLLVersion GetDLLVersionLocal GetErrorLevel GetFileTime GetFileTimeLocal GetFullPathName GetFunctionAddress GetInstDirError GetLabelAddress GetTempFileName Goto HideWindow Icon IfAbort IfErrors IfFileExists IfRebootFlag IfSilent InitPluginsDir InstallButtonText InstallColors InstallDir InstallDirRegKey InstProgressFlags InstType InstTypeGetText InstTypeSetText IntCmp IntCmpU IntFmt IntOp IsWindow LangString LicenseBkColor LicenseData LicenseForceSelection LicenseLangString LicenseText LoadLanguageFile LockWindow LogSet LogText ManifestDPIAware ManifestSupportedOS MessageBox MiscButtonText Name Nop OutFile Page PageCallbacks PageExEnd Pop Push Quit ReadEnvStr ReadINIStr ReadRegDWORD ReadRegStr Reboot RegDLL Rename RequestExecutionLevel ReserveFile Return RMDir SearchPath SectionEnd SectionGetFlags SectionGetInstTypes SectionGetSize SectionGetText SectionGroupEnd SectionIn SectionSetFlags SectionSetInstTypes SectionSetSize SectionSetText SendMessage SetAutoClose SetBrandingImage SetCompress SetCompressor SetCompressorDictSize SetCtlColors SetCurInstType SetDatablockOptimize SetDateSave SetDetailsPrint SetDetailsView SetErrorLevel SetErrors SetFileAttributes SetFont SetOutPath SetOverwrite SetRebootFlag SetRegView SetShellVarContext SetSilent ShowInstDetails ShowUninstDetails ShowWindow SilentInstall SilentUnInstall Sleep SpaceTexts StrCmp StrCmpS StrCpy StrLen 
SubCaption Unicode UninstallButtonText UninstallCaption UninstallIcon UninstallSubCaption UninstallText UninstPage UnRegDLL Var VIAddVersionKey VIFileVersion VIProductVersion WindowIcon WriteINIStr WriteRegBin WriteRegDWORD WriteRegExpandStr WriteRegStr WriteUninstaller XPStyle', literal: 'admin all auto both bottom bzip2 colored components current custom directory false force hide highest ifdiff ifnewer instfiles lastused leave left license listonly lzma nevershow none normal notset off on open print right show silent silentlog smooth textonly top true try un.components un.custom un.directory un.instfiles un.license uninstConfirm user Win10 Win7 Win8 WinVista zlib' }, contains: [ hljs.HASH_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.COMMENT( ';', '$', { relevance: 0 } ), { className: 'function', beginKeywords: 'Function PageEx Section SectionGroup', end: '$' }, STRING, COMPILER, DEFINES, VARIABLES, LANGUAGES, PARAMETERS, PLUGINS, hljs.NUMBER_MODE ] }; }; /***/ }, /* 281 */ /***/ function(module, exports) { module.exports = function(hljs) { var API_CLASS = { className: 'built_in', begin: '\\b(AV|CA|CF|CG|CI|CL|CM|CN|CT|MK|MP|MTK|MTL|NS|SCN|SK|UI|WK|XC)\\w+', }; var OBJC_KEYWORDS = { keyword: 'int float while char export sizeof typedef const struct for union ' + 'unsigned long volatile static bool mutable if do return goto void ' + 'enum else break extern asm case short default double register explicit ' + 'signed typename this switch continue wchar_t inline readonly assign ' + 'readwrite self @synchronized id typeof ' + 'nonatomic super unichar IBOutlet IBAction strong weak copy ' + 'in out inout bycopy byref oneway __strong __weak __block __autoreleasing ' + '@private @protected @public @try @property @end @throw @catch @finally ' + '@autoreleasepool @synthesize @dynamic @selector @optional @required ' + '@encode @package @import @defs @compatibility_alias ' + '__bridge __bridge_transfer __bridge_retained __bridge_retain ' + '__covariant __contravariant __kindof ' + '_Nonnull _Nullable _Null_unspecified ' + '__FUNCTION__ __PRETTY_FUNCTION__ __attribute__ ' + 'getter setter retain unsafe_unretained ' + 'nonnull nullable null_unspecified null_resettable class instancetype ' + 'NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER ' + 'NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED ' + 'NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE ' + 'NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END ' + 'NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW ' + 'NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN', literal: 'false true FALSE TRUE nil YES NO NULL', built_in: 'BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once' }; var LEXEMES = /[a-zA-Z@][a-zA-Z0-9_]*/; var CLASS_KEYWORDS = '@interface @class @protocol @implementation'; return { aliases: ['mm', 'objc', 'obj-c'], keywords: OBJC_KEYWORDS, lexemes: LEXEMES, illegal: '</', contains: [ API_CLASS, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.C_NUMBER_MODE, hljs.QUOTE_STRING_MODE, { className: 'string', variants: [ { begin: '@"', end: '"', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '\'', end: '[^\\\\]\'', illegal: '[^\\\\][^\']' } ] }, { className: 'meta', begin: '#', end: '$', contains: [ { className: 'meta-string', variants: [ { begin: '\"', end: '\"' }, { begin: '<', end: '>' } ] } ] }, { className: 'class', begin: '(' + CLASS_KEYWORDS.split(' ').join('|') + ')\\b', end: '({|$)', excludeEnd: true, keywords: CLASS_KEYWORDS, lexemes: LEXEMES, contains: [ 
hljs.UNDERSCORE_TITLE_MODE ] }, { begin: '\\.'+hljs.UNDERSCORE_IDENT_RE, relevance: 0 } ] }; }; /***/ }, /* 282 */ /***/ function(module, exports) { module.exports = function(hljs) { /* missing support for heredoc-like string (OCaml 4.0.2+) */ return { aliases: ['ml'], keywords: { keyword: 'and as assert asr begin class constraint do done downto else end ' + 'exception external for fun function functor if in include ' + 'inherit! inherit initializer land lazy let lor lsl lsr lxor match method!|10 method ' + 'mod module mutable new object of open! open or private rec sig struct ' + 'then to try type val! val virtual when while with ' + /* camlp4 */ 'parser value', built_in: /* built-in types */ 'array bool bytes char exn|5 float int int32 int64 list lazy_t|5 nativeint|5 string unit ' + /* (some) types in Pervasives */ 'in_channel out_channel ref', literal: 'true false' }, illegal: /\/\/|>>/, lexemes: '[a-z_]\\w*!?', contains: [ { className: 'literal', begin: '\\[(\\|\\|)?\\]|\\(\\)', relevance: 0 }, hljs.COMMENT( '\\(\\*', '\\*\\)', { contains: ['self'] } ), { /* type variable */ className: 'symbol', begin: '\'[A-Za-z_](?!\')[\\w\']*' /* the grammar is ambiguous on how 'a'b should be interpreted but not the compiler */ }, { /* polymorphic variant */ className: 'type', begin: '`[A-Z][\\w\']*' }, { /* module or constructor */ className: 'type', begin: '\\b[A-Z][\\w\']*', relevance: 0 }, { /* don't color identifiers, but safely catch all identifiers with '*/ begin: '[a-z_]\\w*\'[\\w\']*', relevance: 0 }, hljs.inherit(hljs.APOS_STRING_MODE, {className: 'string', relevance: 0}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}), { className: 'number', begin: '\\b(0[xX][a-fA-F0-9_]+[Lln]?|' + '0[oO][0-7_]+[Lln]?|' + '0[bB][01_]+[Lln]?|' + '[0-9][0-9_]*([Lln]|(\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)', relevance: 0 }, { begin: /[-=]>/ // relevance booster } ] } }; /***/ }, /* 283 */ /***/ function(module, exports) { module.exports = function(hljs) { var SPECIAL_VARS = { className: 'keyword', begin: '\\$(f[asn]|t|vp[rtd]|children)' }, LITERALS = { className: 'literal', begin: 'false|true|PI|undef' }, NUMBERS = { className: 'number', begin: '\\b\\d+(\\.\\d+)?(e-?\\d+)?', //adds 1e5, 1e-10 relevance: 0 }, STRING = hljs.inherit(hljs.QUOTE_STRING_MODE,{illegal: null}), PREPRO = { className: 'meta', keywords: {'meta-keyword': 'include use'}, begin: 'include|use <', end: '>' }, PARAMS = { className: 'params', begin: '\\(', end: '\\)', contains: ['self', NUMBERS, STRING, SPECIAL_VARS, LITERALS] }, MODIFIERS = { begin: '[*!#%]', relevance: 0 }, FUNCTIONS = { className: 'function', beginKeywords: 'module function', end: '\\=|\\{', contains: [PARAMS, hljs.UNDERSCORE_TITLE_MODE] }; return { aliases: ['scad'], keywords: { keyword: 'function module include use for intersection_for if else \\%', literal: 'false true PI undef', built_in: 'circle square polygon text sphere cube cylinder polyhedron translate rotate scale resize mirror multmatrix color offset hull minkowski union difference intersection abs sign sin cos tan acos asin atan atan2 floor round ceil ln log pow sqrt exp rands min max concat lookup str chr search version version_num norm cross parent_module echo import import_dxf dxf_linear_extrude linear_extrude rotate_extrude surface projection render children dxf_cross dxf_dim let assign' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, NUMBERS, PREPRO, STRING, SPECIAL_VARS, MODIFIERS, FUNCTIONS ] } }; /***/ }, /* 284 */ /***/ function(module, exports) { module.exports = function(hljs) 
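/*
   Illustrative usage sketch (an assumption, not part of the original bundle): each
   numbered module in this bundle, like the Oxygene definition that follows, exports
   a function taking the hljs core and returning a language definition object. Under
   the highlight.js 9.x API such a definition is consumed roughly as follows; the
   require paths, the 'oxygene' name and the sample source string are illustrative:

     var hljs = require('highlight.js/lib/highlight');            // core without bundled languages
     hljs.registerLanguage('oxygene', require('highlight.js/lib/languages/oxygene'));
     var result = hljs.highlight('oxygene', 'method Foo; begin end;', true);
     console.log(result.value);   // HTML string with hljs-keyword and hljs-title spans

   In a webpack build such as this one, numeric module ids stand in for those require
   paths, but the registerLanguage(name, definitionFn) contract is the same.
*/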
{ var OXYGENE_KEYWORDS = 'abstract add and array as asc aspect assembly async begin break block by case class concat const copy constructor continue '+ 'create default delegate desc distinct div do downto dynamic each else empty end ensure enum equals event except exit extension external false '+ 'final finalize finalizer finally flags for forward from function future global group has if implementation implements implies in index inherited '+ 'inline interface into invariants is iterator join locked locking loop matching method mod module namespace nested new nil not notify nullable of '+ 'old on operator or order out override parallel params partial pinned private procedure property protected public queryable raise read readonly '+ 'record reintroduce remove repeat require result reverse sealed select self sequence set shl shr skip static step soft take then to true try tuple '+ 'type union unit unsafe until uses using var virtual raises volatile where while with write xor yield await mapped deprecated stdcall cdecl pascal '+ 'register safecall overload library platform reference packed strict published autoreleasepool selector strong weak unretained'; var CURLY_COMMENT = hljs.COMMENT( '{', '}', { relevance: 0 } ); var PAREN_COMMENT = hljs.COMMENT( '\\(\\*', '\\*\\)', { relevance: 10 } ); var STRING = { className: 'string', begin: '\'', end: '\'', contains: [{begin: '\'\''}] }; var CHAR_STRING = { className: 'string', begin: '(#\\d+)+' }; var FUNCTION = { className: 'function', beginKeywords: 'function constructor destructor procedure method', end: '[:;]', keywords: 'function constructor|10 destructor|10 procedure|10 method|10', contains: [ hljs.TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)', keywords: OXYGENE_KEYWORDS, contains: [STRING, CHAR_STRING] }, CURLY_COMMENT, PAREN_COMMENT ] }; return { case_insensitive: true, lexemes: /\.?\w+/, keywords: OXYGENE_KEYWORDS, illegal: '("|\\$[G-Zg-z]|\\/\\*|</|=>|->)', contains: [ CURLY_COMMENT, PAREN_COMMENT, hljs.C_LINE_COMMENT_MODE, STRING, CHAR_STRING, hljs.NUMBER_MODE, FUNCTION, { className: 'class', begin: '=\\bclass\\b', end: 'end;', keywords: OXYGENE_KEYWORDS, contains: [ STRING, CHAR_STRING, CURLY_COMMENT, PAREN_COMMENT, hljs.C_LINE_COMMENT_MODE, FUNCTION ] } ] }; }; /***/ }, /* 285 */ /***/ function(module, exports) { module.exports = function(hljs) { var CURLY_SUBCOMMENT = hljs.COMMENT( '{', '}', { contains: ['self'] } ); return { subLanguage: 'xml', relevance: 0, contains: [ hljs.COMMENT('^#', '$'), hljs.COMMENT( '\\^rem{', '}', { relevance: 10, contains: [ CURLY_SUBCOMMENT ] } ), { className: 'meta', begin: '^@(?:BASE|USE|CLASS|OPTIONS)$', relevance: 10 }, { className: 'title', begin: '@[\\w\\-]+\\[[\\w^;\\-]*\\](?:\\[[\\w^;\\-]*\\])?(?:.*)$' }, { className: 'variable', begin: '\\$\\{?[\\w\\-\\.\\:]+\\}?' }, { className: 'keyword', begin: '\\^[\\w\\-\\.\\:]+' }, { className: 'number', begin: '\\^#[0-9a-fA-F]+' }, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 286 */ /***/ function(module, exports) { module.exports = function(hljs) { var MACRO = { className: 'variable', begin: /\$[\w\d#@][\w\d_]*/ }; var TABLE = { className: 'variable', begin: /<(?!\/)/, end: />/ }; var QUOTE_STRING = { className: 'string', begin: /"/, end: /"/ }; return { aliases: ['pf.conf'], lexemes: /[a-z0-9_<>-]+/, keywords: { built_in: /* block match pass are "actions" in pf.conf(5), the rest are * lexically similar top-level commands. 
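       *
       * An illustrative pf.conf fragment of the kind this grammar is meant to
       * highlight (sample rules, an assumption rather than text from the bundle):
       *
       *   set skip on lo0
       *   block in all
       *   pass out on egress proto tcp from any to any port 443 keep state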
*/ 'block match pass load anchor|5 antispoof|10 set table', keyword: 'in out log quick on rdomain inet inet6 proto from port os to route' + 'allow-opts divert-packet divert-reply divert-to flags group icmp-type' + 'icmp6-type label once probability recieved-on rtable prio queue' + 'tos tag tagged user keep fragment for os drop' + 'af-to|10 binat-to|10 nat-to|10 rdr-to|10 bitmask least-stats random round-robin' + 'source-hash static-port' + 'dup-to reply-to route-to' + 'parent bandwidth default min max qlimit' + 'block-policy debug fingerprints hostid limit loginterface optimization' + 'reassemble ruleset-optimization basic none profile skip state-defaults' + 'state-policy timeout' + 'const counters persist' + 'no modulate synproxy state|5 floating if-bound no-sync pflow|10 sloppy' + 'source-track global rule max-src-nodes max-src-states max-src-conn' + 'max-src-conn-rate overload flush' + 'scrub|5 max-mss min-ttl no-df|10 random-id', literal: 'all any no-route self urpf-failed egress|5 unknown' }, contains: [ hljs.HASH_COMMENT_MODE, hljs.NUMBER_MODE, hljs.QUOTE_STRING_MODE, MACRO, TABLE ] }; }; /***/ }, /* 287 */ /***/ function(module, exports) { module.exports = function(hljs) { var VARIABLE = { begin: '\\$+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*' }; var PREPROCESSOR = { className: 'meta', begin: /<\?(php)?|\?>/ }; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, PREPROCESSOR], variants: [ { begin: 'b"', end: '"' }, { begin: 'b\'', end: '\'' }, hljs.inherit(hljs.APOS_STRING_MODE, {illegal: null}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}) ] }; var NUMBER = {variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE]}; return { aliases: ['php3', 'php4', 'php5', 'php6'], case_insensitive: true, keywords: 'and include_once list abstract global private echo interface as static endswitch ' + 'array null if endwhile or const for endforeach self var while isset public ' + 'protected exit foreach throw elseif include __FILE__ empty require_once do xor ' + 'return parent clone use __CLASS__ __LINE__ else break print eval new ' + 'catch __METHOD__ case exception default die require __FUNCTION__ ' + 'enddeclare final try switch continue endfor endif declare unset true false ' + 'trait goto instanceof insteadof __DIR__ __NAMESPACE__ ' + 'yield finally', contains: [ hljs.HASH_COMMENT_MODE, hljs.COMMENT('//', '$', {contains: [PREPROCESSOR]}), hljs.COMMENT( '/\\*', '\\*/', { contains: [ { className: 'doctag', begin: '@[A-Za-z]+' } ] } ), hljs.COMMENT( '__halt_compiler.+?;', false, { endsWithParent: true, keywords: '__halt_compiler', lexemes: hljs.UNDERSCORE_IDENT_RE } ), { className: 'string', begin: /<<<['"]?\w+['"]?$/, end: /^\w+;?$/, contains: [ hljs.BACKSLASH_ESCAPE, { className: 'subst', variants: [ {begin: /\$\w+/}, {begin: /\{\$/, end: /\}/} ] } ] }, PREPROCESSOR, { className: 'keyword', begin: /\$this\b/ }, VARIABLE, { // swallow composed identifiers to avoid parsing them as keywords begin: /(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/ }, { className: 'function', beginKeywords: 'function', end: /[;{]/, excludeEnd: true, illegal: '\\$|\\[|%', contains: [ hljs.UNDERSCORE_TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)', contains: [ 'self', VARIABLE, hljs.C_BLOCK_COMMENT_MODE, STRING, NUMBER ] } ] }, { className: 'class', beginKeywords: 'class interface', end: '{', excludeEnd: true, illegal: /[:\(\$"]/, contains: [ {beginKeywords: 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE ] }, { beginKeywords: 'namespace', end: ';', illegal: /[\.']/, contains: 
[hljs.UNDERSCORE_TITLE_MODE] }, { beginKeywords: 'use', end: ';', contains: [hljs.UNDERSCORE_TITLE_MODE] }, { begin: '=>' // No markup, just a relevance booster }, STRING, NUMBER ] }; }; /***/ }, /* 288 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: 'actor addressof and as be break class compile_error compile_intrinsic' + 'consume continue delegate digestof do else elseif embed end error' + 'for fun if ifdef in interface is isnt lambda let match new not object' + 'or primitive recover repeat return struct then trait try type until ' + 'use var where while with xor', meta: 'iso val tag trn box ref', literal: 'this false true' }; var TRIPLE_QUOTE_STRING_MODE = { className: 'string', begin: '"""', end: '"""', relevance: 10 }; var QUOTE_STRING_MODE = { className: 'string', begin: '"', end: '"', contains: [hljs.BACKSLASH_ESCAPE] }; var SINGLE_QUOTE_CHAR_MODE = { className: 'string', begin: '\'', end: '\'', contains: [hljs.BACKSLASH_ESCAPE], relevance: 0 }; var TYPE_NAME = { className: 'type', begin: '\\b_?[A-Z][\\w]*', relevance: 0 }; var PRIMED_NAME = { begin: hljs.IDENT_RE + '\'', relevance: 0 }; var CLASS = { className: 'class', beginKeywords: 'class actor', end: '$', contains: [ hljs.TITLE_MODE, hljs.C_LINE_COMMENT_MODE ] } var FUNCTION = { className: 'function', beginKeywords: 'new fun', end: '=>', contains: [ hljs.TITLE_MODE, { begin: /\(/, end: /\)/, contains: [ TYPE_NAME, PRIMED_NAME, hljs.C_NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE ] }, { begin: /:/, endsWithParent: true, contains: [TYPE_NAME] }, hljs.C_LINE_COMMENT_MODE ] } return { keywords: KEYWORDS, contains: [ CLASS, FUNCTION, TYPE_NAME, TRIPLE_QUOTE_STRING_MODE, QUOTE_STRING_MODE, SINGLE_QUOTE_CHAR_MODE, PRIMED_NAME, hljs.C_NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }; }; /***/ }, /* 289 */ /***/ function(module, exports) { module.exports = function(hljs) { var BACKTICK_ESCAPE = { begin: '`[\\s\\S]', relevance: 0 }; var VAR = { className: 'variable', variants: [ {begin: /\$[\w\d][\w\d_:]*/} ] }; var LITERAL = { className: 'literal', begin: /\$(null|true|false)\b/ }; var QUOTE_STRING = { className: 'string', variants: [ { begin: /"/, end: /"/ }, { begin: /@"/, end: /^"@/ } ], contains: [ BACKTICK_ESCAPE, VAR, { className: 'variable', begin: /\$[A-z]/, end: /[^A-z]/ } ] }; var APOS_STRING = { className: 'string', variants: [ { begin: /'/, end: /'/ }, { begin: /@'/, end: /^'@/ } ] }; var PS_HELPTAGS = { className: 'doctag', variants: [ /* no paramater help tags */ { begin: /\.(synopsis|description|example|inputs|outputs|notes|link|component|role|functionality)/ }, /* one parameter help tags */ { begin: /\.(parameter|forwardhelptargetname|forwardhelpcategory|remotehelprunspace|externalhelp)\s+\S+/ } ] }; var PS_COMMENT = hljs.inherit( hljs.COMMENT(null, null), { variants: [ /* single-line comment */ { begin: /#/, end: /$/ }, /* multi-line comment */ { begin: /<#/, end: /#>/ } ], contains: [PS_HELPTAGS] } ); return { aliases: ['ps'], lexemes: /-?[A-z\.\-]+/, case_insensitive: true, keywords: { keyword: 'if else foreach return function do while until elseif begin for trap data dynamicparam end break throw param continue finally in switch exit filter try process catch', built_in: 'Add-Computer Add-Content Add-History Add-JobTrigger Add-Member Add-PSSnapin Add-Type Checkpoint-Computer Clear-Content Clear-EventLog Clear-History Clear-Host Clear-Item Clear-ItemProperty Clear-Variable Compare-Object Complete-Transaction Connect-PSSession Connect-WSMan Convert-Path 
ConvertFrom-Csv ConvertFrom-Json ConvertFrom-SecureString ConvertFrom-StringData ConvertTo-Csv ConvertTo-Html ConvertTo-Json ConvertTo-SecureString ConvertTo-Xml Copy-Item Copy-ItemProperty Debug-Process Disable-ComputerRestore Disable-JobTrigger Disable-PSBreakpoint Disable-PSRemoting Disable-PSSessionConfiguration Disable-WSManCredSSP Disconnect-PSSession Disconnect-WSMan Disable-ScheduledJob Enable-ComputerRestore Enable-JobTrigger Enable-PSBreakpoint Enable-PSRemoting Enable-PSSessionConfiguration Enable-ScheduledJob Enable-WSManCredSSP Enter-PSSession Exit-PSSession Export-Alias Export-Clixml Export-Console Export-Counter Export-Csv Export-FormatData Export-ModuleMember Export-PSSession ForEach-Object Format-Custom Format-List Format-Table Format-Wide Get-Acl Get-Alias Get-AuthenticodeSignature Get-ChildItem Get-Command Get-ComputerRestorePoint Get-Content Get-ControlPanelItem Get-Counter Get-Credential Get-Culture Get-Date Get-Event Get-EventLog Get-EventSubscriber Get-ExecutionPolicy Get-FormatData Get-Host Get-HotFix Get-Help Get-History Get-IseSnippet Get-Item Get-ItemProperty Get-Job Get-JobTrigger Get-Location Get-Member Get-Module Get-PfxCertificate Get-Process Get-PSBreakpoint Get-PSCallStack Get-PSDrive Get-PSProvider Get-PSSession Get-PSSessionConfiguration Get-PSSnapin Get-Random Get-ScheduledJob Get-ScheduledJobOption Get-Service Get-TraceSource Get-Transaction Get-TypeData Get-UICulture Get-Unique Get-Variable Get-Verb Get-WinEvent Get-WmiObject Get-WSManCredSSP Get-WSManInstance Group-Object Import-Alias Import-Clixml Import-Counter Import-Csv Import-IseSnippet Import-LocalizedData Import-PSSession Import-Module Invoke-AsWorkflow Invoke-Command Invoke-Expression Invoke-History Invoke-Item Invoke-RestMethod Invoke-WebRequest Invoke-WmiMethod Invoke-WSManAction Join-Path Limit-EventLog Measure-Command Measure-Object Move-Item Move-ItemProperty New-Alias New-Event New-EventLog New-IseSnippet New-Item New-ItemProperty New-JobTrigger New-Object New-Module New-ModuleManifest New-PSDrive New-PSSession New-PSSessionConfigurationFile New-PSSessionOption New-PSTransportOption New-PSWorkflowExecutionOption New-PSWorkflowSession New-ScheduledJobOption New-Service New-TimeSpan New-Variable New-WebServiceProxy New-WinEvent New-WSManInstance New-WSManSessionOption Out-Default Out-File Out-GridView Out-Host Out-Null Out-Printer Out-String Pop-Location Push-Location Read-Host Receive-Job Register-EngineEvent Register-ObjectEvent Register-PSSessionConfiguration Register-ScheduledJob Register-WmiEvent Remove-Computer Remove-Event Remove-EventLog Remove-Item Remove-ItemProperty Remove-Job Remove-JobTrigger Remove-Module Remove-PSBreakpoint Remove-PSDrive Remove-PSSession Remove-PSSnapin Remove-TypeData Remove-Variable Remove-WmiObject Remove-WSManInstance Rename-Computer Rename-Item Rename-ItemProperty Reset-ComputerMachinePassword Resolve-Path Restart-Computer Restart-Service Restore-Computer Resume-Job Resume-Service Save-Help Select-Object Select-String Select-Xml Send-MailMessage Set-Acl Set-Alias Set-AuthenticodeSignature Set-Content Set-Date Set-ExecutionPolicy Set-Item Set-ItemProperty Set-JobTrigger Set-Location Set-PSBreakpoint Set-PSDebug Set-PSSessionConfiguration Set-ScheduledJob Set-ScheduledJobOption Set-Service Set-StrictMode Set-TraceSource Set-Variable Set-WmiInstance Set-WSManInstance Set-WSManQuickConfig Show-Command Show-ControlPanelItem Show-EventLog Sort-Object Split-Path Start-Job Start-Process Start-Service Start-Sleep Start-Transaction Start-Transcript Stop-Computer 
Stop-Job Stop-Process Stop-Service Stop-Transcript Suspend-Job Suspend-Service Tee-Object Test-ComputerSecureChannel Test-Connection Test-ModuleManifest Test-Path Test-PSSessionConfigurationFile Trace-Command Unblock-File Undo-Transaction Unregister-Event Unregister-PSSessionConfiguration Unregister-ScheduledJob Update-FormatData Update-Help Update-List Update-TypeData Use-Transaction Wait-Event Wait-Job Wait-Process Where-Object Write-Debug Write-Error Write-EventLog Write-Host Write-Output Write-Progress Write-Verbose Write-Warning Add-MDTPersistentDrive Disable-MDTMonitorService Enable-MDTMonitorService Get-MDTDeploymentShareStatistics Get-MDTMonitorData Get-MDTOperatingSystemCatalog Get-MDTPersistentDrive Import-MDTApplication Import-MDTDriver Import-MDTOperatingSystem Import-MDTPackage Import-MDTTaskSequence New-MDTDatabase Remove-MDTMonitorData Remove-MDTPersistentDrive Restore-MDTPersistentDrive Set-MDTMonitorData Test-MDTDeploymentShare Test-MDTMonitorData Update-MDTDatabaseSchema Update-MDTDeploymentShare Update-MDTLinkedDS Update-MDTMedia Update-MDTMedia Add-VamtProductKey Export-VamtData Find-VamtManagedMachine Get-VamtConfirmationId Get-VamtProduct Get-VamtProductKey Import-VamtData Initialize-VamtData Install-VamtConfirmationId Install-VamtProductActivation Install-VamtProductKey Update-VamtProduct', nomarkup: '-ne -eq -lt -gt -ge -le -not -like -notlike -match -notmatch -contains -notcontains -in -notin -replace' }, contains: [ BACKTICK_ESCAPE, hljs.NUMBER_MODE, QUOTE_STRING, APOS_STRING, LITERAL, VAR, PS_COMMENT ] }; }; /***/ }, /* 290 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: 'BufferedReader PVector PFont PImage PGraphics HashMap boolean byte char color ' + 'double float int long String Array FloatDict FloatList IntDict IntList JSONArray JSONObject ' + 'Object StringDict StringList Table TableRow XML ' + // Java keywords 'false synchronized int abstract float private char boolean static null if const ' + 'for true while long throw strictfp finally protected import native final return void ' + 'enum else break transient new catch instanceof byte super volatile case assert short ' + 'package default double public try this switch continue throws protected public private', literal: 'P2D P3D HALF_PI PI QUARTER_PI TAU TWO_PI', title: 'setup draw', built_in: 'displayHeight displayWidth mouseY mouseX mousePressed pmouseX pmouseY key ' + 'keyCode pixels focused frameCount frameRate height width ' + 'size createGraphics beginDraw createShape loadShape PShape arc ellipse line point ' + 'quad rect triangle bezier bezierDetail bezierPoint bezierTangent curve curveDetail curvePoint ' + 'curveTangent curveTightness shape shapeMode beginContour beginShape bezierVertex curveVertex ' + 'endContour endShape quadraticVertex vertex ellipseMode noSmooth rectMode smooth strokeCap ' + 'strokeJoin strokeWeight mouseClicked mouseDragged mouseMoved mousePressed mouseReleased ' + 'mouseWheel keyPressed keyPressedkeyReleased keyTyped print println save saveFrame day hour ' + 'millis minute month second year background clear colorMode fill noFill noStroke stroke alpha ' + 'blue brightness color green hue lerpColor red saturation modelX modelY modelZ screenX screenY ' + 'screenZ ambient emissive shininess specular add createImage beginCamera camera endCamera frustum ' + 'ortho perspective printCamera printProjection cursor frameRate noCursor exit loop noLoop popStyle ' + 'pushStyle redraw binary boolean byte char float hex int str unbinary unhex 
join match matchAll nf ' + 'nfc nfp nfs split splitTokens trim append arrayCopy concat expand reverse shorten sort splice subset ' + 'box sphere sphereDetail createInput createReader loadBytes loadJSONArray loadJSONObject loadStrings ' + 'loadTable loadXML open parseXML saveTable selectFolder selectInput beginRaw beginRecord createOutput ' + 'createWriter endRaw endRecord PrintWriter saveBytes saveJSONArray saveJSONObject saveStream saveStrings ' + 'saveXML selectOutput popMatrix printMatrix pushMatrix resetMatrix rotate rotateX rotateY rotateZ scale ' + 'shearX shearY translate ambientLight directionalLight lightFalloff lights lightSpecular noLights normal ' + 'pointLight spotLight image imageMode loadImage noTint requestImage tint texture textureMode textureWrap ' + 'blend copy filter get loadPixels set updatePixels blendMode loadShader PShader resetShader shader createFont ' + 'loadFont text textFont textAlign textLeading textMode textSize textWidth textAscent textDescent abs ceil ' + 'constrain dist exp floor lerp log mag map max min norm pow round sq sqrt acos asin atan atan2 cos degrees ' + 'radians sin tan noise noiseDetail noiseSeed random randomGaussian randomSeed' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 291 */ /***/ function(module, exports) { module.exports = function(hljs) { return { contains: [ hljs.C_NUMBER_MODE, { begin: '[a-zA-Z_][\\da-zA-Z_]+\\.[\\da-zA-Z_]{1,3}', end: ':', excludeEnd: true }, { begin: '(ncalls|tottime|cumtime)', end: '$', keywords: 'ncalls tottime|10 cumtime|10 filename', relevance: 10 }, { begin: 'function calls', end: '$', contains: [hljs.C_NUMBER_MODE], relevance: 10 }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { className: 'string', begin: '\\(', end: '\\)$', excludeBegin: true, excludeEnd: true, relevance: 0 } ] }; }; /***/ }, /* 292 */ /***/ function(module, exports) { module.exports = function(hljs) { var ATOM = { begin: /[a-z][A-Za-z0-9_]*/, relevance: 0 }; var VAR = { className: 'symbol', variants: [ {begin: /[A-Z][a-zA-Z0-9_]*/}, {begin: /_[A-Za-z0-9_]*/}, ], relevance: 0 }; var PARENTED = { begin: /\(/, end: /\)/, relevance: 0 }; var LIST = { begin: /\[/, end: /\]/ }; var LINE_COMMENT = { className: 'comment', begin: /%/, end: /$/, contains: [hljs.PHRASAL_WORDS_MODE] }; var BACKTICK_STRING = { className: 'string', begin: /`/, end: /`/, contains: [hljs.BACKSLASH_ESCAPE] }; var CHAR_CODE = { className: 'string', // 0'a etc.
begin: /0\'(\\\'|.)/ }; var SPACE_CODE = { className: 'string', begin: /0\'\\s/ // 0'\s }; var PRED_OP = { // relevance booster begin: /:-/ }; var inner = [ ATOM, VAR, PARENTED, PRED_OP, LIST, LINE_COMMENT, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, BACKTICK_STRING, CHAR_CODE, SPACE_CODE, hljs.C_NUMBER_MODE ]; PARENTED.contains = inner; LIST.contains = inner; return { contains: inner.concat([ {begin: /\.$/} // relevance booster ]) }; }; /***/ }, /* 293 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: 'package import option optional required repeated group', built_in: 'double float int32 int64 uint32 uint64 sint32 sint64 ' + 'fixed32 fixed64 sfixed32 sfixed64 bool string bytes', literal: 'true false' }, contains: [ hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, { className: 'class', beginKeywords: 'message enum service', end: /\{/, illegal: /\n/, contains: [ hljs.inherit(hljs.TITLE_MODE, { starts: {endsWithParent: true, excludeEnd: true} // hack: eating everything after the first title }) ] }, { className: 'function', beginKeywords: 'rpc', end: /;/, excludeEnd: true, keywords: 'rpc returns' }, { begin: /^\s*[A-Z_]+/, end: /\s*=/, excludeEnd: true } ] }; }; /***/ }, /* 294 */ /***/ function(module, exports) { module.exports = function(hljs) { var PUPPET_KEYWORDS = { keyword: /* language keywords */ 'and case default else elsif false if in import enherits node or true undef unless main settings $string ', literal: /* metaparameters */ 'alias audit before loglevel noop require subscribe tag ' + /* normal attributes */ 'owner ensure group mode name|0 changes context force incl lens load_path onlyif provider returns root show_diff type_check ' + 'en_address ip_address realname command environment hour monute month monthday special target weekday '+ 'creates cwd ogoutput refresh refreshonly tries try_sleep umask backup checksum content ctime force ignore ' + 'links mtime purge recurse recurselimit replace selinux_ignore_defaults selrange selrole seltype seluser source ' + 'souirce_permissions sourceselect validate_cmd validate_replacement allowdupe attribute_membership auth_membership forcelocal gid '+ 'ia_load_module members system host_aliases ip allowed_trunk_vlans description device_url duplex encapsulation etherchannel ' + 'native_vlan speed principals allow_root auth_class auth_type authenticate_user k_of_n mechanisms rule session_owner shared options ' + 'device fstype enable hasrestart directory present absent link atboot blockdevice device dump pass remounts poller_tag use ' + 'message withpath adminfile allow_virtual allowcdrom category configfiles flavor install_options instance package_settings platform ' + 'responsefile status uninstall_options vendor unless_system_user unless_uid binary control flags hasstatus manifest pattern restart running ' + 'start stop allowdupe auths expiry gid groups home iterations key_membership keys managehome membership password password_max_age ' + 'password_min_age profile_membership profiles project purge_ssh_keys role_membership roles salt shell uid baseurl cost descr enabled ' + 'enablegroups exclude failovermethod gpgcheck gpgkey http_caching include includepkgs keepalive metadata_expire metalink mirrorlist ' + 'priority protect proxy proxy_password proxy_username repo_gpgcheck s3_enabled skip_if_unavailable sslcacert sslclientcert sslclientkey ' + 'sslverify mounted', built_in: /* core facts */ 'architecture augeasversion blockdevices 
boardmanufacturer boardproductname boardserialnumber cfkey dhcp_servers ' + 'domain ec2_ ec2_userdata facterversion filesystems ldom fqdn gid hardwareisa hardwaremodel hostname id|0 interfaces '+ 'ipaddress ipaddress_ ipaddress6 ipaddress6_ iphostnumber is_virtual kernel kernelmajversion kernelrelease kernelversion ' + 'kernelrelease kernelversion lsbdistcodename lsbdistdescription lsbdistid lsbdistrelease lsbmajdistrelease lsbminordistrelease ' + 'lsbrelease macaddress macaddress_ macosx_buildversion macosx_productname macosx_productversion macosx_productverson_major ' + 'macosx_productversion_minor manufacturer memoryfree memorysize netmask metmask_ network_ operatingsystem operatingsystemmajrelease '+ 'operatingsystemrelease osfamily partitions path physicalprocessorcount processor processorcount productname ps puppetversion '+ 'rubysitedir rubyversion selinux selinux_config_mode selinux_config_policy selinux_current_mode selinux_current_mode selinux_enforced '+ 'selinux_policyversion serialnumber sp_ sshdsakey sshecdsakey sshrsakey swapencrypted swapfree swapsize timezone type uniqueid uptime '+ 'uptime_days uptime_hours uptime_seconds uuid virtual vlans xendomains zfs_version zonenae zones zpool_version' }; var COMMENT = hljs.COMMENT('#', '$'); var IDENT_RE = '([A-Za-z_]|::)(\\w|::)*'; var TITLE = hljs.inherit(hljs.TITLE_MODE, {begin: IDENT_RE}); var VARIABLE = {className: 'variable', begin: '\\$' + IDENT_RE}; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE, VARIABLE], variants: [ {begin: /'/, end: /'/}, {begin: /"/, end: /"/} ] }; return { aliases: ['pp'], contains: [ COMMENT, VARIABLE, STRING, { beginKeywords: 'class', end: '\\{|;', illegal: /=/, contains: [TITLE, COMMENT] }, { beginKeywords: 'define', end: /\{/, contains: [ { className: 'section', begin: hljs.IDENT_RE, endsParent: true } ] }, { begin: hljs.IDENT_RE + '\\s+\\{', returnBegin: true, end: /\S/, contains: [ { className: 'keyword', begin: hljs.IDENT_RE }, { begin: /\{/, end: /\}/, keywords: PUPPET_KEYWORDS, relevance: 0, contains: [ STRING, COMMENT, { begin:'[a-zA-Z_]+\\s*=>', returnBegin: true, end: '=>', contains: [ { className: 'attr', begin: hljs.IDENT_RE, } ] }, { className: 'number', begin: '(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b', relevance: 0 }, VARIABLE ] } ], relevance: 0 } ] } }; /***/ }, /* 295 */ /***/ function(module, exports) { module.exports = // Base deafult colors in PB IDE: background: #FFFFDF; foreground: #000000; function(hljs) { var STRINGS = { // PB IDE color: #0080FF (Azure Radiance) className: 'string', begin: '(~)?"', end: '"', illegal: '\\n' }; var CONSTANTS = { // PB IDE color: #924B72 (Cannon Pink) // "#" + a letter or underscore + letters, digits or underscores + (optional) "$" className: 'symbol', begin: '#[a-zA-Z_]\\w*\\$?' }; return { aliases: ['pb', 'pbi'], keywords: // PB IDE color: #006666 (Blue Stone) + Bold // The following keywords list was taken and adapted from GuShH's PureBasic language file for GeSHi... 
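// Editorial illustration (not part of the original grammar): a tiny PureBasic
// snippet of the kind the keyword list below and the 'function' rule further
// down are meant to match; the procedure name "Add" is only an example.
//   Procedure.i Add(a.i, b.i)
//     ProcedureReturn a + b
//   EndProcedure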
'And As Break CallDebugger Case CompilerCase CompilerDefault CompilerElse CompilerEndIf CompilerEndSelect ' + 'CompilerError CompilerIf CompilerSelect Continue Data DataSection EndDataSection Debug DebugLevel ' + 'Default Define Dim DisableASM DisableDebugger DisableExplicit Else ElseIf EnableASM ' + 'EnableDebugger EnableExplicit End EndEnumeration EndIf EndImport EndInterface EndMacro EndProcedure ' + 'EndSelect EndStructure EndStructureUnion EndWith Enumeration Extends FakeReturn For Next ForEach ' + 'ForEver Global Gosub Goto If Import ImportC IncludeBinary IncludeFile IncludePath Interface Macro ' + 'NewList Not Or ProcedureReturn Protected Prototype ' + 'PrototypeC Read ReDim Repeat Until Restore Return Select Shared Static Step Structure StructureUnion ' + 'Swap To Wend While With XIncludeFile XOr ' + 'Procedure ProcedureC ProcedureCDLL ProcedureDLL Declare DeclareC DeclareCDLL DeclareDLL', contains: [ // COMMENTS | PB IDE color: #00AAAA (Persian Green) hljs.COMMENT(';', '$', {relevance: 0}), { // PROCEDURES DEFINITIONS className: 'function', begin: '\\b(Procedure|Declare)(C|CDLL|DLL)?\\b', end: '\\(', excludeEnd: true, returnBegin: true, contains: [ { // PROCEDURE KEYWORDS | PB IDE color: #006666 (Blue Stone) + Bold className: 'keyword', begin: '(Procedure|Declare)(C|CDLL|DLL)?', excludeEnd: true }, { // PROCEDURE RETURN TYPE SETTING | PB IDE color: #000000 (Black) className: 'type', begin: '\\.\\w*' // end: ' ', }, hljs.UNDERSCORE_TITLE_MODE // PROCEDURE NAME | PB IDE color: #006666 (Blue Stone) ] }, STRINGS, CONSTANTS ] }; }; /***/ }, /* 296 */ /***/ function(module, exports) { module.exports = function(hljs) { var PROMPT = { className: 'meta', begin: /^(>>>|\.\.\.) / }; var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE], variants: [ { begin: /(u|b)?r?'''/, end: /'''/, contains: [PROMPT], relevance: 10 }, { begin: /(u|b)?r?"""/, end: /"""/, contains: [PROMPT], relevance: 10 }, { begin: /(u|r|ur)'/, end: /'/, relevance: 10 }, { begin: /(u|r|ur)"/, end: /"/, relevance: 10 }, { begin: /(b|br)'/, end: /'/ }, { begin: /(b|br)"/, end: /"/ }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] }; var NUMBER = { className: 'number', relevance: 0, variants: [ {begin: hljs.BINARY_NUMBER_RE + '[lLjJ]?'}, {begin: '\\b(0o[0-7]+)[lLjJ]?'}, {begin: hljs.C_NUMBER_RE + '[lLjJ]?'} ] }; var PARAMS = { className: 'params', begin: /\(/, end: /\)/, contains: ['self', PROMPT, NUMBER, STRING] }; return { aliases: ['py', 'gyp'], keywords: { keyword: 'and elif is global as in if from raise for except finally print import pass return ' + 'exec else break not with class assert yield try while continue del or def lambda ' + 'async await nonlocal|10 None True False', built_in: 'Ellipsis NotImplemented' }, illegal: /(<\/|->|\?)|=>/, contains: [ PROMPT, NUMBER, STRING, hljs.HASH_COMMENT_MODE, { variants: [ {className: 'function', beginKeywords: 'def'}, {className: 'class', beginKeywords: 'class'} ], end: /:/, illegal: /[${=;\n,]/, contains: [ hljs.UNDERSCORE_TITLE_MODE, PARAMS, { begin: /->/, endsWithParent: true, keywords: 'None' } ] }, { className: 'meta', begin: /^[\t ]*@/, end: /$/ }, { begin: /\b(print|exec)\(/ // don’t highlight keywords-turned-functions in Python 3 } ] }; }; /***/ }, /* 297 */ /***/ function(module, exports) { module.exports = function(hljs) { var Q_KEYWORDS = { keyword: 'do while select delete by update from', literal: '0b 1b', built_in: 'neg not null string reciprocal floor ceiling signum mod xbar xlog and or each scan over prior mmu lsq inv md5 ltime gtime count first 
var dev med cov cor all any rand sums prds mins maxs fills deltas ratios avgs differ prev next rank reverse iasc idesc asc desc msum mcount mavg mdev xrank mmin mmax xprev rotate distinct group where flip type key til get value attr cut set upsert raze union inter except cross sv vs sublist enlist read0 read1 hopen hclose hdel hsym hcount peach system ltrim rtrim trim lower upper ssr view tables views cols xcols keys xkey xcol xasc xdesc fkeys meta lj aj aj0 ij pj asof uj ww wj wj1 fby xgroup ungroup ej save load rsave rload show csv parse eval min max avg wavg wsum sin cos tan sum', type: '`float `double int `timestamp `timespan `datetime `time `boolean `symbol `char `byte `short `long `real `month `date `minute `second `guid' }; return { aliases:['k', 'kdb'], keywords: Q_KEYWORDS, lexemes: /(`?)[A-Za-z0-9_]+\b/, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 298 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: 'in of on if for while finally var new function do return void else break catch ' + 'instanceof with throw case default try this switch continue typeof delete ' + 'let yield const export super debugger as async await import', literal: 'true false null undefined NaN Infinity', built_in: 'eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent ' + 'encodeURI encodeURIComponent escape unescape Object Function Boolean Error ' + 'EvalError InternalError RangeError ReferenceError StopIteration SyntaxError ' + 'TypeError URIError Number Math Date String RegExp Array Float32Array ' + 'Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array ' + 'Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require ' + 'module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect ' + 'Behavior bool color coordinate date double enumeration font geocircle georectangle ' + 'geoshape int list matrix4x4 parent point quaternion real rect ' + 'size string url var variant vector2d vector3d vector4d ' + 'Promise' }; var QML_IDENT_RE = '[a-zA-Z_][a-zA-Z0-9\\._]*'; // Isolate property statements. Ends at a :, =, ;, ,, a comment or end of line. // Use property class. var PROPERTY = { className: 'keyword', begin: '\\bproperty\\b', starts: { className: 'string', end: '(:|=|;|,|//|/\\*|$)', returnEnd: true } }; // Isolate signal statements. Ends at a ), a comment or end of line. // Use property class. var SIGNAL = { className: 'keyword', begin: '\\bsignal\\b', starts: { className: 'string', end: '(\\(|:|=|;|,|//|/\\*|$)', returnEnd: true } }; // id: is special in QML. When we see id: we want to mark the id: as attribute and // emphasize the token following. var ID_ID = { className: 'attribute', begin: '\\bid\\s*:', starts: { className: 'string', end: QML_IDENT_RE, returnEnd: false } }; // Find QML object attribute. An attribute is a QML identifier followed by :. // Unfortunately it's hard to know where it ends, as it may contain scalars, // objects, object definitions, or javascript. The true end is either when the parent // ends or the next attribute is detected. var QML_ATTRIBUTE = { begin: QML_IDENT_RE + '\\s*:', returnBegin: true, contains: [ { className: 'attribute', begin: QML_IDENT_RE, end: '\\s*:', excludeEnd: true, relevance: 0 } ], relevance: 0 }; // Find QML object. A QML object is a QML identifier followed by { and ends at the matching }. // All we really care about is finding IDENT followed by { and just mark up the IDENT and ignore the {.
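// Editorial illustration (added note, not from the original source): for QML input like
//   Rectangle { id: box; width: parent.width }
// the QML_OBJECT rule defined just below marks the identifier in front of '{'
// ("Rectangle"), ID_ID above marks "id:" plus the token that follows it, and
// QML_ATTRIBUTE marks plain attribute names such as "width" before their ':'.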
var QML_OBJECT = { begin: QML_IDENT_RE + '\\s*{', end: '{', returnBegin: true, relevance: 0, contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: QML_IDENT_RE}) ] }; return { aliases: ['qt'], case_insensitive: false, keywords: KEYWORDS, contains: [ { className: 'meta', begin: /^\s*['"]use (strict|asm)['"]/ }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { // template string className: 'string', begin: '`', end: '`', contains: [ hljs.BACKSLASH_ESCAPE, { className: 'subst', begin: '\\$\\{', end: '\\}' } ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'number', variants: [ { begin: '\\b(0[bB][01]+)' }, { begin: '\\b(0[oO][0-7]+)' }, { begin: hljs.C_NUMBER_RE } ], relevance: 0 }, { // "value" container begin: '(' + hljs.RE_STARTERS_RE + '|\\b(case|return|throw)\\b)\\s*', keywords: 'return throw case', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.REGEXP_MODE, { // E4X / JSX begin: /</, end: />\s*[);\]]/, relevance: 0, subLanguage: 'xml' } ], relevance: 0 }, SIGNAL, PROPERTY, { className: 'function', beginKeywords: 'function', end: /\{/, excludeEnd: true, contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: /[A-Za-z$_][0-9A-Za-z$_]*/}), { className: 'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] } ], illegal: /\[|%/ }, { begin: '\\.' + hljs.IDENT_RE, relevance: 0 // hack: prevents detection of keywords after dots }, ID_ID, QML_ATTRIBUTE, QML_OBJECT ], illegal: /#/ }; }; /***/ }, /* 299 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '([a-zA-Z]|\\.[a-zA-Z.])[a-zA-Z0-9._]*'; return { contains: [ hljs.HASH_COMMENT_MODE, { begin: IDENT_RE, lexemes: IDENT_RE, keywords: { keyword: 'function if in break next repeat else for return switch while try tryCatch ' + 'stop warning require library attach detach source setMethod setGeneric ' + 'setGroupGeneric setClass ...', literal: 'NULL NA TRUE FALSE T F Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 ' + 'NA_complex_|10' }, relevance: 0 }, { // hex value className: 'number', begin: "0[xX][0-9a-fA-F]+[Li]?\\b", relevance: 0 }, { // explicit integer className: 'number', begin: "\\d+(?:[eE][+\\-]?\\d*)?L\\b", relevance: 0 }, { // number with trailing decimal className: 'number', begin: "\\d+\\.(?!\\d)(?:i\\b)?", relevance: 0 }, { // number className: 'number', begin: "\\d+(?:\\.\\d*)?(?:[eE][+\\-]?\\d*)?i?\\b", relevance: 0 }, { // number with leading decimal className: 'number', begin: "\\.\\d+(?:[eE][+\\-]?\\d*)?i?\\b", relevance: 0 }, { // escaped identifier begin: '`', end: '`', relevance: 0 }, { className: 'string', contains: [hljs.BACKSLASH_ESCAPE], variants: [ {begin: '"', end: '"'}, {begin: "'", end: "'"} ] } ] }; }; /***/ }, /* 300 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: 'ArchiveRecord AreaLightSource Atmosphere Attribute AttributeBegin AttributeEnd Basis ' + 'Begin Blobby Bound Clipping ClippingPlane Color ColorSamples ConcatTransform Cone ' + 'CoordinateSystem CoordSysTransform CropWindow Curves Cylinder DepthOfField Detail ' + 'DetailRange Disk Displacement Display End ErrorHandler Exposure Exterior Format ' + 'FrameAspectRatio FrameBegin FrameEnd GeneralPolygon GeometricApproximation Geometry ' + 'Hider Hyperboloid Identity Illuminate Imager Interior LightSource ' + 'MakeCubeFaceEnvironment MakeLatLongEnvironment MakeShadow MakeTexture Matte ' + 'MotionBegin MotionEnd NuPatch ObjectBegin ObjectEnd ObjectInstance Opacity 
Option ' + 'Orientation Paraboloid Patch PatchMesh Perspective PixelFilter PixelSamples ' + 'PixelVariance Points PointsGeneralPolygons PointsPolygons Polygon Procedural Projection ' + 'Quantize ReadArchive RelativeDetail ReverseOrientation Rotate Scale ScreenWindow ' + 'ShadingInterpolation ShadingRate Shutter Sides Skew SolidBegin SolidEnd Sphere ' + 'SubdivisionMesh Surface TextureCoordinates Torus Transform TransformBegin TransformEnd ' + 'TransformPoints Translate TrimCurve WorldBegin WorldEnd', illegal: '</', contains: [ hljs.HASH_COMMENT_MODE, hljs.C_NUMBER_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE ] }; }; /***/ }, /* 301 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENTIFIER = '[a-zA-Z-_][^\\n{]+\\{'; var PROPERTY = { className: 'attribute', begin: /[a-zA-Z-_]+/, end: /\s*:/, excludeEnd: true, starts: { end: ';', relevance: 0, contains: [ { className: 'variable', begin: /\.[a-zA-Z-_]+/ }, { className: 'keyword', begin: /\(optional\)/ } ] } }; return { aliases: ['graph', 'instances'], case_insensitive: true, keywords: 'import', contains: [ // Facet sections { begin: '^facet ' + IDENTIFIER, end: '}', keywords: 'facet', contains: [ PROPERTY, hljs.HASH_COMMENT_MODE ] }, // Instance sections { begin: '^\\s*instance of ' + IDENTIFIER, end: '}', keywords: 'name count channels instance-data instance-state instance of', illegal: /\S/, contains: [ 'self', PROPERTY, hljs.HASH_COMMENT_MODE ] }, // Component sections { begin: '^' + IDENTIFIER, end: '}', contains: [ PROPERTY, hljs.HASH_COMMENT_MODE ] }, // Comments hljs.HASH_COMMENT_MODE ] }; }; /***/ }, /* 302 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: 'float color point normal vector matrix while for if do return else break extern continue', built_in: 'abs acos ambient area asin atan atmosphere attribute calculatenormal ceil cellnoise ' + 'clamp comp concat cos degrees depth Deriv diffuse distance Du Dv environment exp ' + 'faceforward filterstep floor format fresnel incident length lightsource log match ' + 'max min mod noise normalize ntransform opposite option phong pnoise pow printf ' + 'ptlined radians random reflect refract renderinfo round setcomp setxcomp setycomp ' + 'setzcomp shadow sign sin smoothstep specular specularbrdf spline sqrt step tan ' + 'texture textureinfo trace transform vtransform xcomp ycomp zcomp' }, illegal: '</', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, hljs.C_NUMBER_MODE, { className: 'meta', begin: '#', end: '$' }, { className: 'class', beginKeywords: 'surface displacement light volume imager', end: '\\(' }, { beginKeywords: 'illuminate illuminance gather', end: '\\(' } ] }; }; /***/ }, /* 303 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: 'BILL_PERIOD BILL_START BILL_STOP RS_EFFECTIVE_START RS_EFFECTIVE_STOP RS_JURIS_CODE RS_OPCO_CODE ' + 'INTDADDATTRIBUTE|5 INTDADDVMSG|5 INTDBLOCKOP|5 INTDBLOCKOPNA|5 INTDCLOSE|5 INTDCOUNT|5 ' + 'INTDCOUNTSTATUSCODE|5 INTDCREATEMASK|5 INTDCREATEDAYMASK|5 INTDCREATEFACTORMASK|5 ' + 'INTDCREATEHANDLE|5 INTDCREATEOVERRIDEDAYMASK|5 INTDCREATEOVERRIDEMASK|5 ' + 'INTDCREATESTATUSCODEMASK|5 INTDCREATETOUPERIOD|5 INTDDELETE|5 INTDDIPTEST|5 INTDEXPORT|5 ' + 'INTDGETERRORCODE|5 INTDGETERRORMESSAGE|5 INTDISEQUAL|5 INTDJOIN|5 INTDLOAD|5 INTDLOADACTUALCUT|5 ' + 'INTDLOADDATES|5 INTDLOADHIST|5 INTDLOADLIST|5 INTDLOADLISTDATES|5 INTDLOADLISTENERGY|5 ' + 'INTDLOADLISTHIST|5 
INTDLOADRELATEDCHANNEL|5 INTDLOADSP|5 INTDLOADSTAGING|5 INTDLOADUOM|5 ' + 'INTDLOADUOMDATES|5 INTDLOADUOMHIST|5 INTDLOADVERSION|5 INTDOPEN|5 INTDREADFIRST|5 INTDREADNEXT|5 ' + 'INTDRECCOUNT|5 INTDRELEASE|5 INTDREPLACE|5 INTDROLLAVG|5 INTDROLLPEAK|5 INTDSCALAROP|5 INTDSCALE|5 ' + 'INTDSETATTRIBUTE|5 INTDSETDSTPARTICIPANT|5 INTDSETSTRING|5 INTDSETVALUE|5 INTDSETVALUESTATUS|5 ' + 'INTDSHIFTSTARTTIME|5 INTDSMOOTH|5 INTDSORT|5 INTDSPIKETEST|5 INTDSUBSET|5 INTDTOU|5 ' + 'INTDTOURELEASE|5 INTDTOUVALUE|5 INTDUPDATESTATS|5 INTDVALUE|5 STDEV INTDDELETEEX|5 ' + 'INTDLOADEXACTUAL|5 INTDLOADEXCUT|5 INTDLOADEXDATES|5 INTDLOADEX|5 INTDLOADEXRELATEDCHANNEL|5 ' + 'INTDSAVEEX|5 MVLOAD|5 MVLOADACCT|5 MVLOADACCTDATES|5 MVLOADACCTHIST|5 MVLOADDATES|5 MVLOADHIST|5 ' + 'MVLOADLIST|5 MVLOADLISTDATES|5 MVLOADLISTHIST|5 IF FOR NEXT DONE SELECT END CALL ABORT CLEAR CHANNEL FACTOR LIST NUMBER ' + 'OVERRIDE SET WEEK DISTRIBUTIONNODE ELSE WHEN THEN OTHERWISE IENUM CSV INCLUDE LEAVE RIDER SAVE DELETE ' + 'NOVALUE SECTION WARN SAVE_UPDATE DETERMINANT LABEL REPORT REVENUE EACH ' + 'IN FROM TOTAL CHARGE BLOCK AND OR CSV_FILE RATE_CODE AUXILIARY_DEMAND ' + 'UIDACCOUNT RS BILL_PERIOD_SELECT HOURS_PER_MONTH INTD_ERROR_STOP SEASON_SCHEDULE_NAME ' + 'ACCOUNTFACTOR ARRAYUPPERBOUND CALLSTOREDPROC GETADOCONNECTION GETCONNECT GETDATASOURCE ' + 'GETQUALIFIER GETUSERID HASVALUE LISTCOUNT LISTOP LISTUPDATE LISTVALUE PRORATEFACTOR RSPRORATE ' + 'SETBINPATH SETDBMONITOR WQ_OPEN BILLINGHOURS DATE DATEFROMFLOAT DATETIMEFROMSTRING ' + 'DATETIMETOSTRING DATETOFLOAT DAY DAYDIFF DAYNAME DBDATETIME HOUR MINUTE MONTH MONTHDIFF ' + 'MONTHHOURS MONTHNAME ROUNDDATE SAMEWEEKDAYLASTYEAR SECOND WEEKDAY WEEKDIFF YEAR YEARDAY ' + 'YEARSTR COMPSUM HISTCOUNT HISTMAX HISTMIN HISTMINNZ HISTVALUE MAXNRANGE MAXRANGE MINRANGE ' + 'COMPIKVA COMPKVA COMPKVARFROMKQKW COMPLF IDATTR FLAG LF2KW LF2KWH MAXKW POWERFACTOR ' + 'READING2USAGE AVGSEASON MAXSEASON MONTHLYMERGE SEASONVALUE SUMSEASON ACCTREADDATES ' + 'ACCTTABLELOAD CONFIGADD CONFIGGET CREATEOBJECT CREATEREPORT EMAILCLIENT EXPBLKMDMUSAGE ' + 'EXPMDMUSAGE EXPORT_USAGE FACTORINEFFECT GETUSERSPECIFIEDSTOP INEFFECT ISHOLIDAY RUNRATE ' + 'SAVE_PROFILE SETREPORTTITLE USEREXIT WATFORRUNRATE TO TABLE ACOS ASIN ATAN ATAN2 BITAND CEIL ' + 'COS COSECANT COSH COTANGENT DIVQUOT DIVREM EXP FABS FLOOR FMOD FREPM FREXPN LOG LOG10 MAX MAXN ' + 'MIN MINNZ MODF POW ROUND ROUND2VALUE ROUNDINT SECANT SIN SINH SQROOT TAN TANH FLOAT2STRING ' + 'FLOAT2STRINGNC INSTR LEFT LEN LTRIM MID RIGHT RTRIM STRING STRINGNC TOLOWER TOUPPER TRIM ' + 'NUMDAYS READ_DATE STAGING', built_in: 'IDENTIFIER OPTIONS XML_ELEMENT XML_OP XML_ELEMENT_OF DOMDOCCREATE DOMDOCLOADFILE DOMDOCLOADXML ' + 'DOMDOCSAVEFILE DOMDOCGETROOT DOMDOCADDPI DOMNODEGETNAME DOMNODEGETTYPE DOMNODEGETVALUE DOMNODEGETCHILDCT ' + 'DOMNODEGETFIRSTCHILD DOMNODEGETSIBLING DOMNODECREATECHILDELEMENT DOMNODESETATTRIBUTE ' + 'DOMNODEGETCHILDELEMENTCT DOMNODEGETFIRSTCHILDELEMENT DOMNODEGETSIBLINGELEMENT DOMNODEGETATTRIBUTECT ' + 'DOMNODEGETATTRIBUTEI DOMNODEGETATTRIBUTEBYNAME DOMNODEGETBYNAME' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, { className: 'literal', variants: [ {begin: '#\\s+[a-zA-Z\\ \\.]*', relevance: 0}, // looks like #-comment {begin: '#[a-zA-Z\\ \\.]+'} ] } ] }; }; /***/ }, /* 304 */ /***/ function(module, exports) { module.exports = function(hljs) { var NUM_SUFFIX = '([uif](8|16|32|64|size))\?'; var KEYWORDS = 'alignof as be box break const continue crate do else enum extern ' + 
'false fn for if impl in let loop match mod mut offsetof once priv ' + 'proc pub pure ref return self Self sizeof static struct super trait true ' + 'type typeof unsafe unsized use virtual while where yield move default ' + 'int i8 i16 i32 i64 isize ' + 'uint u8 u32 u64 usize ' + 'float f32 f64 ' + 'str char bool' var BUILTINS = // prelude 'Copy Send Sized Sync Drop Fn FnMut FnOnce drop Box ToOwned Clone ' + 'PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator ' + 'Extend IntoIterator DoubleEndedIterator ExactSizeIterator Option ' + 'Result SliceConcatExt String ToString Vec ' + // macros 'assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! ' + 'debug_assert! debug_assert_eq! env! panic! file! format! format_args! ' + 'include_bin! include_str! line! local_data_key! module_path! ' + 'option_env! print! println! select! stringify! try! unimplemented! ' + 'unreachable! vec! write! writeln! macro_rules!'; return { aliases: ['rs'], keywords: { keyword: KEYWORDS, literal: 'true false Some None Ok Err', built_in: BUILTINS }, lexemes: hljs.IDENT_RE + '!?', illegal: '</', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.COMMENT('/\\*', '\\*/', {contains: ['self']}), hljs.inherit(hljs.QUOTE_STRING_MODE, {begin: /b?"/, illegal: null}), { className: 'string', variants: [ { begin: /r(#*)".*?"\1(?!#)/ }, { begin: /b?'\\?(x\w{2}|u\w{4}|U\w{8}|.)'/ } ] }, { className: 'symbol', begin: /'[a-zA-Z_][a-zA-Z0-9_]*/ }, { className: 'number', variants: [ { begin: '\\b0b([01_]+)' + NUM_SUFFIX }, { begin: '\\b0o([0-7_]+)' + NUM_SUFFIX }, { begin: '\\b0x([A-Fa-f0-9_]+)' + NUM_SUFFIX }, { begin: '\\b(\\d[\\d_]*(\\.[0-9_]+)?([eE][+-]?[0-9_]+)?)' + NUM_SUFFIX } ], relevance: 0 }, { className: 'function', beginKeywords: 'fn', end: '(\\(|<)', excludeEnd: true, contains: [hljs.UNDERSCORE_TITLE_MODE] }, { className: 'meta', begin: '#\\!?\\[', end: '\\]', contains: [ { className: 'meta-string', begin: /"/, end: /"/ } ] }, { className: 'class', beginKeywords: 'type', end: ';', contains: [ hljs.inherit(hljs.UNDERSCORE_TITLE_MODE, {endsParent: true}) ], illegal: '\\S' }, { className: 'class', beginKeywords: 'trait enum struct', end: '{', contains: [ hljs.inherit(hljs.UNDERSCORE_TITLE_MODE, {endsParent: true}) ], illegal: '[\\w\\d]' }, { begin: hljs.IDENT_RE + '::', keywords: {built_in: BUILTINS} }, { begin: '->' } ] }; }; /***/ }, /* 305 */ /***/ function(module, exports) { module.exports = function(hljs) { var ANNOTATION = { className: 'meta', begin: '@[A-Za-z]+' }; // used in strings for escaping/interpolation/substitution var SUBST = { className: 'subst', variants: [ {begin: '\\$[A-Za-z0-9_]+'}, {begin: '\\${', end: '}'} ] }; var STRING = { className: 'string', variants: [ { begin: '"', end: '"', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE] }, { begin: '"""', end: '"""', relevance: 10 }, { begin: '[a-z]+"', end: '"', illegal: '\\n', contains: [hljs.BACKSLASH_ESCAPE, SUBST] }, { className: 'string', begin: '[a-z]+"""', end: '"""', contains: [SUBST], relevance: 10 } ] }; var SYMBOL = { className: 'symbol', begin: '\'\\w[\\w\\d_]*(?!\')' }; var TYPE = { className: 'type', begin: '\\b[A-Z][A-Za-z0-9_]*', relevance: 0 }; var NAME = { className: 'title', begin: /[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/, relevance: 0 }; var CLASS = { className: 'class', beginKeywords: 'class object trait type', end: /[:={\[\n;]/, excludeEnd: true, contains: [ { beginKeywords: 'extends with', relevance: 10 }, { begin: /\[/, end: /\]/, excludeBegin: true, excludeEnd: true, 
relevance: 0, contains: [TYPE] }, { className: 'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, relevance: 0, contains: [TYPE] }, NAME ] }; var METHOD = { className: 'function', beginKeywords: 'def', end: /[:={\[(\n;]/, excludeEnd: true, contains: [NAME] }; return { keywords: { literal: 'true false null', keyword: 'type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, STRING, SYMBOL, TYPE, METHOD, CLASS, hljs.C_NUMBER_MODE, ANNOTATION ] }; }; /***/ }, /* 306 */ /***/ function(module, exports) { module.exports = function(hljs) { var SCHEME_IDENT_RE = '[^\\(\\)\\[\\]\\{\\}",\'`;#|\\\\\\s]+'; var SCHEME_SIMPLE_NUMBER_RE = '(\\-|\\+)?\\d+([./]\\d+)?'; var SCHEME_COMPLEX_NUMBER_RE = SCHEME_SIMPLE_NUMBER_RE + '[+\\-]' + SCHEME_SIMPLE_NUMBER_RE + 'i'; var BUILTINS = { 'builtin-name': 'case-lambda call/cc class define-class exit-handler field import ' + 'inherit init-field interface let*-values let-values let/ec mixin ' + 'opt-lambda override protect provide public rename require ' + 'require-for-syntax syntax syntax-case syntax-error unit/sig unless ' + 'when with-syntax and begin call-with-current-continuation ' + 'call-with-input-file call-with-output-file case cond define ' + 'define-syntax delay do dynamic-wind else for-each if lambda let let* ' + 'let-syntax letrec letrec-syntax map or syntax-rules \' * + , ,@ - ... / ' + '; < <= = => > >= ` abs acos angle append apply asin assoc assq assv atan ' + 'boolean? caar cadr call-with-input-file call-with-output-file ' + 'call-with-values car cdddar cddddr cdr ceiling char->integer ' + 'char-alphabetic? char-ci<=? char-ci<? char-ci=? char-ci>=? char-ci>? ' + 'char-downcase char-lower-case? char-numeric? char-ready? char-upcase ' + 'char-upper-case? char-whitespace? char<=? char<? char=? char>=? char>? ' + 'char? close-input-port close-output-port complex? cons cos ' + 'current-input-port current-output-port denominator display eof-object? ' + 'eq? equal? eqv? eval even? exact->inexact exact? exp expt floor ' + 'force gcd imag-part inexact->exact inexact? input-port? integer->char ' + 'integer? interaction-environment lcm length list list->string ' + 'list->vector list-ref list-tail list? load log magnitude make-polar ' + 'make-rectangular make-string make-vector max member memq memv min ' + 'modulo negative? newline not null-environment null? number->string ' + 'number? numerator odd? open-input-file open-output-file output-port? ' + 'pair? peek-char port? positive? procedure? quasiquote quote quotient ' + 'rational? rationalize read read-char real-part real? remainder reverse ' + 'round scheme-report-environment set! set-car! set-cdr! sin sqrt string ' + 'string->list string->number string->symbol string-append string-ci<=? ' + 'string-ci<? string-ci=? string-ci>=? string-ci>? string-copy ' + 'string-fill! string-length string-ref string-set! string<=? string<? ' + 'string=? string>=? string>? string? substring symbol->string symbol? ' + 'tan transcript-off transcript-on truncate values vector ' + 'vector->list vector-fill! vector-length vector-ref vector-set! ' + 'with-input-from-file with-output-to-file write write-char zero?' 
}; var SHEBANG = { className: 'meta', begin: '^#!', end: '$' }; var LITERAL = { className: 'literal', begin: '(#t|#f|#\\\\' + SCHEME_IDENT_RE + '|#\\\\.)' }; var NUMBER = { className: 'number', variants: [ { begin: SCHEME_SIMPLE_NUMBER_RE, relevance: 0 }, { begin: SCHEME_COMPLEX_NUMBER_RE, relevance: 0 }, { begin: '#b[0-1]+(/[0-1]+)?' }, { begin: '#o[0-7]+(/[0-7]+)?' }, { begin: '#x[0-9a-f]+(/[0-9a-f]+)?' } ] }; var STRING = hljs.QUOTE_STRING_MODE; var REGULAR_EXPRESSION = { className: 'regexp', begin: '#[pr]x"', end: '[^\\\\]"' }; var COMMENT_MODES = [ hljs.COMMENT( ';', '$', { relevance: 0 } ), hljs.COMMENT('#\\|', '\\|#') ]; var IDENT = { begin: SCHEME_IDENT_RE, relevance: 0 }; var QUOTED_IDENT = { className: 'symbol', begin: '\'' + SCHEME_IDENT_RE }; var BODY = { endsWithParent: true, relevance: 0 }; var QUOTED_LIST = { begin: /'/, contains: [ { begin: '\\(', end: '\\)', contains: ['self', LITERAL, STRING, NUMBER, IDENT, QUOTED_IDENT] } ] }; var NAME = { className: 'name', begin: SCHEME_IDENT_RE, lexemes: SCHEME_IDENT_RE, keywords: BUILTINS }; var LAMBDA = { begin: /lambda/, endsWithParent: true, returnBegin: true, contains: [ NAME, { begin: /\(/, end: /\)/, endsParent: true, contains: [IDENT], } ] }; var LIST = { variants: [ { begin: '\\(', end: '\\)' }, { begin: '\\[', end: '\\]' } ], contains: [LAMBDA, NAME, BODY] }; BODY.contains = [LITERAL, NUMBER, STRING, IDENT, QUOTED_IDENT, QUOTED_LIST, LIST].concat(COMMENT_MODES); return { illegal: /\S/, contains: [SHEBANG, NUMBER, STRING, QUOTED_IDENT, QUOTED_LIST, LIST].concat(COMMENT_MODES) }; }; /***/ }, /* 307 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMON_CONTAINS = [ hljs.C_NUMBER_MODE, { className: 'string', begin: '\'|\"', end: '\'|\"', contains: [hljs.BACKSLASH_ESCAPE, {begin: '\'\''}] } ]; return { aliases: ['sci'], lexemes: /%?\w+/, keywords: { keyword: 'abort break case clear catch continue do elseif else endfunction end for function '+ 'global if pause return resume select try then while', literal: '%f %F %t %T %pi %eps %inf %nan %e %i %z %s', built_in: // Scilab has more than 2000 functions. 
Just list the most commons 'abs and acos asin atan ceil cd chdir clearglobal cosh cos cumprod deff disp error '+ 'exec execstr exists exp eye gettext floor fprintf fread fsolve imag isdef isempty '+ 'isinfisnan isvector lasterror length load linspace list listfiles log10 log2 log '+ 'max min msprintf mclose mopen ones or pathconvert poly printf prod pwd rand real '+ 'round sinh sin size gsort sprintf sqrt strcat strcmps tring sum system tanh tan '+ 'type typename warning zeros matrix' }, illegal: '("|#|/\\*|\\s+/\\w+)', contains: [ { className: 'function', beginKeywords: 'function', end: '$', contains: [ hljs.UNDERSCORE_TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)' } ] }, { begin: '[a-zA-Z_][a-zA-Z_0-9]*(\'+[\\.\']*|[\\.\']+)', end: '', relevance: 0 }, { begin: '\\[', end: '\\]\'*[\\.\']*', relevance: 0, contains: COMMON_CONTAINS }, hljs.COMMENT('//', '$') ].concat(COMMON_CONTAINS) }; }; /***/ }, /* 308 */ /***/ function(module, exports) { module.exports = function(hljs) { var IDENT_RE = '[a-zA-Z-][a-zA-Z0-9_-]*'; var VARIABLE = { className: 'variable', begin: '(\\$' + IDENT_RE + ')\\b' }; var HEXCOLOR = { className: 'number', begin: '#[0-9A-Fa-f]+' }; var DEF_INTERNALS = { className: 'attribute', begin: '[A-Z\\_\\.\\-]+', end: ':', excludeEnd: true, illegal: '[^\\s]', starts: { endsWithParent: true, excludeEnd: true, contains: [ HEXCOLOR, hljs.CSS_NUMBER_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'meta', begin: '!important' } ] } }; return { case_insensitive: true, illegal: '[=/|\']', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'selector-id', begin: '\\#[A-Za-z0-9_-]+', relevance: 0 }, { className: 'selector-class', begin: '\\.[A-Za-z0-9_-]+', relevance: 0 }, { className: 'selector-attr', begin: '\\[', end: '\\]', illegal: '$' }, { className: 'selector-tag', // begin: IDENT_RE, end: '[,|\\s]' begin: '\\b(a|abbr|acronym|address|area|article|aside|audio|b|base|big|blockquote|body|br|button|canvas|caption|cite|code|col|colgroup|command|datalist|dd|del|details|dfn|div|dl|dt|em|embed|fieldset|figcaption|figure|footer|form|frame|frameset|(h[1-6])|head|header|hgroup|hr|html|i|iframe|img|input|ins|kbd|keygen|label|legend|li|link|map|mark|meta|meter|nav|noframes|noscript|object|ol|optgroup|option|output|p|param|pre|progress|q|rp|rt|ruby|samp|script|section|select|small|span|strike|strong|style|sub|sup|table|tbody|td|textarea|tfoot|th|thead|time|title|tr|tt|ul|var|video)\\b', relevance: 0 }, { begin: ':(visited|valid|root|right|required|read-write|read-only|out-range|optional|only-of-type|only-child|nth-of-type|nth-last-of-type|nth-last-child|nth-child|not|link|left|last-of-type|last-child|lang|invalid|indeterminate|in-range|hover|focus|first-of-type|first-line|first-letter|first-child|first|enabled|empty|disabled|default|checked|before|after|active)' }, { begin: '::(after|before|choices|first-letter|first-line|repeat-index|repeat-item|selection|value)' }, VARIABLE, { className: 'attribute', begin: 
'\\b(z-index|word-wrap|word-spacing|word-break|width|widows|white-space|visibility|vertical-align|unicode-bidi|transition-timing-function|transition-property|transition-duration|transition-delay|transition|transform-style|transform-origin|transform|top|text-underline-position|text-transform|text-shadow|text-rendering|text-overflow|text-indent|text-decoration-style|text-decoration-line|text-decoration-color|text-decoration|text-align-last|text-align|tab-size|table-layout|right|resize|quotes|position|pointer-events|perspective-origin|perspective|page-break-inside|page-break-before|page-break-after|padding-top|padding-right|padding-left|padding-bottom|padding|overflow-y|overflow-x|overflow-wrap|overflow|outline-width|outline-style|outline-offset|outline-color|outline|orphans|order|opacity|object-position|object-fit|normal|none|nav-up|nav-right|nav-left|nav-index|nav-down|min-width|min-height|max-width|max-height|mask|marks|margin-top|margin-right|margin-left|margin-bottom|margin|list-style-type|list-style-position|list-style-image|list-style|line-height|letter-spacing|left|justify-content|initial|inherit|ime-mode|image-orientation|image-resolution|image-rendering|icon|hyphens|height|font-weight|font-variant-ligatures|font-variant|font-style|font-stretch|font-size-adjust|font-size|font-language-override|font-kerning|font-feature-settings|font-family|font|float|flex-wrap|flex-shrink|flex-grow|flex-flow|flex-direction|flex-basis|flex|filter|empty-cells|display|direction|cursor|counter-reset|counter-increment|content|column-width|column-span|column-rule-width|column-rule-style|column-rule-color|column-rule|column-gap|column-fill|column-count|columns|color|clip-path|clip|clear|caption-side|break-inside|break-before|break-after|box-sizing|box-shadow|box-decoration-break|bottom|border-width|border-top-width|border-top-style|border-top-right-radius|border-top-left-radius|border-top-color|border-top|border-style|border-spacing|border-right-width|border-right-style|border-right-color|border-right|border-radius|border-left-width|border-left-style|border-left-color|border-left|border-image-width|border-image-source|border-image-slice|border-image-repeat|border-image-outset|border-image|border-color|border-collapse|border-bottom-width|border-bottom-style|border-bottom-right-radius|border-bottom-left-radius|border-bottom-color|border-bottom|border|background-size|background-repeat|background-position|background-origin|background-image|background-color|background-clip|background-attachment|background-blend-mode|background|backface-visibility|auto|animation-timing-function|animation-play-state|animation-name|animation-iteration-count|animation-fill-mode|animation-duration|animation-direction|animation-delay|animation|align-self|align-items|align-content)\\b', illegal: '[^\\s]' }, { begin: 
'\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b' }, { begin: ':', end: ';', contains: [ VARIABLE, HEXCOLOR, hljs.CSS_NUMBER_MODE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, { className: 'meta', begin: '!important' } ] }, { begin: '@', end: '[{;]', keywords: 'mixin include extend for if else each while charset import debug media page content font-face namespace warn', contains: [ VARIABLE, hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, HEXCOLOR, hljs.CSS_NUMBER_MODE, { begin: '\\s[A-Za-z0-9_.-]+', relevance: 0 } ] } ] }; }; /***/ }, /* 309 */ /***/ function(module, exports) { module.exports = function(hljs) { var smali_instr_low_prio = ['add', 'and', 'cmp', 'cmpg', 'cmpl', 'const', 'div', 'double', 'float', 'goto', 'if', 'int', 'long', 'move', 'mul', 'neg', 'new', 'nop', 'not', 'or', 'rem', 'return', 'shl', 'shr', 'sput', 'sub', 'throw', 'ushr', 'xor']; var smali_instr_high_prio = ['aget', 'aput', 'array', 'check', 'execute', 'fill', 'filled', 'goto/16', 'goto/32', 'iget', 'instance', 'invoke', 'iput', 'monitor', 'packed', 'sget', 'sparse']; var smali_keywords = ['transient', 'constructor', 'abstract', 'final', 'synthetic', 'public', 'private', 'protected', 'static', 'bridge', 'system']; return { aliases: ['smali'], contains: [ { className: 'string', begin: '"', end: '"', relevance: 0 }, hljs.COMMENT( '#', '$', { relevance: 0 } ), { className: 'keyword', variants: [ {begin: '\\s*\\.end\\s[a-zA-Z0-9]*'}, {begin: '^[ ]*\\.[a-zA-Z]*', relevance: 0}, {begin: '\\s:[a-zA-Z_0-9]*', relevance: 0}, {begin: '\\s(' + smali_keywords.join('|') + ')'} ] }, { className: 'built_in', variants : [ { begin: '\\s('+smali_instr_low_prio.join('|')+')\\s' }, { begin: '\\s('+smali_instr_low_prio.join('|')+')((\\-|/)[a-zA-Z0-9]+)+\\s', relevance: 10 }, { begin: '\\s('+smali_instr_high_prio.join('|')+')((\\-|/)[a-zA-Z0-9]+)*\\s', relevance: 10 }, ] }, { className: 'class', begin: 'L[^\(;:\n]*;', relevance: 0 }, { begin: '[vp][0-9]+', } ] }; }; /***/ }, /* 310 */ /***/ function(module, exports) { module.exports = function(hljs) { var VAR_IDENT_RE = '[a-z][a-zA-Z0-9_]*'; var CHAR = { className: 'string', begin: '\\$.{1}' }; var SYMBOL = { className: 'symbol', begin: '#' + hljs.UNDERSCORE_IDENT_RE }; return { aliases: ['st'], keywords: 'self super nil true false thisContext', // only 6 contains: [ hljs.COMMENT('"', '"'), hljs.APOS_STRING_MODE, { className: 'type', begin: '\\b[A-Z][A-Za-z0-9_]*', relevance: 0 }, { begin: 
VAR_IDENT_RE + ':', relevance: 0 }, hljs.C_NUMBER_MODE, SYMBOL, CHAR, { // This looks more complicated than needed to avoid combinatorial // explosion under V8. It effectively means `| var1 var2 ... |` with // whitespace adjacent to `|` being optional. begin: '\\|[ ]*' + VAR_IDENT_RE + '([ ]+' + VAR_IDENT_RE + ')*[ ]*\\|', returnBegin: true, end: /\|/, illegal: /\S/, contains: [{begin: '(\\|[ ]*)?' + VAR_IDENT_RE}] }, { begin: '\\#\\(', end: '\\)', contains: [ hljs.APOS_STRING_MODE, CHAR, hljs.C_NUMBER_MODE, SYMBOL ] } ] }; }; /***/ }, /* 311 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['ml'], keywords: { keyword: /* according to Definition of Standard ML 97 */ 'abstype and andalso as case datatype do else end eqtype ' + 'exception fn fun functor handle if in include infix infixr ' + 'let local nonfix of op open orelse raise rec sharing sig ' + 'signature struct structure then type val with withtype where while', built_in: /* built-in types according to basis library */ 'array bool char exn int list option order real ref string substring vector unit word', literal: 'true false NONE SOME LESS EQUAL GREATER nil' }, illegal: /\/\/|>>/, lexemes: '[a-z_]\\w*!?', contains: [ { className: 'literal', begin: /\[(\|\|)?\]|\(\)/, relevance: 0 }, hljs.COMMENT( '\\(\\*', '\\*\\)', { contains: ['self'] } ), { /* type variable */ className: 'symbol', begin: '\'[A-Za-z_](?!\')[\\w\']*' /* the grammar is ambiguous on how 'a'b should be interpreted but not the compiler */ }, { /* polymorphic variant */ className: 'type', begin: '`[A-Z][\\w\']*' }, { /* module or constructor */ className: 'type', begin: '\\b[A-Z][\\w\']*', relevance: 0 }, { /* don't color identifiers, but safely catch all identifiers with '*/ begin: '[a-z_]\\w*\'[\\w\']*' }, hljs.inherit(hljs.APOS_STRING_MODE, {className: 'string', relevance: 0}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}), { className: 'number', begin: '\\b(0[xX][a-fA-F0-9_]+[Lln]?|' + '0[oO][0-7_]+[Lln]?|' + '0[bB][01_]+[Lln]?|' + '[0-9][0-9_]*([Lln]|(\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)', relevance: 0 }, { begin: /[-=]>/ // relevance booster } ] }; }; /***/ }, /* 312 */ /***/ function(module, exports) { module.exports = function(hljs) { var CPP = hljs.getLanguage('cpp').exports; // In SQF, a variable starts with _ var VARIABLE = { className: 'variable', begin: /\b_+[a-zA-Z_]\w*/ }; // In SQF, a function should fit the myTag_fnc_myFunction pattern // https://community.bistudio.com/wiki/Functions_Library_(Arma_3)#Adding_a_Function var FUNCTION = { className: 'title', begin: /[a-zA-Z][a-zA-Z0-9]+_fnc_\w*/ }; // In SQF strings, quotes matching the start are escaped by doubling them (adding a consecutive quote). // Example of single escaped quotes: " "" " and ' '' '.
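// Editorial illustration (assumption about typical usage, not from the original source):
//   hint "he said ""hello"""; hint 'it''s fine';
// Each doubled quote is consumed by the nested {begin: '""'} / {begin: "''"} rules
// inside STRINGS below, so it does not terminate the surrounding string literal.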
var STRINGS = { className: 'string', variants: [ { begin: '"', end: '"', contains: [{begin: '""', relevance: 0}] }, { begin: '\'', end: '\'', contains: [{begin: '\'\'', relevance: 0}] } ] }; return { aliases: ['sqf'], case_insensitive: true, keywords: { keyword: 'case catch default do else exit exitWith for forEach from if ' + 'switch then throw to try waitUntil while with', built_in: 'abs accTime acos action actionIDs actionKeys actionKeysImages actionKeysNames ' + 'actionKeysNamesArray actionName actionParams activateAddons activatedAddons activateKey ' + 'add3DENConnection add3DENEventHandler add3DENLayer addAction addBackpack addBackpackCargo ' + 'addBackpackCargoGlobal addBackpackGlobal addCamShake addCuratorAddons addCuratorCameraArea ' + 'addCuratorEditableObjects addCuratorEditingArea addCuratorPoints addEditorObject addEventHandler ' + 'addGoggles addGroupIcon addHandgunItem addHeadgear addItem addItemCargo addItemCargoGlobal ' + 'addItemPool addItemToBackpack addItemToUniform addItemToVest addLiveStats addMagazine ' + 'addMagazineAmmoCargo addMagazineCargo addMagazineCargoGlobal addMagazineGlobal addMagazinePool ' + 'addMagazines addMagazineTurret addMenu addMenuItem addMissionEventHandler addMPEventHandler ' + 'addMusicEventHandler addOwnedMine addPlayerScores addPrimaryWeaponItem ' + 'addPublicVariableEventHandler addRating addResources addScore addScoreSide addSecondaryWeaponItem ' + 'addSwitchableUnit addTeamMember addToRemainsCollector addUniform addVehicle addVest addWaypoint ' + 'addWeapon addWeaponCargo addWeaponCargoGlobal addWeaponGlobal addWeaponItem addWeaponPool ' + 'addWeaponTurret agent agents AGLToASL aimedAtTarget aimPos airDensityRTD airportSide ' + 'AISFinishHeal alive all3DENEntities allControls allCurators allCutLayers allDead allDeadMen ' + 'allDisplays allGroups allMapMarkers allMines allMissionObjects allow3DMode allowCrewInImmobile ' + 'allowCuratorLogicIgnoreAreas allowDamage allowDammage allowFileOperations allowFleeing allowGetIn ' + 'allowSprint allPlayers allSites allTurrets allUnits allUnitsUAV allVariables ammo and animate ' + 'animateDoor animateSource animationNames animationPhase animationSourcePhase animationState ' + 'append apply armoryPoints arrayIntersect asin ASLToAGL ASLToATL assert assignAsCargo ' + 'assignAsCargoIndex assignAsCommander assignAsDriver assignAsGunner assignAsTurret assignCurator ' + 'assignedCargo assignedCommander assignedDriver assignedGunner assignedItems assignedTarget ' + 'assignedTeam assignedVehicle assignedVehicleRole assignItem assignTeam assignToAirport atan atan2 ' + 'atg ATLToASL attachedObject attachedObjects attachedTo attachObject attachTo attackEnabled ' + 'backpack backpackCargo backpackContainer backpackItems backpackMagazines backpackSpaceFor ' + 'behaviour benchmark binocular blufor boundingBox boundingBoxReal boundingCenter breakOut breakTo ' + 'briefingName buildingExit buildingPos buttonAction buttonSetAction cadetMode call callExtension ' + 'camCommand camCommit camCommitPrepared camCommitted camConstuctionSetParams camCreate camDestroy ' + 'cameraEffect cameraEffectEnableHUD cameraInterest cameraOn cameraView campaignConfigFile ' + 'camPreload camPreloaded camPrepareBank camPrepareDir camPrepareDive camPrepareFocus camPrepareFov ' + 'camPrepareFovRange camPreparePos camPrepareRelPos camPrepareTarget camSetBank camSetDir ' + 'camSetDive camSetFocus camSetFov camSetFovRange camSetPos camSetRelPos camSetTarget camTarget ' + 'camUseNVG canAdd canAddItemToBackpack canAddItemToUniform canAddItemToVest ' 
+ 'cancelSimpleTaskDestination canFire canMove canSlingLoad canStand canSuspend canUnloadInCombat ' + 'canVehicleCargo captive captiveNum cbChecked cbSetChecked ceil channelEnabled cheatsEnabled ' + 'checkAIFeature checkVisibility civilian className clearAllItemsFromBackpack clearBackpackCargo ' + 'clearBackpackCargoGlobal clearGroupIcons clearItemCargo clearItemCargoGlobal clearItemPool ' + 'clearMagazineCargo clearMagazineCargoGlobal clearMagazinePool clearOverlay clearRadio ' + 'clearWeaponCargo clearWeaponCargoGlobal clearWeaponPool clientOwner closeDialog closeDisplay ' + 'closeOverlay collapseObjectTree collect3DENHistory combatMode commandArtilleryFire commandChat ' + 'commander commandFire commandFollow commandFSM commandGetOut commandingMenu commandMove ' + 'commandRadio commandStop commandSuppressiveFire commandTarget commandWatch comment commitOverlay ' + 'compile compileFinal completedFSM composeText configClasses configFile configHierarchy configName ' + 'configNull configProperties configSourceAddonList configSourceMod configSourceModList ' + 'connectTerminalToUAV controlNull controlsGroupCtrl copyFromClipboard copyToClipboard ' + 'copyWaypoints cos count countEnemy countFriendly countSide countType countUnknown ' + 'create3DENComposition create3DENEntity createAgent createCenter createDialog createDiaryLink ' + 'createDiaryRecord createDiarySubject createDisplay createGearDialog createGroup ' + 'createGuardedPoint createLocation createMarker createMarkerLocal createMenu createMine ' + 'createMissionDisplay createMPCampaignDisplay createSimpleObject createSimpleTask createSite ' + 'createSoundSource createTask createTeam createTrigger createUnit createVehicle createVehicleCrew ' + 'createVehicleLocal crew ctrlActivate ctrlAddEventHandler ctrlAngle ctrlAutoScrollDelay ' + 'ctrlAutoScrollRewind ctrlAutoScrollSpeed ctrlChecked ctrlClassName ctrlCommit ctrlCommitted ' + 'ctrlCreate ctrlDelete ctrlEnable ctrlEnabled ctrlFade ctrlHTMLLoaded ctrlIDC ctrlIDD ' + 'ctrlMapAnimAdd ctrlMapAnimClear ctrlMapAnimCommit ctrlMapAnimDone ctrlMapCursor ctrlMapMouseOver ' + 'ctrlMapScale ctrlMapScreenToWorld ctrlMapWorldToScreen ctrlModel ctrlModelDirAndUp ctrlModelScale ' + 'ctrlParent ctrlParentControlsGroup ctrlPosition ctrlRemoveAllEventHandlers ctrlRemoveEventHandler ' + 'ctrlScale ctrlSetActiveColor ctrlSetAngle ctrlSetAutoScrollDelay ctrlSetAutoScrollRewind ' + 'ctrlSetAutoScrollSpeed ctrlSetBackgroundColor ctrlSetChecked ctrlSetEventHandler ctrlSetFade ' + 'ctrlSetFocus ctrlSetFont ctrlSetFontH1 ctrlSetFontH1B ctrlSetFontH2 ctrlSetFontH2B ctrlSetFontH3 ' + 'ctrlSetFontH3B ctrlSetFontH4 ctrlSetFontH4B ctrlSetFontH5 ctrlSetFontH5B ctrlSetFontH6 ' + 'ctrlSetFontH6B ctrlSetFontHeight ctrlSetFontHeightH1 ctrlSetFontHeightH2 ctrlSetFontHeightH3 ' + 'ctrlSetFontHeightH4 ctrlSetFontHeightH5 ctrlSetFontHeightH6 ctrlSetFontHeightSecondary ' + 'ctrlSetFontP ctrlSetFontPB ctrlSetFontSecondary ctrlSetForegroundColor ctrlSetModel ' + 'ctrlSetModelDirAndUp ctrlSetModelScale ctrlSetPosition ctrlSetScale ctrlSetStructuredText ' + 'ctrlSetText ctrlSetTextColor ctrlSetTooltip ctrlSetTooltipColorBox ctrlSetTooltipColorShade ' + 'ctrlSetTooltipColorText ctrlShow ctrlShown ctrlText ctrlTextHeight ctrlType ctrlVisible ' + 'curatorAddons curatorCamera curatorCameraArea curatorCameraAreaCeiling curatorCoef ' + 'curatorEditableObjects curatorEditingArea curatorEditingAreaType curatorMouseOver curatorPoints ' + 'curatorRegisteredObjects curatorSelected curatorWaypointCost current3DENOperation currentChannel ' + 
'currentCommand currentMagazine currentMagazineDetail currentMagazineDetailTurret ' + 'currentMagazineTurret currentMuzzle currentNamespace currentTask currentTasks currentThrowable ' + 'currentVisionMode currentWaypoint currentWeapon currentWeaponMode currentWeaponTurret ' + 'currentZeroing cursorObject cursorTarget customChat customRadio cutFadeOut cutObj cutRsc cutText ' + 'damage date dateToNumber daytime deActivateKey debriefingText debugFSM debugLog deg ' + 'delete3DENEntities deleteAt deleteCenter deleteCollection deleteEditorObject deleteGroup ' + 'deleteIdentity deleteLocation deleteMarker deleteMarkerLocal deleteRange deleteResources ' + 'deleteSite deleteStatus deleteTeam deleteVehicle deleteVehicleCrew deleteWaypoint detach ' + 'detectedMines diag_activeMissionFSMs diag_activeScripts diag_activeSQFScripts ' + 'diag_activeSQSScripts diag_captureFrame diag_captureSlowFrame diag_codePerformance diag_drawMode ' + 'diag_enable diag_enabled diag_fps diag_fpsMin diag_frameNo diag_list diag_log diag_logSlowFrame ' + 'diag_mergeConfigFile diag_recordTurretLimits diag_tickTime diag_toggle dialog diarySubjectExists ' + 'didJIP didJIPOwner difficulty difficultyEnabled difficultyEnabledRTD difficultyOption direction ' + 'directSay disableAI disableCollisionWith disableConversation disableDebriefingStats ' + 'disableNVGEquipment disableRemoteSensors disableSerialization disableTIEquipment ' + 'disableUAVConnectability disableUserInput displayAddEventHandler displayCtrl displayNull ' + 'displayParent displayRemoveAllEventHandlers displayRemoveEventHandler displaySetEventHandler ' + 'dissolveTeam distance distance2D distanceSqr distributionRegion do3DENAction doArtilleryFire ' + 'doFire doFollow doFSM doGetOut doMove doorPhase doStop doSuppressiveFire doTarget doWatch ' + 'drawArrow drawEllipse drawIcon drawIcon3D drawLine drawLine3D drawLink drawLocation drawPolygon ' + 'drawRectangle driver drop east echo edit3DENMissionAttributes editObject editorSetEventHandler ' + 'effectiveCommander emptyPositions enableAI enableAIFeature enableAimPrecision enableAttack ' + 'enableAudioFeature enableCamShake enableCaustics enableChannel enableCollisionWith enableCopilot ' + 'enableDebriefingStats enableDiagLegend enableEndDialog enableEngineArtillery enableEnvironment ' + 'enableFatigue enableGunLights enableIRLasers enableMimics enablePersonTurret enableRadio ' + 'enableReload enableRopeAttach enableSatNormalOnDetail enableSaving enableSentences ' + 'enableSimulation enableSimulationGlobal enableStamina enableTeamSwitch enableUAVConnectability ' + 'enableUAVWaypoints enableVehicleCargo endLoadingScreen endMission engineOn enginesIsOnRTD ' + 'enginesRpmRTD enginesTorqueRTD entities estimatedEndServerTime estimatedTimeLeft ' + 'evalObjectArgument everyBackpack everyContainer exec execEditorScript execFSM execVM exp ' + 'expectedDestination exportJIPMessages eyeDirection eyePos face faction fadeMusic fadeRadio ' + 'fadeSound fadeSpeech failMission fillWeaponsFromPool find findCover findDisplay findEditorObject ' + 'findEmptyPosition findEmptyPositionReady findNearestEnemy finishMissionInit finite fire ' + 'fireAtTarget firstBackpack flag flagOwner flagSide flagTexture fleeing floor flyInHeight ' + 'flyInHeightASL fog fogForecast fogParams forceAddUniform forcedMap forceEnd forceMap forceRespawn ' + 'forceSpeed forceWalk forceWeaponFire forceWeatherChange forEachMember forEachMemberAgent ' + 'forEachMemberTeam format formation formationDirection formationLeader formationMembers ' + 'formationPosition 
formationTask formatText formLeader freeLook fromEditor fuel fullCrew ' + 'gearIDCAmmoCount gearSlotAmmoCount gearSlotData get3DENActionState get3DENAttribute get3DENCamera ' + 'get3DENConnections get3DENEntity get3DENEntityID get3DENGrid get3DENIconsVisible ' + 'get3DENLayerEntities get3DENLinesVisible get3DENMissionAttribute get3DENMouseOver get3DENSelected ' + 'getAimingCoef getAllHitPointsDamage getAllOwnedMines getAmmoCargo getAnimAimPrecision ' + 'getAnimSpeedCoef getArray getArtilleryAmmo getArtilleryComputerSettings getArtilleryETA ' + 'getAssignedCuratorLogic getAssignedCuratorUnit getBackpackCargo getBleedingRemaining ' + 'getBurningValue getCameraViewDirection getCargoIndex getCenterOfMass getClientState ' + 'getClientStateNumber getConnectedUAV getCustomAimingCoef getDammage getDescription getDir ' + 'getDirVisual getDLCs getEditorCamera getEditorMode getEditorObjectScope getElevationOffset ' + 'getFatigue getFriend getFSMVariable getFuelCargo getGroupIcon getGroupIconParams getGroupIcons ' + 'getHideFrom getHit getHitIndex getHitPointDamage getItemCargo getMagazineCargo getMarkerColor ' + 'getMarkerPos getMarkerSize getMarkerType getMass getMissionConfig getMissionConfigValue ' + 'getMissionDLCs getMissionLayerEntities getModelInfo getMousePosition getNumber getObjectArgument ' + 'getObjectChildren getObjectDLC getObjectMaterials getObjectProxy getObjectTextures getObjectType ' + 'getObjectViewDistance getOxygenRemaining getPersonUsedDLCs getPilotCameraDirection ' + 'getPilotCameraPosition getPilotCameraRotation getPilotCameraTarget getPlayerChannel ' + 'getPlayerScores getPlayerUID getPos getPosASL getPosASLVisual getPosASLW getPosATL ' + 'getPosATLVisual getPosVisual getPosWorld getRelDir getRelPos getRemoteSensorsDisabled ' + 'getRepairCargo getResolution getShadowDistance getShotParents getSlingLoad getSpeed getStamina ' + 'getStatValue getSuppression getTerrainHeightASL getText getUnitLoadout getUnitTrait getVariable ' + 'getVehicleCargo getWeaponCargo getWeaponSway getWPPos glanceAt globalChat globalRadio goggles ' + 'goto group groupChat groupFromNetId groupIconSelectable groupIconsVisible groupId groupOwner ' + 'groupRadio groupSelectedUnits groupSelectUnit grpNull gunner gusts halt handgunItems ' + 'handgunMagazine handgunWeapon handsHit hasInterface hasPilotCamera hasWeapon hcAllGroups ' + 'hcGroupParams hcLeader hcRemoveAllGroups hcRemoveGroup hcSelected hcSelectGroup hcSetGroup ' + 'hcShowBar hcShownBar headgear hideBody hideObject hideObjectGlobal hideSelection hint hintC ' + 'hintCadet hintSilent hmd hostMission htmlLoad HUDMovementLevels humidity image importAllGroups ' + 'importance in inArea inAreaArray incapacitatedState independent inflame inflamed ' + 'inGameUISetEventHandler inheritsFrom initAmbientLife inPolygon inputAction inRangeOfArtillery ' + 'insertEditorObject intersect is3DEN is3DENMultiplayer isAbleToBreathe isAgent isArray ' + 'isAutoHoverOn isAutonomous isAutotest isBleeding isBurning isClass isCollisionLightOn ' + 'isCopilotEnabled isDedicated isDLCAvailable isEngineOn isEqualTo isEqualType isEqualTypeAll ' + 'isEqualTypeAny isEqualTypeArray isEqualTypeParams isFilePatchingEnabled isFlashlightOn ' + 'isFlatEmpty isForcedWalk isFormationLeader isHidden isInRemainsCollector ' + 'isInstructorFigureEnabled isIRLaserOn isKeyActive isKindOf isLightOn isLocalized isManualFire ' + 'isMarkedForCollection isMultiplayer isMultiplayerSolo isNil isNull isNumber isObjectHidden ' + 'isObjectRTD isOnRoad isPipEnabled isPlayer isRealTime isRemoteExecuted 
isRemoteExecutedJIP ' + 'isServer isShowing3DIcons isSprintAllowed isStaminaEnabled isSteamMission ' + 'isStreamFriendlyUIEnabled isText isTouchingGround isTurnedOut isTutHintsEnabled isUAVConnectable ' + 'isUAVConnected isUniformAllowed isVehicleCargo isWalking isWeaponDeployed isWeaponRested ' + 'itemCargo items itemsWithMagazines join joinAs joinAsSilent joinSilent joinString kbAddDatabase ' + 'kbAddDatabaseTargets kbAddTopic kbHasTopic kbReact kbRemoveTopic kbTell kbWasSaid keyImage ' + 'keyName knowsAbout land landAt landResult language laserTarget lbAdd lbClear lbColor lbCurSel ' + 'lbData lbDelete lbIsSelected lbPicture lbSelection lbSetColor lbSetCurSel lbSetData lbSetPicture ' + 'lbSetPictureColor lbSetPictureColorDisabled lbSetPictureColorSelected lbSetSelectColor ' + 'lbSetSelectColorRight lbSetSelected lbSetTooltip lbSetValue lbSize lbSort lbSortByValue lbText ' + 'lbValue leader leaderboardDeInit leaderboardGetRows leaderboardInit leaveVehicle libraryCredits ' + 'libraryDisclaimers lifeState lightAttachObject lightDetachObject lightIsOn lightnings limitSpeed ' + 'linearConversion lineBreak lineIntersects lineIntersectsObjs lineIntersectsSurfaces ' + 'lineIntersectsWith linkItem list listObjects ln lnbAddArray lnbAddColumn lnbAddRow lnbClear ' + 'lnbColor lnbCurSelRow lnbData lnbDeleteColumn lnbDeleteRow lnbGetColumnsPosition lnbPicture ' + 'lnbSetColor lnbSetColumnsPos lnbSetCurSelRow lnbSetData lnbSetPicture lnbSetText lnbSetValue ' + 'lnbSize lnbText lnbValue load loadAbs loadBackpack loadFile loadGame loadIdentity loadMagazine ' + 'loadOverlay loadStatus loadUniform loadVest local localize locationNull locationPosition lock ' + 'lockCameraTo lockCargo lockDriver locked lockedCargo lockedDriver lockedTurret lockIdentity ' + 'lockTurret lockWP log logEntities logNetwork logNetworkTerminate lookAt lookAtPos magazineCargo ' + 'magazines magazinesAllTurrets magazinesAmmo magazinesAmmoCargo magazinesAmmoFull magazinesDetail ' + 'magazinesDetailBackpack magazinesDetailUniform magazinesDetailVest magazinesTurret ' + 'magazineTurretAmmo mapAnimAdd mapAnimClear mapAnimCommit mapAnimDone mapCenterOnCamera ' + 'mapGridPosition markAsFinishedOnSteam markerAlpha markerBrush markerColor markerDir markerPos ' + 'markerShape markerSize markerText markerType max members menuAction menuAdd menuChecked menuClear ' + 'menuCollapse menuData menuDelete menuEnable menuEnabled menuExpand menuHover menuPicture ' + 'menuSetAction menuSetCheck menuSetData menuSetPicture menuSetValue menuShortcut menuShortcutText ' + 'menuSize menuSort menuText menuURL menuValue min mineActive mineDetectedBy missionConfigFile ' + 'missionDifficulty missionName missionNamespace missionStart missionVersion mod modelToWorld ' + 'modelToWorldVisual modParams moonIntensity moonPhase morale move move3DENCamera moveInAny ' + 'moveInCargo moveInCommander moveInDriver moveInGunner moveInTurret moveObjectToEnd moveOut ' + 'moveTime moveTo moveToCompleted moveToFailed musicVolume name nameSound nearEntities ' + 'nearestBuilding nearestLocation nearestLocations nearestLocationWithDubbing nearestObject ' + 'nearestObjects nearestTerrainObjects nearObjects nearObjectsReady nearRoads nearSupplies ' + 'nearTargets needReload netId netObjNull newOverlay nextMenuItemIndex nextWeatherChange nMenuItems ' + 'not numberToDate objectCurators objectFromNetId objectParent objNull objStatus onBriefingGroup ' + 'onBriefingNotes onBriefingPlan onBriefingTeamSwitch onCommandModeChanged onDoubleClick ' + 'onEachFrame onGroupIconClick 
onGroupIconOverEnter onGroupIconOverLeave onHCGroupSelectionChanged ' + 'onMapSingleClick onPlayerConnected onPlayerDisconnected onPreloadFinished onPreloadStarted ' + 'onShowNewObject onTeamSwitch openCuratorInterface openDLCPage openMap openYoutubeVideo opfor or ' + 'orderGetIn overcast overcastForecast owner param params parseNumber parseText parsingNamespace ' + 'particlesQuality pi pickWeaponPool pitch pixelGrid pixelGridBase pixelGridNoUIScale pixelH pixelW ' + 'playableSlotsNumber playableUnits playAction playActionNow player playerRespawnTime playerSide ' + 'playersNumber playGesture playMission playMove playMoveNow playMusic playScriptedMission ' + 'playSound playSound3D position positionCameraToWorld posScreenToWorld posWorldToScreen ' + 'ppEffectAdjust ppEffectCommit ppEffectCommitted ppEffectCreate ppEffectDestroy ppEffectEnable ' + 'ppEffectEnabled ppEffectForceInNVG precision preloadCamera preloadObject preloadSound ' + 'preloadTitleObj preloadTitleRsc preprocessFile preprocessFileLineNumbers primaryWeapon ' + 'primaryWeaponItems primaryWeaponMagazine priority private processDiaryLink productVersion ' + 'profileName profileNamespace profileNameSteam progressLoadingScreen progressPosition ' + 'progressSetPosition publicVariable publicVariableClient publicVariableServer pushBack ' + 'pushBackUnique putWeaponPool queryItemsPool queryMagazinePool queryWeaponPool rad radioChannelAdd ' + 'radioChannelCreate radioChannelRemove radioChannelSetCallSign radioChannelSetLabel radioVolume ' + 'rain rainbow random rank rankId rating rectangular registeredTasks registerTask reload ' + 'reloadEnabled remoteControl remoteExec remoteExecCall remove3DENConnection remove3DENEventHandler ' + 'remove3DENLayer removeAction removeAll3DENEventHandlers removeAllActions removeAllAssignedItems ' + 'removeAllContainers removeAllCuratorAddons removeAllCuratorCameraAreas ' + 'removeAllCuratorEditingAreas removeAllEventHandlers removeAllHandgunItems removeAllItems ' + 'removeAllItemsWithMagazines removeAllMissionEventHandlers removeAllMPEventHandlers ' + 'removeAllMusicEventHandlers removeAllOwnedMines removeAllPrimaryWeaponItems removeAllWeapons ' + 'removeBackpack removeBackpackGlobal removeCuratorAddons removeCuratorCameraArea ' + 'removeCuratorEditableObjects removeCuratorEditingArea removeDrawIcon removeDrawLinks ' + 'removeEventHandler removeFromRemainsCollector removeGoggles removeGroupIcon removeHandgunItem ' + 'removeHeadgear removeItem removeItemFromBackpack removeItemFromUniform removeItemFromVest ' + 'removeItems removeMagazine removeMagazineGlobal removeMagazines removeMagazinesTurret ' + 'removeMagazineTurret removeMenuItem removeMissionEventHandler removeMPEventHandler ' + 'removeMusicEventHandler removeOwnedMine removePrimaryWeaponItem removeSecondaryWeaponItem ' + 'removeSimpleTask removeSwitchableUnit removeTeamMember removeUniform removeVest removeWeapon ' + 'removeWeaponGlobal removeWeaponTurret requiredVersion resetCamShake resetSubgroupDirection ' + 'resistance resize resources respawnVehicle restartEditorCamera reveal revealMine reverse ' + 'reversedMouseY roadAt roadsConnectedTo roleDescription ropeAttachedObjects ropeAttachedTo ' + 'ropeAttachEnabled ropeAttachTo ropeCreate ropeCut ropeDestroy ropeDetach ropeEndPosition ' + 'ropeLength ropes ropeUnwind ropeUnwound rotorsForcesRTD rotorsRpmRTD round runInitScript ' + 'safeZoneH safeZoneW safeZoneWAbs safeZoneX safeZoneXAbs safeZoneY save3DENInventory saveGame ' + 'saveIdentity saveJoysticks saveOverlay saveProfileNamespace saveStatus 
saveVar savingEnabled say ' + 'say2D say3D scopeName score scoreSide screenshot screenToWorld scriptDone scriptName scriptNull ' + 'scudState secondaryWeapon secondaryWeaponItems secondaryWeaponMagazine select selectBestPlaces ' + 'selectDiarySubject selectedEditorObjects selectEditorObject selectionNames selectionPosition ' + 'selectLeader selectMax selectMin selectNoPlayer selectPlayer selectRandom selectWeapon ' + 'selectWeaponTurret sendAUMessage sendSimpleCommand sendTask sendTaskResult sendUDPMessage ' + 'serverCommand serverCommandAvailable serverCommandExecutable serverName serverTime set ' + 'set3DENAttribute set3DENAttributes set3DENGrid set3DENIconsVisible set3DENLayer ' + 'set3DENLinesVisible set3DENMissionAttributes set3DENModelsVisible set3DENObjectType ' + 'set3DENSelected setAccTime setAirportSide setAmmo setAmmoCargo setAnimSpeedCoef setAperture ' + 'setApertureNew setArmoryPoints setAttributes setAutonomous setBehaviour setBleedingRemaining ' + 'setCameraInterest setCamShakeDefParams setCamShakeParams setCamUseTi setCaptive setCenterOfMass ' + 'setCollisionLight setCombatMode setCompassOscillation setCuratorCameraAreaCeiling setCuratorCoef ' + 'setCuratorEditingAreaType setCuratorWaypointCost setCurrentChannel setCurrentTask ' + 'setCurrentWaypoint setCustomAimCoef setDamage setDammage setDate setDebriefingText ' + 'setDefaultCamera setDestination setDetailMapBlendPars setDir setDirection setDrawIcon ' + 'setDropInterval setEditorMode setEditorObjectScope setEffectCondition setFace setFaceAnimation ' + 'setFatigue setFlagOwner setFlagSide setFlagTexture setFog setFormation setFormationTask ' + 'setFormDir setFriend setFromEditor setFSMVariable setFuel setFuelCargo setGroupIcon ' + 'setGroupIconParams setGroupIconsSelectable setGroupIconsVisible setGroupId setGroupIdGlobal ' + 'setGroupOwner setGusts setHideBehind setHit setHitIndex setHitPointDamage setHorizonParallaxCoef ' + 'setHUDMovementLevels setIdentity setImportance setLeader setLightAmbient setLightAttenuation ' + 'setLightBrightness setLightColor setLightDayLight setLightFlareMaxDistance setLightFlareSize ' + 'setLightIntensity setLightnings setLightUseFlare setLocalWindParams setMagazineTurretAmmo ' + 'setMarkerAlpha setMarkerAlphaLocal setMarkerBrush setMarkerBrushLocal setMarkerColor ' + 'setMarkerColorLocal setMarkerDir setMarkerDirLocal setMarkerPos setMarkerPosLocal setMarkerShape ' + 'setMarkerShapeLocal setMarkerSize setMarkerSizeLocal setMarkerText setMarkerTextLocal ' + 'setMarkerType setMarkerTypeLocal setMass setMimic setMousePosition setMusicEffect ' + 'setMusicEventHandler setName setNameSound setObjectArguments setObjectMaterial ' + 'setObjectMaterialGlobal setObjectProxy setObjectTexture setObjectTextureGlobal ' + 'setObjectViewDistance setOvercast setOwner setOxygenRemaining setParticleCircle setParticleClass ' + 'setParticleFire setParticleParams setParticleRandom setPilotCameraDirection ' + 'setPilotCameraRotation setPilotCameraTarget setPilotLight setPiPEffect setPitch setPlayable ' + 'setPlayerRespawnTime setPos setPosASL setPosASL2 setPosASLW setPosATL setPosition setPosWorld ' + 'setRadioMsg setRain setRainbow setRandomLip setRank setRectangular setRepairCargo ' + 'setShadowDistance setShotParents setSide setSimpleTaskAlwaysVisible setSimpleTaskCustomData ' + 'setSimpleTaskDescription setSimpleTaskDestination setSimpleTaskTarget setSimpleTaskType ' + 'setSimulWeatherLayers setSize setSkill setSlingLoad setSoundEffect setSpeaker setSpeech ' + 'setSpeedMode setStamina setStaminaScheme 
setStatValue setSuppression setSystemOfUnits ' + 'setTargetAge setTaskResult setTaskState setTerrainGrid setText setTimeMultiplier setTitleEffect ' + 'setTriggerActivation setTriggerArea setTriggerStatements setTriggerText setTriggerTimeout ' + 'setTriggerType setType setUnconscious setUnitAbility setUnitLoadout setUnitPos setUnitPosWeak ' + 'setUnitRank setUnitRecoilCoefficient setUnitTrait setUnloadInCombat setUserActionText setVariable ' + 'setVectorDir setVectorDirAndUp setVectorUp setVehicleAmmo setVehicleAmmoDef setVehicleArmor ' + 'setVehicleCargo setVehicleId setVehicleLock setVehiclePosition setVehicleTiPars setVehicleVarName ' + 'setVelocity setVelocityTransformation setViewDistance setVisibleIfTreeCollapsed setWaves ' + 'setWaypointBehaviour setWaypointCombatMode setWaypointCompletionRadius setWaypointDescription ' + 'setWaypointForceBehaviour setWaypointFormation setWaypointHousePosition setWaypointLoiterRadius ' + 'setWaypointLoiterType setWaypointName setWaypointPosition setWaypointScript setWaypointSpeed ' + 'setWaypointStatements setWaypointTimeout setWaypointType setWaypointVisible ' + 'setWeaponReloadingTime setWind setWindDir setWindForce setWindStr setWPPos show3DIcons showChat ' + 'showCinemaBorder showCommandingMenu showCompass showCuratorCompass showGPS showHUD showLegend ' + 'showMap shownArtilleryComputer shownChat shownCompass shownCuratorCompass showNewEditorObject ' + 'shownGPS shownHUD shownMap shownPad shownRadio shownScoretable shownUAVFeed shownWarrant ' + 'shownWatch showPad showRadio showScoretable showSubtitles showUAVFeed showWarrant showWatch ' + 'showWaypoint showWaypoints side sideAmbientLife sideChat sideEmpty sideEnemy sideFriendly ' + 'sideLogic sideRadio sideUnknown simpleTasks simulationEnabled simulCloudDensity ' + 'simulCloudOcclusion simulInClouds simulWeatherSync sin size sizeOf skill skillFinal skipTime ' + 'sleep sliderPosition sliderRange sliderSetPosition sliderSetRange sliderSetSpeed sliderSpeed ' + 'slingLoadAssistantShown soldierMagazines someAmmo sort soundVolume spawn speaker speed speedMode ' + 'splitString sqrt squadParams stance startLoadingScreen step stop stopEngineRTD stopped str ' + 'sunOrMoon supportInfo suppressFor surfaceIsWater surfaceNormal surfaceType swimInDepth ' + 'switchableUnits switchAction switchCamera switchGesture switchLight switchMove ' + 'synchronizedObjects synchronizedTriggers synchronizedWaypoints synchronizeObjectsAdd ' + 'synchronizeObjectsRemove synchronizeTrigger synchronizeWaypoint systemChat systemOfUnits tan ' + 'targetKnowledge targetsAggregate targetsQuery taskAlwaysVisible taskChildren taskCompleted ' + 'taskCustomData taskDescription taskDestination taskHint taskMarkerOffset taskNull taskParent ' + 'taskResult taskState taskType teamMember teamMemberNull teamName teams teamSwitch ' + 'teamSwitchEnabled teamType terminate terrainIntersect terrainIntersectASL text textLog ' + 'textLogFormat tg time timeMultiplier titleCut titleFadeOut titleObj titleRsc titleText toArray ' + 'toFixed toLower toString toUpper triggerActivated triggerActivation triggerArea ' + 'triggerAttachedVehicle triggerAttachObject triggerAttachVehicle triggerStatements triggerText ' + 'triggerTimeout triggerTimeoutCurrent triggerType turretLocal turretOwner turretUnit tvAdd tvClear ' + 'tvCollapse tvCount tvCurSel tvData tvDelete tvExpand tvPicture tvSetCurSel tvSetData tvSetPicture ' + 'tvSetPictureColor tvSetPictureColorDisabled tvSetPictureColorSelected tvSetPictureRight ' + 'tvSetPictureRightColor 
tvSetPictureRightColorDisabled tvSetPictureRightColorSelected tvSetText ' + 'tvSetTooltip tvSetValue tvSort tvSortByValue tvText tvTooltip tvValue type typeName typeOf ' + 'UAVControl uiNamespace uiSleep unassignCurator unassignItem unassignTeam unassignVehicle ' + 'underwater uniform uniformContainer uniformItems uniformMagazines unitAddons unitAimPosition ' + 'unitAimPositionVisual unitBackpack unitIsUAV unitPos unitReady unitRecoilCoefficient units ' + 'unitsBelowHeight unlinkItem unlockAchievement unregisterTask updateDrawIcon updateMenuItem ' + 'updateObjectTree useAISteeringComponent useAudioTimeForMoves vectorAdd vectorCos ' + 'vectorCrossProduct vectorDiff vectorDir vectorDirVisual vectorDistance vectorDistanceSqr ' + 'vectorDotProduct vectorFromTo vectorMagnitude vectorMagnitudeSqr vectorMultiply vectorNormalized ' + 'vectorUp vectorUpVisual vehicle vehicleCargoEnabled vehicleChat vehicleRadio vehicles ' + 'vehicleVarName velocity velocityModelSpace verifySignature vest vestContainer vestItems ' + 'vestMagazines viewDistance visibleCompass visibleGPS visibleMap visiblePosition ' + 'visiblePositionASL visibleScoretable visibleWatch waves waypointAttachedObject ' + 'waypointAttachedVehicle waypointAttachObject waypointAttachVehicle waypointBehaviour ' + 'waypointCombatMode waypointCompletionRadius waypointDescription waypointForceBehaviour ' + 'waypointFormation waypointHousePosition waypointLoiterRadius waypointLoiterType waypointName ' + 'waypointPosition waypoints waypointScript waypointsEnabledUAV waypointShow waypointSpeed ' + 'waypointStatements waypointTimeout waypointTimeoutCurrent waypointType waypointVisible ' + 'weaponAccessories weaponAccessoriesCargo weaponCargo weaponDirection weaponInertia weaponLowered ' + 'weapons weaponsItems weaponsItemsCargo weaponState weaponsTurret weightRTD west WFSideText wind', literal: 'true false nil' }, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.NUMBER_MODE, VARIABLE, FUNCTION, STRINGS, CPP.preprocessor ], illegal: /#/ }; }; /***/ }, /* 313 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMENT_MODE = hljs.COMMENT('--', '$'); return { case_insensitive: true, illegal: /[<>{}*#]/, contains: [ { beginKeywords: 'begin end start commit rollback savepoint lock alter create drop rename call ' + 'delete do handler insert load replace select truncate update set show pragma grant ' + 'merge describe use explain help declare prepare execute deallocate release ' + 'unlock purge reset change stop analyze cache flush optimize repair kill ' + 'install uninstall checksum restore check backup revoke comment', end: /;/, endsWithParent: true, lexemes: /[\w\.]+/, keywords: { keyword: 'abort abs absolute acc acce accep accept access accessed accessible account acos action activate add ' + 'addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias ' + 'allocate allow alter always analyze ancillary and any anydata anydataset anyschema anytype apply ' + 'archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan ' + 'atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid ' + 'authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile ' + 'before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float ' + 'binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both 
bound ' + 'buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel ' + 'capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base ' + 'char_length character_length characters characterset charindex charset charsetform charsetid check ' + 'checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close ' + 'cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation ' + 'collect colu colum column column_value columns columns_updated comment commit compact compatibility ' + 'compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn ' + 'connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection ' + 'consider consistent constant constraint constraints constructor container content contents context ' + 'contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost ' + 'count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation ' + 'critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user ' + 'cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add ' + 'date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts ' + 'day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate ' + 'declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults ' + 'deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank ' + 'depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor ' + 'deterministic diagnostics difference dimension direct_load directory disable disable_all ' + 'disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div ' + 'do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable ' + 'editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt ' + 'end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors ' + 'escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding ' + 'execu execut execute exempt exists exit exp expire explain export export_set extended extent external ' + 'external_1 external_2 externally extract failed failed_login_attempts failover failure far fast ' + 'feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final ' + 'finish first first_value fixed flash_cache flashback floor flush following follows for forall force ' + 'form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ' + 'ftp full function general generated get get_format get_lock getdate getutcdate global global_name ' + 'globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups ' + 'gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex ' + 'hierarchy high high_priority hosts hour http id ident_current ident_incr ident_seed identified ' + 'identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment ' + 'index indexes indexing 
indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile ' + 'initial initialized initially initrans inmemory inner innodb input insert install instance instantiable ' + 'instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat ' + 'is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists ' + 'keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lax lcase ' + 'lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit ' + 'lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate ' + 'locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call ' + 'logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime ' + 'managed management manual map mapping mask master master_pos_wait match matched materialized max ' + 'maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans ' + 'md5 measures median medium member memcompress memory merge microsecond mid migration min minextents ' + 'minimum mining minus minute minvalue missing mod mode model modification modify module monitoring month ' + 'months mount move movement multiset mutex name name_const names nan national native natural nav nchar ' + 'nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile ' + 'nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile ' + 'nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder ' + 'nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck ' + 'noswitch not nothing notice notrim novalidate now nowait nth_value nullif nulls num numb numbe ' + 'nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ' + 'ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old ' + 'on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date ' + 'oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary ' + 'out outer outfile outline output over overflow overriding package pad parallel parallel_enable ' + 'parameters parent parse partial partition partitions pascal passing password password_grace_time ' + 'password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex ' + 'pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc ' + 'performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin ' + 'policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction ' + 'prediction_cost prediction_details prediction_probability prediction_set prepare present preserve ' + 'prior priority private private_sga privileges procedural procedure procedure_analyze processlist ' + 'profiles project prompt protection public publishingservername purge quarter query quick quiesce quota ' + 'quotename radians raise rand range rank raw read reads readsize rebuild record records ' + 'recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh ' + 'regexp_like register regr_avgx 
regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy ' + 'reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename ' + 'repair repeat replace replicate replication required reset resetlogs resize resource respect restore ' + 'restricted result result_cache resumable resume retention return returning returns reuse reverse revoke ' + 'right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows ' + 'rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll ' + 'sdo_georaster sdo_topo_geometry search sec_to_time second section securefile security seed segment select ' + 'self sequence sequential serializable server servererror session session_user sessions_per_user set ' + 'sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor ' + 'si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin ' + 'size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex ' + 'source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows ' + 'sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone ' + 'standby start starting startup statement static statistics stats_binomial_test stats_crosstab ' + 'stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep ' + 'stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev ' + 'stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate ' + 'subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum ' + 'suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate ' + 'sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tan tdo ' + 'template temporary terminated tertiary_weights test than then thread through tier ties time time_format ' + 'time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr ' + 'timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking ' + 'transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate ' + 'try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress ' + 'under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unpivot ' + 'unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert ' + 'url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date ' + 'utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var ' + 'var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray ' + 'verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear ' + 'wellformed when whene whenev wheneve whenever where while whitespace with within without work wrapped ' + 'xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces ' + 'xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek', 
literal: 'true false null', built_in: 'array bigint binary bit blob boolean char character date dec decimal float int int8 integer interval number ' + 'numeric real record serial serial8 smallint text varchar varying void' }, contains: [ { className: 'string', begin: '\'', end: '\'', contains: [hljs.BACKSLASH_ESCAPE, {begin: '\'\''}] }, { className: 'string', begin: '"', end: '"', contains: [hljs.BACKSLASH_ESCAPE, {begin: '""'}] }, { className: 'string', begin: '`', end: '`', contains: [hljs.BACKSLASH_ESCAPE] }, hljs.C_NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE, COMMENT_MODE ] }, hljs.C_BLOCK_COMMENT_MODE, COMMENT_MODE ] }; }; /***/ }, /* 314 */ /***/ function(module, exports) { module.exports = function(hljs) { return { contains: [ hljs.HASH_COMMENT_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { begin: hljs.UNDERSCORE_IDENT_RE, lexemes: hljs.UNDERSCORE_IDENT_RE, keywords: { // Stan's keywords name: 'for in while repeat until if then else', // Stan's probability distributions (less beta and gamma, as commonly // used for parameter names). So far, _log and _rng variants are not // included symbol: 'bernoulli bernoulli_logit binomial binomial_logit ' + 'beta_binomial hypergeometric categorical categorical_logit ' + 'ordered_logistic neg_binomial neg_binomial_2 ' + 'neg_binomial_2_log poisson poisson_log multinomial normal ' + 'exp_mod_normal skew_normal student_t cauchy double_exponential ' + 'logistic gumbel lognormal chi_square inv_chi_square ' + 'scaled_inv_chi_square exponential inv_gamma weibull frechet ' + 'rayleigh wiener pareto pareto_type_2 von_mises uniform ' + 'multi_normal multi_normal_prec multi_normal_cholesky multi_gp ' + 'multi_gp_cholesky multi_student_t gaussian_dlm_obs dirichlet ' + 'lkj_corr lkj_corr_cholesky wishart inv_wishart', // Stan's data types 'selector-tag': 'int real vector simplex unit_vector ordered positive_ordered ' + 'row_vector matrix cholesky_factor_corr cholesky_factor_cov ' + 'corr_matrix cov_matrix', // Stan's model blocks title: 'functions model data parameters quantities transformed ' + 'generated', literal: 'true false' }, relevance: 0 }, // The below is all taken from the R language definition { // hex value className: 'number', begin: "0[xX][0-9a-fA-F]+[Li]?\\b", relevance: 0 }, { // explicit integer className: 'number', begin: "\\d+(?:[eE][+\\-]?\\d*)?L\\b", relevance: 0 }, { // number with trailing decimal className: 'number', begin: "\\d+\\.(?!\\d)(?:i\\b)?", relevance: 0 }, { // number className: 'number', begin: "\\d+(?:\\.\\d*)?(?:[eE][+\\-]?\\d*)?i?\\b", relevance: 0 }, { // number with leading decimal className: 'number', begin: "\\.\\d+(?:[eE][+\\-]?\\d*)?i?\\b", relevance: 0 } ] }; }; /***/ }, /* 315 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['do', 'ado'], case_insensitive: true, keywords: 'if else in foreach for forv forva forval forvalu forvalue forvalues by bys bysort xi quietly qui capture about ac ac_7 acprplot acprplot_7 adjust ado adopath adoupdate alpha ameans an ano anov anova anova_estat anova_terms anovadef aorder ap app appe appen append arch arch_dr arch_estat arch_p archlm areg areg_p args arima arima_dr arima_estat arima_p as asmprobit asmprobit_estat asmprobit_lf asmprobit_mfx__dlg asmprobit_p ass asse asser assert avplot avplot_7 avplots avplots_7 bcskew0 bgodfrey binreg bip0_lf biplot bipp_lf bipr_lf bipr_p biprobit bitest bitesti bitowt blogit bmemsize boot bootsamp bootstrap 
bootstrap_8 boxco_l boxco_p boxcox boxcox_6 boxcox_p bprobit br break brier bro brow brows browse brr brrstat bs bs_7 bsampl_w bsample bsample_7 bsqreg bstat bstat_7 bstat_8 bstrap bstrap_7 ca ca_estat ca_p cabiplot camat canon canon_8 canon_8_p canon_estat canon_p cap caprojection capt captu captur capture cat cc cchart cchart_7 cci cd censobs_table centile cf char chdir checkdlgfiles checkestimationsample checkhlpfiles checksum chelp ci cii cl class classutil clear cli clis clist clo clog clog_lf clog_p clogi clogi_sw clogit clogit_lf clogit_p clogitp clogl_sw cloglog clonevar clslistarray cluster cluster_measures cluster_stop cluster_tree cluster_tree_8 clustermat cmdlog cnr cnre cnreg cnreg_p cnreg_sw cnsreg codebook collaps4 collapse colormult_nb colormult_nw compare compress conf confi confir confirm conren cons const constr constra constrai constrain constraint continue contract copy copyright copysource cor corc corr corr2data corr_anti corr_kmo corr_smc corre correl correla correlat correlate corrgram cou coun count cox cox_p cox_sw coxbase coxhaz coxvar cprplot cprplot_7 crc cret cretu cretur creturn cross cs cscript cscript_log csi ct ct_is ctset ctst_5 ctst_st cttost cumsp cumsp_7 cumul cusum cusum_7 cutil d|0 datasig datasign datasigna datasignat datasignatu datasignatur datasignature datetof db dbeta de dec deco decod decode deff des desc descr descri describ describe destring dfbeta dfgls dfuller di di_g dir dirstats dis discard disp disp_res disp_s displ displa display distinct do doe doed doedi doedit dotplot dotplot_7 dprobit drawnorm drop ds ds_util dstdize duplicates durbina dwstat dydx e|0 ed edi edit egen eivreg emdef en enc enco encod encode eq erase ereg ereg_lf ereg_p ereg_sw ereghet ereghet_glf ereghet_glf_sh ereghet_gp ereghet_ilf ereghet_ilf_sh ereghet_ip eret eretu eretur ereturn err erro error est est_cfexist est_cfname est_clickable est_expand est_hold est_table est_unhold est_unholdok estat estat_default estat_summ estat_vce_only esti estimates etodow etof etomdy ex exi exit expand expandcl fac fact facto factor factor_estat factor_p factor_pca_rotated factor_rotate factormat fcast fcast_compute fcast_graph fdades fdadesc fdadescr fdadescri fdadescrib fdadescribe fdasav fdasave fdause fh_st file open file read file close file filefilter fillin find_hlp_file findfile findit findit_7 fit fl fli flis flist for5_0 form forma format fpredict frac_154 frac_adj frac_chk frac_cox frac_ddp frac_dis frac_dv frac_in frac_mun frac_pp frac_pq frac_pv frac_wgt frac_xo fracgen fracplot fracplot_7 fracpoly fracpred fron_ex fron_hn fron_p fron_tn fron_tn2 frontier ftodate ftoe ftomdy ftowdate g|0 gamhet_glf gamhet_gp gamhet_ilf gamhet_ip gamma gamma_d2 gamma_p gamma_sw gammahet gdi_hexagon gdi_spokes ge gen gene gener genera generat generate genrank genstd genvmean gettoken gl gladder gladder_7 glim_l01 glim_l02 glim_l03 glim_l04 glim_l05 glim_l06 glim_l07 glim_l08 glim_l09 glim_l10 glim_l11 glim_l12 glim_lf glim_mu glim_nw1 glim_nw2 glim_nw3 glim_p glim_v1 glim_v2 glim_v3 glim_v4 glim_v5 glim_v6 glim_v7 glm glm_6 glm_p glm_sw glmpred glo glob globa global glogit glogit_8 glogit_p gmeans gnbre_lf gnbreg gnbreg_5 gnbreg_p gomp_lf gompe_sw gomper_p gompertz gompertzhet gomphet_glf gomphet_glf_sh gomphet_gp gomphet_ilf gomphet_ilf_sh gomphet_ip gphdot gphpen gphprint gprefs gprobi_p gprobit gprobit_8 gr gr7 gr_copy gr_current gr_db gr_describe gr_dir gr_draw gr_draw_replay gr_drop gr_edit gr_editviewopts gr_example gr_example2 gr_export gr_print gr_qscheme gr_query gr_read 
gr_rename gr_replay gr_save gr_set gr_setscheme gr_table gr_undo gr_use graph graph7 grebar greigen greigen_7 greigen_8 grmeanby grmeanby_7 gs_fileinfo gs_filetype gs_graphinfo gs_stat gsort gwood h|0 hadimvo hareg hausman haver he heck_d2 heckma_p heckman heckp_lf heckpr_p heckprob hel help hereg hetpr_lf hetpr_p hetprob hettest hexdump hilite hist hist_7 histogram hlogit hlu hmeans hotel hotelling hprobit hreg hsearch icd9 icd9_ff icd9p iis impute imtest inbase include inf infi infil infile infix inp inpu input ins insheet insp inspe inspec inspect integ inten intreg intreg_7 intreg_p intrg2_ll intrg_ll intrg_ll2 ipolate iqreg ir irf irf_create irfm iri is_svy is_svysum isid istdize ivprob_1_lf ivprob_lf ivprobit ivprobit_p ivreg ivreg_footnote ivtob_1_lf ivtob_lf ivtobit ivtobit_p jackknife jacknife jknife jknife_6 jknife_8 jkstat joinby kalarma1 kap kap_3 kapmeier kappa kapwgt kdensity kdensity_7 keep ksm ksmirnov ktau kwallis l|0 la lab labe label labelbook ladder levels levelsof leverage lfit lfit_p li lincom line linktest lis list lloghet_glf lloghet_glf_sh lloghet_gp lloghet_ilf lloghet_ilf_sh lloghet_ip llogi_sw llogis_p llogist llogistic llogistichet lnorm_lf lnorm_sw lnorma_p lnormal lnormalhet lnormhet_glf lnormhet_glf_sh lnormhet_gp lnormhet_ilf lnormhet_ilf_sh lnormhet_ip lnskew0 loadingplot loc loca local log logi logis_lf logistic logistic_p logit logit_estat logit_p loglogs logrank loneway lookfor lookup lowess lowess_7 lpredict lrecomp lroc lroc_7 lrtest ls lsens lsens_7 lsens_x lstat ltable ltable_7 ltriang lv lvr2plot lvr2plot_7 m|0 ma mac macr macro makecns man manova manova_estat manova_p manovatest mantel mark markin markout marksample mat mat_capp mat_order mat_put_rr mat_rapp mata mata_clear mata_describe mata_drop mata_matdescribe mata_matsave mata_matuse mata_memory mata_mlib mata_mosave mata_rename mata_which matalabel matcproc matlist matname matr matri matrix matrix_input__dlg matstrik mcc mcci md0_ md1_ md1debug_ md2_ md2debug_ mds mds_estat mds_p mdsconfig mdslong mdsmat mdsshepard mdytoe mdytof me_derd mean means median memory memsize meqparse mer merg merge mfp mfx mhelp mhodds minbound mixed_ll mixed_ll_reparm mkassert mkdir mkmat mkspline ml ml_5 ml_adjs ml_bhhhs ml_c_d ml_check ml_clear ml_cnt ml_debug ml_defd ml_e0 ml_e0_bfgs ml_e0_cycle ml_e0_dfp ml_e0i ml_e1 ml_e1_bfgs ml_e1_bhhh ml_e1_cycle ml_e1_dfp ml_e2 ml_e2_cycle ml_ebfg0 ml_ebfr0 ml_ebfr1 ml_ebh0q ml_ebhh0 ml_ebhr0 ml_ebr0i ml_ecr0i ml_edfp0 ml_edfr0 ml_edfr1 ml_edr0i ml_eds ml_eer0i ml_egr0i ml_elf ml_elf_bfgs ml_elf_bhhh ml_elf_cycle ml_elf_dfp ml_elfi ml_elfs ml_enr0i ml_enrr0 ml_erdu0 ml_erdu0_bfgs ml_erdu0_bhhh ml_erdu0_bhhhq ml_erdu0_cycle ml_erdu0_dfp ml_erdu0_nrbfgs ml_exde ml_footnote ml_geqnr ml_grad0 ml_graph ml_hbhhh ml_hd0 ml_hold ml_init ml_inv ml_log ml_max ml_mlout ml_mlout_8 ml_model ml_nb0 ml_opt ml_p ml_plot ml_query ml_rdgrd ml_repor ml_s_e ml_score ml_searc ml_technique ml_unhold mleval mlf_ mlmatbysum mlmatsum mlog mlogi mlogit mlogit_footnote mlogit_p mlopts mlsum mlvecsum mnl0_ mor more mov move mprobit mprobit_lf mprobit_p mrdu0_ mrdu1_ mvdecode mvencode mvreg mvreg_estat n|0 nbreg nbreg_al nbreg_lf nbreg_p nbreg_sw nestreg net newey newey_7 newey_p news nl nl_7 nl_9 nl_9_p nl_p nl_p_7 nlcom nlcom_p nlexp2 nlexp2_7 nlexp2a nlexp2a_7 nlexp3 nlexp3_7 nlgom3 nlgom3_7 nlgom4 nlgom4_7 nlinit nllog3 nllog3_7 nllog4 nllog4_7 nlog_rd nlogit nlogit_p nlogitgen nlogittree nlpred no nobreak noi nois noisi noisil noisily note notes notes_dlg nptrend numlabel numlist odbc old_ver 
olo olog ologi ologi_sw ologit ologit_p ologitp on one onew onewa oneway op_colnm op_comp op_diff op_inv op_str opr opro oprob oprob_sw oprobi oprobi_p oprobit oprobitp opts_exclusive order orthog orthpoly ou out outf outfi outfil outfile outs outsh outshe outshee outsheet ovtest pac pac_7 palette parse parse_dissim pause pca pca_8 pca_display pca_estat pca_p pca_rotate pcamat pchart pchart_7 pchi pchi_7 pcorr pctile pentium pergram pergram_7 permute permute_8 personal peto_st pkcollapse pkcross pkequiv pkexamine pkexamine_7 pkshape pksumm pksumm_7 pl plo plot plugin pnorm pnorm_7 poisgof poiss_lf poiss_sw poisso_p poisson poisson_estat post postclose postfile postutil pperron pr prais prais_e prais_e2 prais_p predict predictnl preserve print pro prob probi probit probit_estat probit_p proc_time procoverlay procrustes procrustes_estat procrustes_p profiler prog progr progra program prop proportion prtest prtesti pwcorr pwd q\\s qby qbys qchi qchi_7 qladder qladder_7 qnorm qnorm_7 qqplot qqplot_7 qreg qreg_c qreg_p qreg_sw qu quadchk quantile quantile_7 que quer query range ranksum ratio rchart rchart_7 rcof recast reclink recode reg reg3 reg3_p regdw regr regre regre_p2 regres regres_p regress regress_estat regriv_p remap ren rena renam rename renpfix repeat replace report reshape restore ret retu retur return rm rmdir robvar roccomp roccomp_7 roccomp_8 rocf_lf rocfit rocfit_8 rocgold rocplot rocplot_7 roctab roctab_7 rolling rologit rologit_p rot rota rotat rotate rotatemat rreg rreg_p ru run runtest rvfplot rvfplot_7 rvpplot rvpplot_7 sa safesum sample sampsi sav save savedresults saveold sc sca scal scala scalar scatter scm_mine sco scob_lf scob_p scobi_sw scobit scor score scoreplot scoreplot_help scree screeplot screeplot_help sdtest sdtesti se search separate seperate serrbar serrbar_7 serset set set_defaults sfrancia sh she shel shell shewhart shewhart_7 signestimationsample signrank signtest simul simul_7 simulate simulate_8 sktest sleep slogit slogit_d2 slogit_p smooth snapspan so sor sort spearman spikeplot spikeplot_7 spikeplt spline_x split sqreg sqreg_p sret sretu sretur sreturn ssc st st_ct st_hc st_hcd st_hcd_sh st_is st_issys st_note st_promo st_set st_show st_smpl st_subid stack statsby statsby_8 stbase stci stci_7 stcox stcox_estat stcox_fr stcox_fr_ll stcox_p stcox_sw stcoxkm stcoxkm_7 stcstat stcurv stcurve stcurve_7 stdes stem stepwise stereg stfill stgen stir stjoin stmc stmh stphplot stphplot_7 stphtest stphtest_7 stptime strate strate_7 streg streg_sw streset sts sts_7 stset stsplit stsum sttocc sttoct stvary stweib su suest suest_8 sum summ summa summar summari summariz summarize sunflower sureg survcurv survsum svar svar_p svmat svy svy_disp svy_dreg svy_est svy_est_7 svy_estat svy_get svy_gnbreg_p svy_head svy_header svy_heckman_p svy_heckprob_p svy_intreg_p svy_ivreg_p svy_logistic_p svy_logit_p svy_mlogit_p svy_nbreg_p svy_ologit_p svy_oprobit_p svy_poisson_p svy_probit_p svy_regress_p svy_sub svy_sub_7 svy_x svy_x_7 svy_x_p svydes svydes_8 svygen svygnbreg svyheckman svyheckprob svyintreg svyintreg_7 svyintrg svyivreg svylc svylog_p svylogit svymarkout svymarkout_8 svymean svymlog svymlogit svynbreg svyolog svyologit svyoprob svyoprobit svyopts svypois svypois_7 svypoisson svyprobit svyprobt svyprop svyprop_7 svyratio svyreg svyreg_p svyregress svyset svyset_7 svyset_8 svytab svytab_7 svytest svytotal sw sw_8 swcnreg swcox swereg swilk swlogis swlogit swologit swoprbt swpois swprobit swqreg swtobit swweib symmetry symmi symplot symplot_7 syntax sysdescribe 
sysdir sysuse szroeter ta tab tab1 tab2 tab_or tabd tabdi tabdis tabdisp tabi table tabodds tabodds_7 tabstat tabu tabul tabula tabulat tabulate te tempfile tempname tempvar tes test testnl testparm teststd tetrachoric time_it timer tis tob tobi tobit tobit_p tobit_sw token tokeni tokeniz tokenize tostring total translate translator transmap treat_ll treatr_p treatreg trim trnb_cons trnb_mean trpoiss_d2 trunc_ll truncr_p truncreg tsappend tset tsfill tsline tsline_ex tsreport tsrevar tsrline tsset tssmooth tsunab ttest ttesti tut_chk tut_wait tutorial tw tware_st two twoway twoway__fpfit_serset twoway__function_gen twoway__histogram_gen twoway__ipoint_serset twoway__ipoints_serset twoway__kdensity_gen twoway__lfit_serset twoway__normgen_gen twoway__pci_serset twoway__qfit_serset twoway__scatteri_serset twoway__sunflower_gen twoway_ksm_serset ty typ type typeof u|0 unab unabbrev unabcmd update us use uselabel var var_mkcompanion var_p varbasic varfcast vargranger varirf varirf_add varirf_cgraph varirf_create varirf_ctable varirf_describe varirf_dir varirf_drop varirf_erase varirf_graph varirf_ograph varirf_rename varirf_set varirf_table varlist varlmar varnorm varsoc varstable varstable_w varstable_w2 varwle vce vec vec_fevd vec_mkphi vec_p vec_p_w vecirf_create veclmar veclmar_w vecnorm vecnorm_w vecrank vecstable verinst vers versi versio version view viewsource vif vwls wdatetof webdescribe webseek webuse weib1_lf weib2_lf weib_lf weib_lf0 weibhet_glf weibhet_glf_sh weibhet_glfa weibhet_glfa_sh weibhet_gp weibhet_ilf weibhet_ilf_sh weibhet_ilfa weibhet_ilfa_sh weibhet_ip weibu_sw weibul_p weibull weibull_c weibull_s weibullhet wh whelp whi which whil while wilc_st wilcoxon win wind windo window winexec wntestb wntestb_7 wntestq xchart xchart_7 xcorr xcorr_7 xi xi_6 xmlsav xmlsave xmluse xpose xsh xshe xshel xshell xt_iis xt_tis xtab_p xtabond xtbin_p xtclog xtcloglog xtcloglog_8 xtcloglog_d2 xtcloglog_pa_p xtcloglog_re_p xtcnt_p xtcorr xtdata xtdes xtfront_p xtfrontier xtgee xtgee_elink xtgee_estat xtgee_makeivar xtgee_p xtgee_plink xtgls xtgls_p xthaus xthausman xtht_p xthtaylor xtile xtint_p xtintreg xtintreg_8 xtintreg_d2 xtintreg_p xtivp_1 xtivp_2 xtivreg xtline xtline_ex xtlogit xtlogit_8 xtlogit_d2 xtlogit_fe_p xtlogit_pa_p xtlogit_re_p xtmixed xtmixed_estat xtmixed_p xtnb_fe xtnb_lf xtnbreg xtnbreg_pa_p xtnbreg_refe_p xtpcse xtpcse_p xtpois xtpoisson xtpoisson_d2 xtpoisson_pa_p xtpoisson_refe_p xtpred xtprobit xtprobit_8 xtprobit_d2 xtprobit_re_p xtps_fe xtps_lf xtps_ren xtps_ren_8 xtrar_p xtrc xtrc_p xtrchh xtrefe_p xtreg xtreg_be xtreg_fe xtreg_ml xtreg_pa_p xtreg_re xtregar xtrere_p xtset xtsf_ll xtsf_llti xtsum xttab xttest0 xttobit xttobit_8 xttobit_p xttrans yx yxview__barlike_draw yxview_area_draw yxview_bar_draw yxview_dot_draw yxview_dropline_draw yxview_function_draw yxview_iarrow_draw yxview_ilabels_draw yxview_normal_draw yxview_pcarrow_draw yxview_pcbarrow_draw yxview_pccapsym_draw yxview_pcscatter_draw yxview_pcspike_draw yxview_rarea_draw yxview_rbar_draw yxview_rbarm_draw yxview_rcap_draw yxview_rcapsym_draw yxview_rconnected_draw yxview_rline_draw yxview_rscatter_draw yxview_rspike_draw yxview_spike_draw yxview_sunflower_draw zap_s zinb zinb_llf zinb_plf zip zip_llf zip_p zip_plf zt_ct_5 zt_hc_5 zt_hcd_5 zt_is_5 zt_iss_5 zt_sho_5 zt_smp_5 ztbase_5 ztcox_5 ztdes_5 ztereg_5 ztfill_5 ztgen_5 ztir_5 ztjoin_5 ztnb ztnb_p ztp ztp_p zts_5 ztset_5 ztspli_5 ztsum_5 zttoct_5 ztvary_5 ztweib_5', contains: [ { className: 'symbol', begin: /`[a-zA-Z0-9_]+'/ }, { className: 
'variable', begin: /\$\{?[a-zA-Z0-9_]+\}?/ }, { className: 'string', variants: [ {begin: '`"[^\r\n]*?"\''}, {begin: '"[^\r\n"]*"'} ] }, { className: 'built_in', variants: [ { begin: '\\b(abs|acos|asin|atan|atan2|atanh|ceil|cloglog|comb|cos|digamma|exp|floor|invcloglog|invlogit|ln|lnfact|lnfactorial|lngamma|log|log10|max|min|mod|reldif|round|sign|sin|sqrt|sum|tan|tanh|trigamma|trunc|betaden|Binomial|binorm|binormal|chi2|chi2tail|dgammapda|dgammapdada|dgammapdadx|dgammapdx|dgammapdxdx|F|Fden|Ftail|gammaden|gammap|ibeta|invbinomial|invchi2|invchi2tail|invF|invFtail|invgammap|invibeta|invnchi2|invnFtail|invnibeta|invnorm|invnormal|invttail|nbetaden|nchi2|nFden|nFtail|nibeta|norm|normal|normalden|normd|npnchi2|tden|ttail|uniform|abbrev|char|index|indexnot|length|lower|ltrim|match|plural|proper|real|regexm|regexr|regexs|reverse|rtrim|string|strlen|strlower|strltrim|strmatch|strofreal|strpos|strproper|strreverse|strrtrim|strtrim|strupper|subinstr|subinword|substr|trim|upper|word|wordcount|_caller|autocode|byteorder|chop|clip|cond|e|epsdouble|epsfloat|group|inlist|inrange|irecode|matrix|maxbyte|maxdouble|maxfloat|maxint|maxlong|mi|minbyte|mindouble|minfloat|minint|minlong|missing|r|recode|replay|return|s|scalar|d|date|day|dow|doy|halfyear|mdy|month|quarter|week|year|d|daily|dofd|dofh|dofm|dofq|dofw|dofy|h|halfyearly|hofd|m|mofd|monthly|q|qofd|quarterly|tin|twithin|w|weekly|wofd|y|yearly|yh|ym|yofd|yq|yw|cholesky|colnumb|colsof|corr|det|diag|diag0cnt|el|get|hadamard|I|inv|invsym|issym|issymmetric|J|matmissing|matuniform|mreldif|nullmat|rownumb|rowsof|sweep|syminv|trace|vec|vecdiag)(?=\\(|$)' } ] }, hljs.COMMENT('^[ \t]*\\*.*$', false), hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ] }; }; /***/ }, /* 316 */ /***/ function(module, exports) { module.exports = function(hljs) { var STEP21_IDENT_RE = '[A-Z_][A-Z0-9_.]*'; var STEP21_KEYWORDS = { keyword: 'HEADER ENDSEC DATA' }; var STEP21_START = { className: 'meta', begin: 'ISO-10303-21;', relevance: 10 }; var STEP21_CLOSE = { className: 'meta', begin: 'END-ISO-10303-21;', relevance: 10 }; return { aliases: ['p21', 'step', 'stp'], case_insensitive: true, // STEP 21 is case insensitive in theory, in practice all non-comments are capitalized. 
lexemes: STEP21_IDENT_RE, keywords: STEP21_KEYWORDS, contains: [ STEP21_START, STEP21_CLOSE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.COMMENT('/\\*\\*!', '\\*/'), hljs.C_NUMBER_MODE, hljs.inherit(hljs.APOS_STRING_MODE, {illegal: null}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}), { className: 'string', begin: "'", end: "'" }, { className: 'symbol', variants: [ { begin: '#', end: '\\d+', illegal: '\\W' } ] } ] }; }; /***/ }, /* 317 */ /***/ function(module, exports) { module.exports = function(hljs) { var VARIABLE = { className: 'variable', begin: '\\$' + hljs.IDENT_RE }; var HEX_COLOR = { className: 'number', begin: '#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})' }; var AT_KEYWORDS = [ 'charset', 'css', 'debug', 'extend', 'font-face', 'for', 'import', 'include', 'media', 'mixin', 'page', 'warn', 'while' ]; var PSEUDO_SELECTORS = [ 'after', 'before', 'first-letter', 'first-line', 'active', 'first-child', 'focus', 'hover', 'lang', 'link', 'visited' ]; var TAGS = [ 'a', 'abbr', 'address', 'article', 'aside', 'audio', 'b', 'blockquote', 'body', 'button', 'canvas', 'caption', 'cite', 'code', 'dd', 'del', 'details', 'dfn', 'div', 'dl', 'dt', 'em', 'fieldset', 'figcaption', 'figure', 'footer', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'header', 'hgroup', 'html', 'i', 'iframe', 'img', 'input', 'ins', 'kbd', 'label', 'legend', 'li', 'mark', 'menu', 'nav', 'object', 'ol', 'p', 'q', 'quote', 'samp', 'section', 'span', 'strong', 'summary', 'sup', 'table', 'tbody', 'td', 'textarea', 'tfoot', 'th', 'thead', 'time', 'tr', 'ul', 'var', 'video' ]; var TAG_END = '[\\.\\s\\n\\[\\:,]'; var ATTRIBUTES = [ 'align-content', 'align-items', 'align-self', 'animation', 'animation-delay', 'animation-direction', 'animation-duration', 'animation-fill-mode', 'animation-iteration-count', 'animation-name', 'animation-play-state', 'animation-timing-function', 'auto', 'backface-visibility', 'background', 'background-attachment', 'background-clip', 'background-color', 'background-image', 'background-origin', 'background-position', 'background-repeat', 'background-size', 'border', 'border-bottom', 'border-bottom-color', 'border-bottom-left-radius', 'border-bottom-right-radius', 'border-bottom-style', 'border-bottom-width', 'border-collapse', 'border-color', 'border-image', 'border-image-outset', 'border-image-repeat', 'border-image-slice', 'border-image-source', 'border-image-width', 'border-left', 'border-left-color', 'border-left-style', 'border-left-width', 'border-radius', 'border-right', 'border-right-color', 'border-right-style', 'border-right-width', 'border-spacing', 'border-style', 'border-top', 'border-top-color', 'border-top-left-radius', 'border-top-right-radius', 'border-top-style', 'border-top-width', 'border-width', 'bottom', 'box-decoration-break', 'box-shadow', 'box-sizing', 'break-after', 'break-before', 'break-inside', 'caption-side', 'clear', 'clip', 'clip-path', 'color', 'column-count', 'column-fill', 'column-gap', 'column-rule', 'column-rule-color', 'column-rule-style', 'column-rule-width', 'column-span', 'column-width', 'columns', 'content', 'counter-increment', 'counter-reset', 'cursor', 'direction', 'display', 'empty-cells', 'filter', 'flex', 'flex-basis', 'flex-direction', 'flex-flow', 'flex-grow', 'flex-shrink', 'flex-wrap', 'float', 'font', 'font-family', 'font-feature-settings', 'font-kerning', 'font-language-override', 'font-size', 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant', 'font-variant-ligatures', 'font-weight', 'height', 'hyphens', 'icon', 
'image-orientation', 'image-rendering', 'image-resolution', 'ime-mode', 'inherit', 'initial', 'justify-content', 'left', 'letter-spacing', 'line-height', 'list-style', 'list-style-image', 'list-style-position', 'list-style-type', 'margin', 'margin-bottom', 'margin-left', 'margin-right', 'margin-top', 'marks', 'mask', 'max-height', 'max-width', 'min-height', 'min-width', 'nav-down', 'nav-index', 'nav-left', 'nav-right', 'nav-up', 'none', 'normal', 'object-fit', 'object-position', 'opacity', 'order', 'orphans', 'outline', 'outline-color', 'outline-offset', 'outline-style', 'outline-width', 'overflow', 'overflow-wrap', 'overflow-x', 'overflow-y', 'padding', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top', 'page-break-after', 'page-break-before', 'page-break-inside', 'perspective', 'perspective-origin', 'pointer-events', 'position', 'quotes', 'resize', 'right', 'tab-size', 'table-layout', 'text-align', 'text-align-last', 'text-decoration', 'text-decoration-color', 'text-decoration-line', 'text-decoration-style', 'text-indent', 'text-overflow', 'text-rendering', 'text-shadow', 'text-transform', 'text-underline-position', 'top', 'transform', 'transform-origin', 'transform-style', 'transition', 'transition-delay', 'transition-duration', 'transition-property', 'transition-timing-function', 'unicode-bidi', 'vertical-align', 'visibility', 'white-space', 'widows', 'width', 'word-break', 'word-spacing', 'word-wrap', 'z-index' ]; // illegals var ILLEGAL = [ '\\?', '(\\bReturn\\b)', // monkey '(\\bEnd\\b)', // monkey '(\\bend\\b)', // vbscript '(\\bdef\\b)', // gradle ';', // a whole lot of languages '#\\s', // markdown '\\*\\s', // markdown '===\\s', // markdown '\\|', '%', // prolog ]; return { aliases: ['styl'], case_insensitive: false, keywords: 'if else for in', illegal: '(' + ILLEGAL.join('|') + ')', contains: [ // strings hljs.QUOTE_STRING_MODE, hljs.APOS_STRING_MODE, // comments hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, // hex colors HEX_COLOR, // class tag { begin: '\\.[a-zA-Z][a-zA-Z0-9_-]*' + TAG_END, returnBegin: true, contains: [ {className: 'selector-class', begin: '\\.[a-zA-Z][a-zA-Z0-9_-]*'} ] }, // id tag { begin: '\\#[a-zA-Z][a-zA-Z0-9_-]*' + TAG_END, returnBegin: true, contains: [ {className: 'selector-id', begin: '\\#[a-zA-Z][a-zA-Z0-9_-]*'} ] }, // tags { begin: '\\b(' + TAGS.join('|') + ')' + TAG_END, returnBegin: true, contains: [ {className: 'selector-tag', begin: '\\b[a-zA-Z][a-zA-Z0-9_-]*'} ] }, // psuedo selectors { begin: '&?:?:\\b(' + PSEUDO_SELECTORS.join('|') + ')' + TAG_END }, // @ keywords { begin: '\@(' + AT_KEYWORDS.join('|') + ')\\b' }, // variables VARIABLE, // dimension hljs.CSS_NUMBER_MODE, // number hljs.NUMBER_MODE, // functions // - only from beginning of line + whitespace { className: 'function', begin: '^[a-zA-Z][a-zA-Z0-9_\-]*\\(.*\\)', illegal: '[\\n]', returnBegin: true, contains: [ {className: 'title', begin: '\\b[a-zA-Z][a-zA-Z0-9_\-]*'}, { className: 'params', begin: /\(/, end: /\)/, contains: [ HEX_COLOR, VARIABLE, hljs.APOS_STRING_MODE, hljs.CSS_NUMBER_MODE, hljs.NUMBER_MODE, hljs.QUOTE_STRING_MODE ] } ] }, // attributes // - only from beginning of line + whitespace // - must have whitespace after it { className: 'attribute', begin: '\\b(' + ATTRIBUTES.reverse().join('|') + ')\\b', starts: { // value container end: /;|$/, contains: [ HEX_COLOR, VARIABLE, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.CSS_NUMBER_MODE, hljs.NUMBER_MODE, hljs.C_BLOCK_COMMENT_MODE ], illegal: /\./, relevance: 0 } } ] }; }; /***/ }, /* 318 
*/ /***/ function(module, exports) { module.exports = function(hljs) { var DETAILS = { className: 'string', begin: '\\[\n(multipart)?', end: '\\]\n' }; var TIME = { className: 'string', begin: '\\d{4}-\\d{2}-\\d{2}(\\s+)\\d{2}:\\d{2}:\\d{2}\.\\d+Z' }; var PROGRESSVALUE = { className: 'string', begin: '(\\+|-)\\d+' }; var KEYWORDS = { className: 'keyword', relevance: 10, variants: [ { begin: '^(test|testing|success|successful|failure|error|skip|xfail|uxsuccess)(:?)\\s+(test)?' }, { begin: '^progress(:?)(\\s+)?(pop|push)?' }, { begin: '^tags:' }, { begin: '^time:' } ], }; return { case_insensitive: true, contains: [ DETAILS, TIME, PROGRESSVALUE, KEYWORDS ] }; }; /***/ }, /* 319 */ /***/ function(module, exports) { module.exports = function(hljs) { var SWIFT_KEYWORDS = { keyword: '__COLUMN__ __FILE__ __FUNCTION__ __LINE__ as as! as? associativity ' + 'break case catch class continue convenience default defer deinit didSet do ' + 'dynamic dynamicType else enum extension fallthrough false final for func ' + 'get guard if import in indirect infix init inout internal is lazy left let ' + 'mutating nil none nonmutating operator optional override postfix precedence ' + 'prefix private protocol Protocol public repeat required rethrows return ' + 'right self Self set static struct subscript super switch throw throws true ' + 'try try! try? Type typealias unowned var weak where while willSet', literal: 'true false nil', built_in: 'abs advance alignof alignofValue anyGenerator assert assertionFailure ' + 'bridgeFromObjectiveC bridgeFromObjectiveCUnconditional bridgeToObjectiveC ' + 'bridgeToObjectiveCUnconditional c contains count countElements countLeadingZeros ' + 'debugPrint debugPrintln distance dropFirst dropLast dump encodeBitsAsWords ' + 'enumerate equal fatalError filter find getBridgedObjectiveCType getVaList ' + 'indices insertionSort isBridgedToObjectiveC isBridgedVerbatimToObjectiveC ' + 'isUniquelyReferenced isUniquelyReferencedNonObjC join lazy lexicographicalCompare ' + 'map max maxElement min minElement numericCast overlaps partition posix ' + 'precondition preconditionFailure print println quickSort readLine reduce reflect ' + 'reinterpretCast reverse roundUpToAlignment sizeof sizeofValue sort split ' + 'startsWith stride strideof strideofValue swap toString transcode ' + 'underestimateCount unsafeAddressOf unsafeBitCast unsafeDowncast unsafeUnwrap ' + 'unsafeReflect withExtendedLifetime withObjectAtPlusZero withUnsafePointer ' + 'withUnsafePointerToObject withUnsafeMutablePointer withUnsafeMutablePointers ' + 'withUnsafePointer withUnsafePointers withVaList zip' }; var TYPE = { className: 'type', begin: '\\b[A-Z][\\w\u00C0-\u02B8\']*', relevance: 0 }; var BLOCK_COMMENT = hljs.COMMENT( '/\\*', '\\*/', { contains: ['self'] } ); var SUBST = { className: 'subst', begin: /\\\(/, end: '\\)', keywords: SWIFT_KEYWORDS, contains: [] // assigned later }; var NUMBERS = { className: 'number', begin: '\\b([\\d_]+(\\.[\\deE_]+)?|0x[a-fA-F0-9_]+(\\.[a-fA-F0-9p_]+)?|0b[01_]+|0o[0-7_]+)\\b', relevance: 0 }; var QUOTE_STRING_MODE = hljs.inherit(hljs.QUOTE_STRING_MODE, { contains: [SUBST, hljs.BACKSLASH_ESCAPE] }); SUBST.contains = [NUMBERS]; return { keywords: SWIFT_KEYWORDS, contains: [ QUOTE_STRING_MODE, hljs.C_LINE_COMMENT_MODE, BLOCK_COMMENT, TYPE, NUMBERS, { className: 'function', beginKeywords: 'func', end: '{', excludeEnd: true, contains: [ hljs.inherit(hljs.TITLE_MODE, { begin: /[A-Za-z$_][0-9A-Za-z$_]*/ }), { begin: /</, end: />/ }, { className: 'params', begin: /\(/, end: /\)/, endsParent: 
true, keywords: SWIFT_KEYWORDS, contains: [ 'self', NUMBERS, QUOTE_STRING_MODE, hljs.C_BLOCK_COMMENT_MODE, {begin: ':'} // relevance booster ], illegal: /["']/ } ], illegal: /\[|%/ }, { className: 'class', beginKeywords: 'struct protocol class extension enum', keywords: SWIFT_KEYWORDS, end: '\\{', excludeEnd: true, contains: [ hljs.inherit(hljs.TITLE_MODE, {begin: /[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/}) ] }, { className: 'meta', // @attributes begin: '(@warn_unused_result|@exported|@lazy|@noescape|' + '@NSCopying|@NSManaged|@objc|@convention|@required|' + '@noreturn|@IBAction|@IBDesignable|@IBInspectable|@IBOutlet|' + '@infix|@prefix|@postfix|@autoclosure|@testable|@available|' + '@nonobjc|@NSApplicationMain|@UIApplicationMain)' }, { beginKeywords: 'import', end: /$/, contains: [hljs.C_LINE_COMMENT_MODE, BLOCK_COMMENT] } ] }; }; /***/ }, /* 320 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMENT = { className: 'comment', begin: /\$noop\(/, end: /\)/, contains: [{ begin: /\(/, end: /\)/, contains: ['self', { begin: /\\./ }] }], relevance: 10 }; var FUNCTION = { className: 'keyword', begin: /\$(?!noop)[a-zA-Z][_a-zA-Z0-9]*/, end: /\(/, excludeEnd: true }; var VARIABLE = { className: 'variable', begin: /%[_a-zA-Z0-9:]*/, end: '%' }; var ESCAPE_SEQUENCE = { className: 'symbol', begin: /\\./ }; return { contains: [ COMMENT, FUNCTION, VARIABLE, ESCAPE_SEQUENCE ] }; }; /***/ }, /* 321 */ /***/ function(module, exports) { module.exports = function(hljs) { var LITERALS = {literal: '{ } true false yes no Yes No True False null'}; var keyPrefix = '^[ \\-]*'; var keyName = '[a-zA-Z_][\\w\\-]*'; var KEY = { className: 'attr', variants: [ { begin: keyPrefix + keyName + ":"}, { begin: keyPrefix + '"' + keyName + '"' + ":"}, { begin: keyPrefix + "'" + keyName + "'" + ":"} ] }; var TEMPLATE_VARIABLES = { className: 'template-variable', variants: [ { begin: '\{\{', end: '\}\}' }, // jinja templates Ansible { begin: '%\{', end: '\}' } // Ruby i18n ] }; var STRING = { className: 'string', relevance: 0, variants: [ {begin: /'/, end: /'/}, {begin: /"/, end: /"/} ], contains: [ hljs.BACKSLASH_ESCAPE, TEMPLATE_VARIABLES ] }; return { case_insensitive: true, aliases: ['yml', 'YAML', 'yaml'], contains: [ KEY, { className: 'meta', begin: '^---\s*$', relevance: 10 }, { // multi line string className: 'string', begin: '[\\|>] *$', returnEnd: true, contains: STRING.contains, // very simple termination: next hash key end: KEY.variants[0].begin }, { // Ruby/Rails erb begin: '<%[%=-]?', end: '[%-]?%>', subLanguage: 'ruby', excludeBegin: true, excludeEnd: true, relevance: 0 }, { // data type className: 'type', begin: '!!' 
+ hljs.UNDERSCORE_IDENT_RE, }, { // fragment id &ref className: 'meta', begin: '&' + hljs.UNDERSCORE_IDENT_RE + '$', }, { // fragment reference *ref className: 'meta', begin: '\\*' + hljs.UNDERSCORE_IDENT_RE + '$' }, { // array listing className: 'bullet', begin: '^ *-', relevance: 0 }, STRING, hljs.HASH_COMMENT_MODE, hljs.C_NUMBER_MODE ], keywords: LITERALS }; }; /***/ }, /* 322 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: true, contains: [ hljs.HASH_COMMENT_MODE, // version of format and total amount of testcases { className: 'meta', variants: [ { begin: '^TAP version (\\d+)$' }, { begin: '^1\\.\\.(\\d+)$' } ], }, // YAML block { begin: '(\s+)?---$', end: '\\.\\.\\.$', subLanguage: 'yaml', relevance: 0 }, // testcase number { className: 'number', begin: ' (\\d+) ' }, // testcase status and description { className: 'symbol', variants: [ { begin: '^ok' }, { begin: '^not ok' } ], }, ] }; }; /***/ }, /* 323 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['tk'], keywords: 'after append apply array auto_execok auto_import auto_load auto_mkindex ' + 'auto_mkindex_old auto_qualify auto_reset bgerror binary break catch cd chan clock ' + 'close concat continue dde dict encoding eof error eval exec exit expr fblocked ' + 'fconfigure fcopy file fileevent filename flush for foreach format gets glob global ' + 'history http if incr info interp join lappend|10 lassign|10 lindex|10 linsert|10 list ' + 'llength|10 load lrange|10 lrepeat|10 lreplace|10 lreverse|10 lsearch|10 lset|10 lsort|10 '+ 'mathfunc mathop memory msgcat namespace open package parray pid pkg::create pkg_mkIndex '+ 'platform platform::shell proc puts pwd read refchan regexp registry regsub|10 rename '+ 'return safe scan seek set socket source split string subst switch tcl_endOfWord '+ 'tcl_findLibrary tcl_startOfNextWord tcl_startOfPreviousWord tcl_wordBreakAfter '+ 'tcl_wordBreakBefore tcltest tclvars tell time tm trace unknown unload unset update '+ 'uplevel upvar variable vwait while', contains: [ hljs.COMMENT(';[ \\t]*#', '$'), hljs.COMMENT('^[ \\t]*#', '$'), { beginKeywords: 'proc', end: '[\\{]', excludeEnd: true, contains: [ { className: 'title', begin: '[ \\t\\n\\r]+(::)?[a-zA-Z_]((::)?[a-zA-Z0-9_])*', end: '[ \\t\\n\\r]', endsWithParent: true, excludeEnd: true } ] }, { excludeEnd: true, variants: [ { begin: '\\$(\\{)?(::)?[a-zA-Z_]((::)?[a-zA-Z0-9_])*\\(([a-zA-Z0-9_])*\\)', end: '[^a-zA-Z0-9_\\}\\$]' }, { begin: '\\$(\\{)?(::)?[a-zA-Z_]((::)?[a-zA-Z0-9_])*', end: '(\\))?[^a-zA-Z0-9_\\}\\$]' } ] }, { className: 'string', contains: [hljs.BACKSLASH_ESCAPE], variants: [ hljs.inherit(hljs.APOS_STRING_MODE, {illegal: null}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}) ] }, { className: 'number', variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE] } ] } }; /***/ }, /* 324 */ /***/ function(module, exports) { module.exports = function(hljs) { var COMMAND = { className: 'tag', begin: /\\/, relevance: 0, contains: [ { className: 'name', variants: [ {begin: /[a-zA-Zа-яА-я]+[*]?/}, {begin: /[^a-zA-Zа-яА-я0-9]/} ], starts: { endsWithParent: true, relevance: 0, contains: [ { className: 'string', // because it looks like attributes in HTML tags variants: [ {begin: /\[/, end: /\]/}, {begin: /\{/, end: /\}/} ] }, { begin: /\s*=\s*/, endsWithParent: true, relevance: 0, contains: [ { className: 'number', begin: /-?\d*\.?\d+(pt|pc|mm|cm|in|dd|cc|ex|em)?/ } ] } ] } } ] }; return { contains: [ COMMAND, { className: 'formula', contains: 
[COMMAND], relevance: 0, variants: [ {begin: /\$\$/, end: /\$\$/}, {begin: /\$/, end: /\$/} ] }, hljs.COMMENT( '%', '$', { relevance: 0 } ) ] }; }; /***/ }, /* 325 */ /***/ function(module, exports) { module.exports = function(hljs) { var BUILT_IN_TYPES = 'bool byte i16 i32 i64 double string binary'; return { keywords: { keyword: 'namespace const typedef struct enum service exception void oneway set list map required optional', built_in: BUILT_IN_TYPES, literal: 'true false' }, contains: [ hljs.QUOTE_STRING_MODE, hljs.NUMBER_MODE, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'class', beginKeywords: 'struct enum service exception', end: /\{/, illegal: /\n/, contains: [ hljs.inherit(hljs.TITLE_MODE, { starts: {endsWithParent: true, excludeEnd: true} // hack: eating everything after the first title }) ] }, { begin: '\\b(set|list|map)\\s*<', end: '>', keywords: BUILT_IN_TYPES, contains: ['self'] } ] }; }; /***/ }, /* 326 */ /***/ function(module, exports) { module.exports = function(hljs) { var TPID = { className: 'number', begin: '[1-9][0-9]*', /* no leading zeros */ relevance: 0 }; var TPLABEL = { className: 'symbol', begin: ':[^\\]]+' }; var TPDATA = { className: 'built_in', begin: '(AR|P|PAYLOAD|PR|R|SR|RSR|LBL|VR|UALM|MESSAGE|UTOOL|UFRAME|TIMER|\ TIMER_OVERFLOW|JOINT_MAX_SPEED|RESUME_PROG|DIAG_REC)\\[', end: '\\]', contains: [ 'self', TPID, TPLABEL ] }; var TPIO = { className: 'built_in', begin: '(AI|AO|DI|DO|F|RI|RO|UI|UO|GI|GO|SI|SO)\\[', end: '\\]', contains: [ 'self', TPID, hljs.QUOTE_STRING_MODE, /* for pos section at bottom */ TPLABEL ] }; return { keywords: { keyword: 'ABORT ACC ADJUST AND AP_LD BREAK CALL CNT COL CONDITION CONFIG DA DB ' + 'DIV DETECT ELSE END ENDFOR ERR_NUM ERROR_PROG FINE FOR GP GUARD INC ' + 'IF JMP LINEAR_MAX_SPEED LOCK MOD MONITOR OFFSET Offset OR OVERRIDE ' + 'PAUSE PREG PTH RT_LD RUN SELECT SKIP Skip TA TB TO TOOL_OFFSET ' + 'Tool_Offset UF UT UFRAME_NUM UTOOL_NUM UNLOCK WAIT X Y Z W P R STRLEN ' + 'SUBSTR FINDSTR VOFFSET PROG ATTR MN POS', literal: 'ON OFF max_speed LPOS JPOS ENABLE DISABLE START STOP RESET' }, contains: [ TPDATA, TPIO, { className: 'keyword', begin: '/(PROG|ATTR|MN|POS|END)\\b' }, { /* this is for cases like ,CALL */ className: 'keyword', begin: '(CALL|RUN|POINT_LOGIC|LBL)\\b' }, { /* this is for cases like CNT100 where the default lexemes do not * separate the keyword and the number */ className: 'keyword', begin: '\\b(ACC|CNT|Skip|Offset|PSPD|RT_LD|AP_LD|Tool_Offset)' }, { /* to catch numbers that do not have a word boundary on the left */ className: 'number', begin: '\\d+(sec|msec|mm/sec|cm/min|inch/min|deg/sec|mm|in|cm)?\\b', relevance: 0 }, hljs.COMMENT('//', '[;$]'), hljs.COMMENT('!', '[;$]'), hljs.COMMENT('--eg:', '$'), hljs.QUOTE_STRING_MODE, { className: 'string', begin: '\'', end: '\'' }, hljs.C_NUMBER_MODE, { className: 'variable', begin: '\\$[A-Za-z0-9_]+' } ] }; }; /***/ }, /* 327 */ /***/ function(module, exports) { module.exports = function(hljs) { var PARAMS = { className: 'params', begin: '\\(', end: '\\)' }; var FUNCTION_NAMES = 'attribute block constant cycle date dump include ' + 'max min parent random range source template_from_string'; var FUNCTIONS = { beginKeywords: FUNCTION_NAMES, keywords: {name: FUNCTION_NAMES}, relevance: 0, contains: [ PARAMS ] }; var FILTER = { begin: /\|[A-Za-z_]+:?/, keywords: 'abs batch capitalize convert_encoding date date_modify default ' + 'escape first format join json_encode keys last length lower ' + 'merge nl2br number_format raw replace reverse round slice sort 
split ' + 'striptags title trim upper url_encode', contains: [ FUNCTIONS ] }; var TAGS = 'autoescape block do embed extends filter flush for ' + 'if import include macro sandbox set spaceless use verbatim'; TAGS = TAGS + ' ' + TAGS.split(' ').map(function(t){return 'end' + t}).join(' '); return { aliases: ['craftcms'], case_insensitive: true, subLanguage: 'xml', contains: [ hljs.COMMENT(/\{#/, /#}/), { className: 'template-tag', begin: /\{%/, end: /%}/, contains: [ { className: 'name', begin: /\w+/, keywords: TAGS, starts: { endsWithParent: true, contains: [FILTER, FUNCTIONS], relevance: 0 } } ] }, { className: 'template-variable', begin: /\{\{/, end: /}}/, contains: ['self', FILTER, FUNCTIONS] } ] }; }; /***/ }, /* 328 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = { keyword: 'in if for while finally var new function do return void else break catch ' + 'instanceof with throw case default try this switch continue typeof delete ' + 'let yield const class public private protected get set super ' + 'static implements enum export import declare type namespace abstract', literal: 'true false null undefined NaN Infinity', built_in: 'eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent ' + 'encodeURI encodeURIComponent escape unescape Object Function Boolean Error ' + 'EvalError InternalError RangeError ReferenceError StopIteration SyntaxError ' + 'TypeError URIError Number Math Date String RegExp Array Float32Array ' + 'Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array ' + 'Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require ' + 'module console window document any number boolean string void' }; return { aliases: ['ts'], keywords: KEYWORDS, contains: [ { className: 'meta', begin: /^\s*['"]use strict['"]/ }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, { // template string className: 'string', begin: '`', end: '`', contains: [ hljs.BACKSLASH_ESCAPE, { className: 'subst', begin: '\\$\\{', end: '\\}' } ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'number', variants: [ { begin: '\\b(0[bB][01]+)' }, { begin: '\\b(0[oO][0-7]+)' }, { begin: hljs.C_NUMBER_RE } ], relevance: 0 }, { // "value" container begin: '(' + hljs.RE_STARTERS_RE + '|\\b(case|return|throw)\\b)\\s*', keywords: 'return throw case', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, hljs.REGEXP_MODE ], relevance: 0 }, { className: 'function', begin: 'function', end: /[\{;]/, excludeEnd: true, keywords: KEYWORDS, contains: [ 'self', hljs.inherit(hljs.TITLE_MODE, {begin: /[A-Za-z$_][0-9A-Za-z$_]*/}), { className: 'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, keywords: KEYWORDS, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ], illegal: /["'\(]/ } ], illegal: /%/, relevance: 0 // () => {} is more typical in TypeScript }, { beginKeywords: 'constructor', end: /\{/, excludeEnd: true, contains: [ 'self', { className: 'params', begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, keywords: KEYWORDS, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE ], illegal: /["'\(]/ } ] }, { // prevent references like module.id from being higlighted as module definitions begin: /module\./, keywords: {built_in: 'module'}, relevance: 0 }, { beginKeywords: 'module', end: /\{/, excludeEnd: true }, { beginKeywords: 'interface', end: /\{/, excludeEnd: true, keywords: 'interface extends' }, { begin: /\$[(.]/ // relevance booster for a pattern common to JS libs: 
`$(something)` and `$.something` }, { begin: '\\.' + hljs.IDENT_RE, relevance: 0 // hack: prevents detection of keywords after dots }, { className: 'meta', begin: '@[A-Za-z]+' } ] }; }; /***/ }, /* 329 */ /***/ function(module, exports) { module.exports = function(hljs) { return { keywords: { keyword: // Value types 'char uchar unichar int uint long ulong short ushort int8 int16 int32 int64 uint8 ' + 'uint16 uint32 uint64 float double bool struct enum string void ' + // Reference types 'weak unowned owned ' + // Modifiers 'async signal static abstract interface override virtual delegate ' + // Control Structures 'if while do for foreach else switch case break default return try catch ' + // Visibility 'public private protected internal ' + // Other 'using new this get set const stdout stdin stderr var', built_in: 'DBus GLib CCode Gee Object Gtk Posix', literal: 'false true null' }, contains: [ { className: 'class', beginKeywords: 'class interface namespace', end: '{', excludeEnd: true, illegal: '[^,:\\n\\s\\.]', contains: [ hljs.UNDERSCORE_TITLE_MODE ] }, hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, { className: 'string', begin: '"""', end: '"""', relevance: 5 }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE, hljs.C_NUMBER_MODE, { className: 'meta', begin: '^#', end: '$', relevance: 2 } ] }; }; /***/ }, /* 330 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['vb'], case_insensitive: true, keywords: { keyword: 'addhandler addressof alias and andalso aggregate ansi as assembly auto binary by byref byval ' + /* a-b */ 'call case catch class compare const continue custom declare default delegate dim distinct do ' + /* c-d */ 'each equals else elseif end enum erase error event exit explicit finally for friend from function ' + /* e-f */ 'get global goto group handles if implements imports in inherits interface into is isfalse isnot istrue ' + /* g-i */ 'join key let lib like loop me mid mod module mustinherit mustoverride mybase myclass ' + /* j-m */ 'namespace narrowing new next not notinheritable notoverridable ' + /* n */ 'of off on operator option optional or order orelse overloads overridable overrides ' + /* o */ 'paramarray partial preserve private property protected public ' + /* p */ 'raiseevent readonly redim rem removehandler resume return ' + /* r */ 'select set shadows shared skip static step stop structure strict sub synclock ' + /* s */ 'take text then throw to try unicode until using when where while widening with withevents writeonly xor', /* t-x */ built_in: 'boolean byte cbool cbyte cchar cdate cdec cdbl char cint clng cobj csbyte cshort csng cstr ctype ' + /* b-c */ 'date decimal directcast double gettype getxmlnamespace iif integer long object ' + /* d-o */ 'sbyte short single string trycast typeof uinteger ulong ushort', /* s-u */ literal: 'true false nothing' }, illegal: '//|{|}|endif|gosub|variant|wend', /* reserved deprecated keywords */ contains: [ hljs.inherit(hljs.QUOTE_STRING_MODE, {contains: [{begin: '""'}]}), hljs.COMMENT( '\'', '$', { returnBegin: true, contains: [ { className: 'doctag', begin: '\'\'\'|<!--|-->', contains: [hljs.PHRASAL_WORDS_MODE] }, { className: 'doctag', begin: '</?', end: '>', contains: [hljs.PHRASAL_WORDS_MODE] } ] } ), hljs.C_NUMBER_MODE, { className: 'meta', begin: '#', end: '$', keywords: {'meta-keyword': 'if else elseif end region externalsource'} } ] }; }; /***/ }, /* 331 */ /***/ function(module, exports) { module.exports = function(hljs) { return { aliases: ['vbs'], case_insensitive: 
true, keywords: { keyword: 'call class const dim do loop erase execute executeglobal exit for each next function ' + 'if then else on error option explicit new private property let get public randomize ' + 'redim rem select case set stop sub while wend with end to elseif is or xor and not ' + 'class_initialize class_terminate default preserve in me byval byref step resume goto', built_in: 'lcase month vartype instrrev ubound setlocale getobject rgb getref string ' + 'weekdayname rnd dateadd monthname now day minute isarray cbool round formatcurrency ' + 'conversions csng timevalue second year space abs clng timeserial fixs len asc ' + 'isempty maths dateserial atn timer isobject filter weekday datevalue ccur isdate ' + 'instr datediff formatdatetime replace isnull right sgn array snumeric log cdbl hex ' + 'chr lbound msgbox ucase getlocale cos cdate cbyte rtrim join hour oct typename trim ' + 'strcomp int createobject loadpicture tan formatnumber mid scriptenginebuildversion ' + 'scriptengine split scriptengineminorversion cint sin datepart ltrim sqr ' + 'scriptenginemajorversion time derived eval date formatpercent exp inputbox left ascw ' + 'chrw regexp server response request cstr err', literal: 'true false null nothing empty' }, illegal: '//', contains: [ hljs.inherit(hljs.QUOTE_STRING_MODE, {contains: [{begin: '""'}]}), hljs.COMMENT( /'/, /$/, { relevance: 0 } ), hljs.C_NUMBER_MODE ] }; }; /***/ }, /* 332 */ /***/ function(module, exports) { module.exports = function(hljs) { return { subLanguage: 'xml', contains: [ { begin: '<%', end: '%>', subLanguage: 'vbscript' } ] }; }; /***/ }, /* 333 */ /***/ function(module, exports) { module.exports = function(hljs) { var SV_KEYWORDS = { keyword: 'accept_on alias always always_comb always_ff always_latch and assert assign ' + 'assume automatic before begin bind bins binsof bit break buf|0 bufif0 bufif1 ' + 'byte case casex casez cell chandle checker class clocking cmos config const ' + 'constraint context continue cover covergroup coverpoint cross deassign default ' + 'defparam design disable dist do edge else end endcase endchecker endclass ' + 'endclocking endconfig endfunction endgenerate endgroup endinterface endmodule ' + 'endpackage endprimitive endprogram endproperty endspecify endsequence endtable ' + 'endtask enum event eventually expect export extends extern final first_match for ' + 'force foreach forever fork forkjoin function generate|5 genvar global highz0 highz1 ' + 'if iff ifnone ignore_bins illegal_bins implements implies import incdir include ' + 'initial inout input inside instance int integer interconnect interface intersect ' + 'join join_any join_none large let liblist library local localparam logic longint ' + 'macromodule matches medium modport module nand negedge nettype new nexttime nmos ' + 'nor noshowcancelled not notif0 notif1 or output package packed parameter pmos ' + 'posedge primitive priority program property protected pull0 pull1 pulldown pullup ' + 'pulsestyle_ondetect pulsestyle_onevent pure rand randc randcase randsequence rcmos ' + 'real realtime ref reg reject_on release repeat restrict return rnmos rpmos rtran ' + 'rtranif0 rtranif1 s_always s_eventually s_nexttime s_until s_until_with scalared ' + 'sequence shortint shortreal showcancelled signed small soft solve specify specparam ' + 'static string strong strong0 strong1 struct super supply0 supply1 sync_accept_on ' + 'sync_reject_on table tagged task this throughout time timeprecision timeunit tran ' + 'tranif0 tranif1 tri tri0 tri1 triand trior 
trireg type typedef union unique unique0 ' + 'unsigned until until_with untyped use uwire var vectored virtual void wait wait_order ' + 'wand weak weak0 weak1 while wildcard wire with within wor xnor xor', literal: 'null', built_in: '$finish $stop $exit $fatal $error $warning $info $realtime $time $printtimescale ' + '$bitstoreal $bitstoshortreal $itor $signed $cast $bits $stime $timeformat ' + '$realtobits $shortrealtobits $rtoi $unsigned $asserton $assertkill $assertpasson ' + '$assertfailon $assertnonvacuouson $assertoff $assertcontrol $assertpassoff ' + '$assertfailoff $assertvacuousoff $isunbounded $sampled $fell $changed $past_gclk ' + '$fell_gclk $changed_gclk $rising_gclk $steady_gclk $coverage_control ' + '$coverage_get $coverage_save $set_coverage_db_name $rose $stable $past ' + '$rose_gclk $stable_gclk $future_gclk $falling_gclk $changing_gclk $display ' + '$coverage_get_max $coverage_merge $get_coverage $load_coverage_db $typename ' + '$unpacked_dimensions $left $low $increment $clog2 $ln $log10 $exp $sqrt $pow ' + '$floor $ceil $sin $cos $tan $countbits $onehot $isunknown $fatal $warning ' + '$dimensions $right $high $size $asin $acos $atan $atan2 $hypot $sinh $cosh ' + '$tanh $asinh $acosh $atanh $countones $onehot0 $error $info $random ' + '$dist_chi_square $dist_erlang $dist_exponential $dist_normal $dist_poisson ' + '$dist_t $dist_uniform $q_initialize $q_remove $q_exam $async$and$array ' + '$async$nand$array $async$or$array $async$nor$array $sync$and$array ' + '$sync$nand$array $sync$or$array $sync$nor$array $q_add $q_full $psprintf ' + '$async$and$plane $async$nand$plane $async$or$plane $async$nor$plane ' + '$sync$and$plane $sync$nand$plane $sync$or$plane $sync$nor$plane $system ' + '$display $displayb $displayh $displayo $strobe $strobeb $strobeh $strobeo ' + '$write $readmemb $readmemh $writememh $value$plusargs ' + '$dumpvars $dumpon $dumplimit $dumpports $dumpportson $dumpportslimit ' + '$writeb $writeh $writeo $monitor $monitorb $monitorh $monitoro $writememb ' + '$dumpfile $dumpoff $dumpall $dumpflush $dumpportsoff $dumpportsall ' + '$dumpportsflush $fclose $fdisplay $fdisplayb $fdisplayh $fdisplayo ' + '$fstrobe $fstrobeb $fstrobeh $fstrobeo $swrite $swriteb $swriteh ' + '$swriteo $fscanf $fread $fseek $fflush $feof $fopen $fwrite $fwriteb ' + '$fwriteh $fwriteo $fmonitor $fmonitorb $fmonitorh $fmonitoro $sformat ' + '$sformatf $fgetc $ungetc $fgets $sscanf $rewind $ftell $ferror' }; return { aliases: ['v', 'sv', 'svh'], case_insensitive: false, keywords: SV_KEYWORDS, lexemes: /[\w\$]+/, contains: [ hljs.C_BLOCK_COMMENT_MODE, hljs.C_LINE_COMMENT_MODE, hljs.QUOTE_STRING_MODE, { className: 'number', contains: [hljs.BACKSLASH_ESCAPE], variants: [ {begin: '\\b((\\d+\'(b|h|o|d|B|H|O|D))[0-9xzXZa-fA-F_]+)'}, {begin: '\\B((\'(b|h|o|d|B|H|O|D))[0-9xzXZa-fA-F_]+)'}, {begin: '\\b([0-9_])+', relevance: 0} ] }, /* parameters to instances */ { className: 'variable', variants: [ {begin: '#\\((?!parameter).+\\)'}, {begin: '\\.\\w+', relevance: 0}, ] }, { className: 'meta', begin: '`', end: '$', keywords: {'meta-keyword': 'define __FILE__ ' + '__LINE__ begin_keywords celldefine default_nettype define ' + 'else elsif end_keywords endcelldefine endif ifdef ifndef ' + 'include line nounconnected_drive pragma resetall timescale ' + 'unconnected_drive undef undefineall'}, relevance: 0 } ] }; // return }; /***/ }, /* 334 */ /***/ function(module, exports) { module.exports = function(hljs) { // Regular expression for VHDL numeric literals. 
// Decimal literal: var INTEGER_RE = '\\d(_|\\d)*'; var EXPONENT_RE = '[eE][-+]?' + INTEGER_RE; var DECIMAL_LITERAL_RE = INTEGER_RE + '(\\.' + INTEGER_RE + ')?' + '(' + EXPONENT_RE + ')?'; // Based literal: var BASED_INTEGER_RE = '\\w+'; var BASED_LITERAL_RE = INTEGER_RE + '#' + BASED_INTEGER_RE + '(\\.' + BASED_INTEGER_RE + ')?' + '#' + '(' + EXPONENT_RE + ')?'; var NUMBER_RE = '\\b(' + BASED_LITERAL_RE + '|' + DECIMAL_LITERAL_RE + ')'; return { case_insensitive: true, keywords: { keyword: 'abs access after alias all and architecture array assert assume assume_guarantee attribute ' + 'begin block body buffer bus case component configuration constant context cover disconnect ' + 'downto default else elsif end entity exit fairness file for force function generate ' + 'generic group guarded if impure in inertial inout is label library linkage literal ' + 'loop map mod nand new next nor not null of on open or others out package port ' + 'postponed procedure process property protected pure range record register reject ' + 'release rem report restrict restrict_guarantee return rol ror select sequence ' + 'severity shared signal sla sll sra srl strong subtype then to transport type ' + 'unaffected units until use variable vmode vprop vunit wait when while with xnor xor', built_in: 'boolean bit character ' + 'integer time delay_length natural positive ' + 'string bit_vector file_open_kind file_open_status ' + 'std_logic std_logic_vector unsigned signed boolean_vector integer_vector ' + 'std_ulogic std_ulogic_vector unresolved_unsigned u_unsigned unresolved_signed u_signed' + 'real_vector time_vector', literal: 'false true note warning error failure ' + // severity_level 'line text side width' // textio }, illegal: '{', contains: [ hljs.C_BLOCK_COMMENT_MODE, // VHDL-2008 block commenting. hljs.COMMENT('--', '$'), hljs.QUOTE_STRING_MODE, { className: 'number', begin: NUMBER_RE, relevance: 0 }, { className: 'string', begin: '\'(U|X|0|1|Z|W|L|H|-)\'', contains: [hljs.BACKSLASH_ESCAPE] }, { className: 'symbol', begin: '\'[A-Za-z](_?[A-Za-z0-9])*', contains: [hljs.BACKSLASH_ESCAPE] } ] }; }; /***/ }, /* 335 */ /***/ function(module, exports) { module.exports = function(hljs) { return { lexemes: /[!#@\w]+/, keywords: { keyword: // express version except: ! & * < = > !! 
# @ @@ 'N|0 P|0 X|0 a|0 ab abc abo al am an|0 ar arga argd arge argdo argg argl argu as au aug aun b|0 bN ba bad bd be bel bf bl bm bn bo bp br brea breaka breakd breakl bro bufdo buffers bun bw c|0 cN cNf ca cabc caddb cad caddf cal cat cb cc ccl cd ce cex cf cfir cgetb cgete cg changes chd che checkt cl cla clo cm cmapc cme cn cnew cnf cno cnorea cnoreme co col colo com comc comp con conf cope '+ 'cp cpf cq cr cs cst cu cuna cunme cw delm deb debugg delc delf dif diffg diffo diffp diffpu diffs diffthis dig di dl dell dj dli do doautoa dp dr ds dsp e|0 ea ec echoe echoh echom echon el elsei em en endfo endf endt endw ene ex exe exi exu f|0 files filet fin fina fini fir fix fo foldc foldd folddoc foldo for fu go gr grepa gu gv ha helpf helpg helpt hi hid his ia iabc if ij il im imapc '+ 'ime ino inorea inoreme int is isp iu iuna iunme j|0 ju k|0 keepa kee keepj lN lNf l|0 lad laddb laddf la lan lat lb lc lch lcl lcs le lefta let lex lf lfir lgetb lgete lg lgr lgrepa lh ll lla lli lmak lm lmapc lne lnew lnf ln loadk lo loc lockv lol lope lp lpf lr ls lt lu lua luad luaf lv lvimgrepa lw m|0 ma mak map mapc marks mat me menut mes mk mks mksp mkv mkvie mod mz mzf nbc nb nbs new nm nmapc nme nn nnoreme noa no noh norea noreme norm nu nun nunme ol o|0 om omapc ome on ono onoreme opt ou ounme ow p|0 '+ 'profd prof pro promptr pc ped pe perld po popu pp pre prev ps pt ptN ptf ptj ptl ptn ptp ptr pts pu pw py3 python3 py3d py3f py pyd pyf quita qa rec red redi redr redraws reg res ret retu rew ri rightb rub rubyd rubyf rund ru rv sN san sa sal sav sb sbN sba sbf sbl sbm sbn sbp sbr scrip scripte scs se setf setg setl sf sfir sh sim sig sil sl sla sm smap smapc sme sn sni sno snor snoreme sor '+ 'so spelld spe spelli spellr spellu spellw sp spr sre st sta startg startr star stopi stj sts sun sunm sunme sus sv sw sy synti sync tN tabN tabc tabdo tabe tabf tabfir tabl tabm tabnew '+ 'tabn tabo tabp tabr tabs tab ta tags tc tcld tclf te tf th tj tl tm tn to tp tr try ts tu u|0 undoj undol una unh unl unlo unm unme uns up ve verb vert vim vimgrepa vi viu vie vm vmapc vme vne vn vnoreme vs vu vunme windo w|0 wN wa wh wi winc winp wn wp wq wqa ws wu wv x|0 xa xmapc xm xme xn xnoreme xu xunme y|0 z|0 ~ '+ // full version 'Next Print append abbreviate abclear aboveleft all amenu anoremenu args argadd argdelete argedit argglobal arglocal argument ascii autocmd augroup aunmenu buffer bNext ball badd bdelete behave belowright bfirst blast bmodified bnext botright bprevious brewind break breakadd breakdel breaklist browse bunload '+ 'bwipeout change cNext cNfile cabbrev cabclear caddbuffer caddexpr caddfile call catch cbuffer cclose center cexpr cfile cfirst cgetbuffer cgetexpr cgetfile chdir checkpath checktime clist clast close cmap cmapclear cmenu cnext cnewer cnfile cnoremap cnoreabbrev cnoremenu copy colder colorscheme command comclear compiler continue confirm copen cprevious cpfile cquit crewind cscope cstag cunmap '+ 'cunabbrev cunmenu cwindow delete delmarks debug debuggreedy delcommand delfunction diffupdate diffget diffoff diffpatch diffput diffsplit digraphs display deletel djump dlist doautocmd doautoall deletep drop dsearch dsplit edit earlier echo echoerr echohl echomsg else elseif emenu endif endfor '+ 'endfunction endtry endwhile enew execute exit exusage file filetype find finally finish first fixdel fold foldclose folddoopen folddoclosed foldopen function global goto grep grepadd gui gvim hardcopy help helpfind helpgrep helptags highlight hide history insert iabbrev iabclear ijump ilist imap 
'+ 'imapclear imenu inoremap inoreabbrev inoremenu intro isearch isplit iunmap iunabbrev iunmenu join jumps keepalt keepmarks keepjumps lNext lNfile list laddexpr laddbuffer laddfile last language later lbuffer lcd lchdir lclose lcscope left leftabove lexpr lfile lfirst lgetbuffer lgetexpr lgetfile lgrep lgrepadd lhelpgrep llast llist lmake lmap lmapclear lnext lnewer lnfile lnoremap loadkeymap loadview '+ 'lockmarks lockvar lolder lopen lprevious lpfile lrewind ltag lunmap luado luafile lvimgrep lvimgrepadd lwindow move mark make mapclear match menu menutranslate messages mkexrc mksession mkspell mkvimrc mkview mode mzscheme mzfile nbclose nbkey nbsart next nmap nmapclear nmenu nnoremap '+ 'nnoremenu noautocmd noremap nohlsearch noreabbrev noremenu normal number nunmap nunmenu oldfiles open omap omapclear omenu only onoremap onoremenu options ounmap ounmenu ownsyntax print profdel profile promptfind promptrepl pclose pedit perl perldo pop popup ppop preserve previous psearch ptag ptNext '+ 'ptfirst ptjump ptlast ptnext ptprevious ptrewind ptselect put pwd py3do py3file python pydo pyfile quit quitall qall read recover redo redir redraw redrawstatus registers resize retab return rewind right rightbelow ruby rubydo rubyfile rundo runtime rviminfo substitute sNext sandbox sargument sall saveas sbuffer sbNext sball sbfirst sblast sbmodified sbnext sbprevious sbrewind scriptnames scriptencoding '+ 'scscope set setfiletype setglobal setlocal sfind sfirst shell simalt sign silent sleep slast smagic smapclear smenu snext sniff snomagic snoremap snoremenu sort source spelldump spellgood spellinfo spellrepall spellundo spellwrong split sprevious srewind stop stag startgreplace startreplace '+ 'startinsert stopinsert stjump stselect sunhide sunmap sunmenu suspend sview swapname syntax syntime syncbind tNext tabNext tabclose tabedit tabfind tabfirst tablast tabmove tabnext tabonly tabprevious tabrewind tag tcl tcldo tclfile tearoff tfirst throw tjump tlast tmenu tnext topleft tprevious '+'trewind tselect tunmenu undo undojoin undolist unabbreviate unhide unlet unlockvar unmap unmenu unsilent update vglobal version verbose vertical vimgrep vimgrepadd visual viusage view vmap vmapclear vmenu vnew '+ 'vnoremap vnoremenu vsplit vunmap vunmenu write wNext wall while winsize wincmd winpos wnext wprevious wqall wsverb wundo wviminfo xit xall xmapclear xmap xmenu xnoremap xnoremenu xunmap xunmenu yank', built_in: //built in func 'synIDtrans atan2 range matcharg did_filetype asin feedkeys xor argv ' + 'complete_check add getwinposx getqflist getwinposy screencol ' + 'clearmatches empty extend getcmdpos mzeval garbagecollect setreg ' + 'ceil sqrt diff_hlID inputsecret get getfperm getpid filewritable ' + 'shiftwidth max sinh isdirectory synID system inputrestore winline ' + 'atan visualmode inputlist tabpagewinnr round getregtype mapcheck ' + 'hasmapto histdel argidx findfile sha256 exists toupper getcmdline ' + 'taglist string getmatches bufnr strftime winwidth bufexists ' + 'strtrans tabpagebuflist setcmdpos remote_read printf setloclist ' + 'getpos getline bufwinnr float2nr len getcmdtype diff_filler luaeval ' + 'resolve libcallnr foldclosedend reverse filter has_key bufname ' + 'str2float strlen setline getcharmod setbufvar index searchpos ' + 'shellescape undofile foldclosed setqflist buflisted strchars str2nr ' + 'virtcol floor remove undotree remote_expr winheight gettabwinvar ' + 'reltime cursor tabpagenr finddir localtime acos getloclist search ' + 'tanh matchend rename gettabvar strdisplaywidth type 
abs py3eval ' + 'setwinvar tolower wildmenumode log10 spellsuggest bufloaded ' + 'synconcealed nextnonblank server2client complete settabwinvar ' + 'executable input wincol setmatches getftype hlID inputsave ' + 'searchpair or screenrow line settabvar histadd deepcopy strpart ' + 'remote_peek and eval getftime submatch screenchar winsaveview ' + 'matchadd mkdir screenattr getfontname libcall reltimestr getfsize ' + 'winnr invert pow getbufline byte2line soundfold repeat fnameescape ' + 'tagfiles sin strwidth spellbadword trunc maparg log lispindent ' + 'hostname setpos globpath remote_foreground getchar synIDattr ' + 'fnamemodify cscope_connection stridx winbufnr indent min ' + 'complete_add nr2char searchpairpos inputdialog values matchlist ' + 'items hlexists strridx browsedir expand fmod pathshorten line2byte ' + 'argc count getwinvar glob foldtextresult getreg foreground cosh ' + 'matchdelete has char2nr simplify histget searchdecl iconv ' + 'winrestcmd pumvisible writefile foldlevel haslocaldir keys cos ' + 'matchstr foldtext histnr tan tempname getcwd byteidx getbufvar ' + 'islocked escape eventhandler remote_send serverlist winrestview ' + 'synstack pyeval prevnonblank readfile cindent filereadable changenr ' + 'exp' }, illegal: /;/, contains: [ hljs.NUMBER_MODE, hljs.APOS_STRING_MODE, /* A double quote can start either a string or a line comment. Strings are ended before the end of a line by another double quote and can contain escaped double-quotes and post-escaped line breaks. Also, any double quote at the beginning of a line is a comment but we don't handle that properly at the moment: any double quote inside will turn them into a string. Handling it properly will require a smarter parser. */ { className: 'string', begin: /"(\\"|\n\\|[^"\n])*"/ }, hljs.COMMENT('"', '$'), { className: 'variable', begin: /[bwtglsav]:[\w\d_]*/ }, { className: 'function', beginKeywords: 'function function!', end: '$', relevance: 0, contains: [ hljs.TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)' } ] }, { className: 'symbol', begin: /<[\w-]+>/ } ] }; }; /***/ }, /* 336 */ /***/ function(module, exports) { module.exports = function(hljs) { return { case_insensitive: true, lexemes: '[.%]?' 
+ hljs.IDENT_RE, keywords: { keyword: 'lock rep repe repz repne repnz xaquire xrelease bnd nobnd ' + 'aaa aad aam aas adc add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd cmpsq cmpsw cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs fclex fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2 fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286 lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence monitor mov movd movq movsb movsd movsq movsw movsx movsxd movzx mul mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr rdpmc rdtsc rdtscp ret retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd scasq scasw sfence sgdt shl shld shr shrd sidt sldt skinit smi smint smintold smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs syscall sysenter sysexit sysret test ud0 ud1 ud2b ud2 ud2a umov verr verw fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmove cmovz cmovne cmovnz cmova cmovnbe cmovae cmovnb cmovb cmovnae cmovbe cmovna cmovg cmovnle cmovge cmovnl cmovl cmovnge cmovle cmovng cmovc cmovnc cmovo cmovno cmovs cmovns cmovp cmovpe cmovnp cmovpo je jz jne jnz ja jnbe jae jnb jb jnae jbe jna jg jnle jge jnl jl jnge jle jng jc jnc jo jno js jns jpo jnp jpe jp sete setz setne setnz seta setnbe setae setnb setnc setb setnae setcset setbe setna setg setnle setge setnl setl setnge setle setng sets setns seto setno setpe setp setpo setnp addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps xorps fxrstor fxrstor64 fxsave fxsave64 xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor 
xrstor64 prefetchnta prefetcht0 prefetcht1 prefetcht2 maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw pmaxub pminsw pminub pmovmskb pmulhuw psadbw pshufw pf2iw pfnacc pfpnacc pi2fw pswapd maskmovdqu clflush movntdq movnti movntpd movdqa movdqu movdq2q movq2dq paddq pmuludq pshufd pshufhw pshuflw pslldq psrldq psubq punpckhqdq punpcklqdq addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd cmpunordpd cmpunordsd cmppd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd movapd movhpd movlpd movmskpd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd addsubpd addsubps haddpd haddps hsubpd hsubps lddqu movddup movshdup movsldup clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread vmresume vmrun vmsave vmwrite vmxoff vmxon invept invvpid pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw pmaddubsw pmulhrsw pshufb psignb psignw psignd extrq insertq movntsd movntss lzcnt blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd roundps roundsd roundss crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt getsec pfrcpv pfrsqrtv movbe aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128 vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd vcmpnlepd vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmplt_oqpd vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps vcmple_osps vcmpleps vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps vcmpeq_uqps vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps vcmptrue_uqps vcmptrueps vcmplt_oqps vcmple_oqps vcmpunord_sps vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd vcmpge_oqsd 
vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss vcmpltss vcmple_osss vcmpless vcmpunord_qss vcmpunordss vcmpneq_uqss vcmpneqss vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss vcmptrue_uqss vcmptruess vcmplt_oqss vcmple_oqss vcmpunord_sss vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss vcmpgt_oqss vcmptrue_usss vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128 vinsertps vlddqu vldqqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps vmovd vmovq vmovddup vmovdqa vmovqqa vmovdqu vmovqqu vmovhlps vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps vmovntdq vmovntqq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps vunpcklpd vunpcklps vxorpd vxorps vzeroall vzeroupper pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd vfmaddsub231ps vfmaddsub231pd vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd vfmsub213ps vfmsub213pd vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd vfmsubadd123ps vfmsubadd123pd vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps 
vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps vfnmsub123pd vfnmsub231ps vfnmsub231pd vfnmsub321ps vfnmsub321pd vfmadd132ss vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox rdseed clac stac xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256 llwpcb slwpcb lwpval lwpins vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql vpmacsswd vpmacssww vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw vpshab vpshad vpshaq vpshaw vpshlb vpshld vpshlq vpshlw vbroadcasti128 vpblendd vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps vpermq vperm2i128 vextracti128 vinserti128 vpmaskmovd vpmaskmovq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vgatherdpd vgatherqpd vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdq vpgatherqq xabort xbegin xend xtest andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc valignd valignq vblendmpd vblendmps vbroadcastf32x4 vbroadcastf64x4 vbroadcasti32x4 vbroadcasti64x4 vcompresspd vcompressps vcvtpd2udq vcvtps2udq vcvtsd2usi vcvtss2usi vcvttpd2udq vcvttps2udq vcvttsd2usi vcvttss2usi vcvtudq2pd vcvtudq2ps vcvtusi2sd vcvtusi2ss vexpandpd vexpandps vextractf32x4 vextractf64x4 vextracti32x4 vextracti64x4 vfixupimmpd vfixupimmps vfixupimmsd vfixupimmss vgetexppd vgetexpps vgetexpsd vgetexpss vgetmantpd vgetmantps vgetmantsd vgetmantss vinsertf32x4 vinsertf64x4 vinserti32x4 vinserti64x4 vmovdqa32 vmovdqa64 vmovdqu32 vmovdqu64 vpabsq vpandd vpandnd vpandnq vpandq vpblendmd vpblendmq vpcmpltd vpcmpled vpcmpneqd vpcmpnltd vpcmpnled vpcmpd vpcmpltq vpcmpleq vpcmpneqq vpcmpnltq vpcmpnleq vpcmpq vpcmpequd vpcmpltud vpcmpleud vpcmpnequd vpcmpnltud vpcmpnleud vpcmpud vpcmpequq vpcmpltuq vpcmpleuq vpcmpnequq vpcmpnltuq vpcmpnleuq vpcmpuq vpcompressd vpcompressq vpermi2d vpermi2pd vpermi2ps vpermi2q vpermt2d vpermt2pd vpermt2ps vpermt2q vpexpandd vpexpandq vpmaxsq vpmaxuq vpminsq vpminuq vpmovdb vpmovdw vpmovqb vpmovqd vpmovqw vpmovsdb vpmovsdw vpmovsqb vpmovsqd vpmovsqw vpmovusdb vpmovusdw vpmovusqb vpmovusqd vpmovusqw vpord vporq vprold vprolq vprolvd vprolvq vprord vprorq vprorvd vprorvq vpscatterdd vpscatterdq vpscatterqd vpscatterqq vpsraq vpsravq vpternlogd vpternlogq vptestmd vptestmq vptestnmd vptestnmq vpxord vpxorq vrcp14pd vrcp14ps vrcp14sd vrcp14ss vrndscalepd vrndscaleps vrndscalesd vrndscaless vrsqrt14pd vrsqrt14ps vrsqrt14sd vrsqrt14ss vscalefpd vscalefps 
vscalefsd vscalefss vscatterdpd vscatterdps vscatterqpd vscatterqps vshuff32x4 vshuff64x2 vshufi32x4 vshufi64x2 kandnw kandw kmovw knotw kortestw korw kshiftlw kshiftrw kunpckbw kxnorw kxorw vpbroadcastmb2q vpbroadcastmw2d vpconflictd vpconflictq vplzcntd vplzcntq vexp2pd vexp2ps vrcp28pd vrcp28ps vrcp28sd vrcp28ss vrsqrt28pd vrsqrt28ps vrsqrt28sd vrsqrt28ss vgatherpf0dpd vgatherpf0dps vgatherpf0qpd vgatherpf0qps vgatherpf1dpd vgatherpf1dps vgatherpf1qpd vgatherpf1qps vscatterpf0dpd vscatterpf0dps vscatterpf0qpd vscatterpf0qps vscatterpf1dpd vscatterpf1dps vscatterpf1qpd vscatterpf1qps prefetchwt1 bndmk bndcl bndcu bndcn bndmov bndldx bndstx sha1rnds4 sha1nexte sha1msg1 sha1msg2 sha256rnds2 sha256msg1 sha256msg2 hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6 hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13 hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20 hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27 hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34 hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41 hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48 hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55 hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62 hint_nop63', built_in: // Instruction pointer 'ip eip rip ' + // 8-bit registers 'al ah bl bh cl ch dl dh sil dil bpl spl r8b r9b r10b r11b r12b r13b r14b r15b ' + // 16-bit registers 'ax bx cx dx si di bp sp r8w r9w r10w r11w r12w r13w r14w r15w ' + // 32-bit registers 'eax ebx ecx edx esi edi ebp esp eip r8d r9d r10d r11d r12d r13d r14d r15d ' + // 64-bit registers 'rax rbx rcx rdx rsi rdi rbp rsp r8 r9 r10 r11 r12 r13 r14 r15 ' + // Segment registers 'cs ds es fs gs ss ' + // Floating point stack registers 'st st0 st1 st2 st3 st4 st5 st6 st7 ' + // MMX Registers 'mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 ' + // SSE registers 'xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 xmm8 xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 ' + 'xmm16 xmm17 xmm18 xmm19 xmm20 xmm21 xmm22 xmm23 xmm24 xmm25 xmm26 xmm27 xmm28 xmm29 xmm30 xmm31 ' + // AVX registers 'ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13 ymm14 ymm15 ' + 'ymm16 ymm17 ymm18 ymm19 ymm20 ymm21 ymm22 ymm23 ymm24 ymm25 ymm26 ymm27 ymm28 ymm29 ymm30 ymm31 ' + // AVX-512F registers 'zmm0 zmm1 zmm2 zmm3 zmm4 zmm5 zmm6 zmm7 zmm8 zmm9 zmm10 zmm11 zmm12 zmm13 zmm14 zmm15 ' + 'zmm16 zmm17 zmm18 zmm19 zmm20 zmm21 zmm22 zmm23 zmm24 zmm25 zmm26 zmm27 zmm28 zmm29 zmm30 zmm31 ' + // AVX-512F mask registers 'k0 k1 k2 k3 k4 k5 k6 k7 ' + // Bound (MPX) register 'bnd0 bnd1 bnd2 bnd3 ' + // Special register 'cr0 cr1 cr2 cr3 cr4 cr8 dr0 dr1 dr2 dr3 dr8 tr3 tr4 tr5 tr6 tr7 ' + // NASM altreg package 'r0 r1 r2 r3 r4 r5 r6 r7 r0b r1b r2b r3b r4b r5b r6b r7b ' + 'r0w r1w r2w r3w r4w r5w r6w r7w r0d r1d r2d r3d r4d r5d r6d r7d ' + 'r0h r1h r2h r3h ' + 'r0l r1l r2l r3l r4l r5l r6l r7l r8l r9l r10l r11l r12l r13l r14l r15l ' + 'db dw dd dq dt ddq do dy dz ' + 'resb resw resd resq rest resdq reso resy resz ' + 'incbin equ times ' + 'byte word dword qword nosplit rel abs seg wrt strict near far a32 ptr', meta: '%define %xdefine %+ %undef %defstr %deftok %assign %strcat %strlen %substr %rotate %elif %else %endif ' + '%if %ifmacro %ifctx %ifidn %ifidni %ifid %ifnum %ifstr %iftoken %ifempty %ifenv %error %warning %fatal %rep ' + '%endrep %include %push %pop %repl %pathsearch %depend %use 
%arg %stacksize %local %line %comment %endcomment ' + '.nolist ' + '__FILE__ __LINE__ __SECT__ __BITS__ __OUTPUT_FORMAT__ __DATE__ __TIME__ __DATE_NUM__ __TIME_NUM__ ' + '__UTC_DATE__ __UTC_TIME__ __UTC_DATE_NUM__ __UTC_TIME_NUM__ __PASS__ struc endstruc istruc at iend ' + 'align alignb sectalign daz nodaz up down zero default option assume public ' + 'bits use16 use32 use64 default section segment absolute extern global common cpu float ' + '__utf16__ __utf16le__ __utf16be__ __utf32__ __utf32le__ __utf32be__ ' + '__float8__ __float16__ __float32__ __float64__ __float80m__ __float80e__ __float128l__ __float128h__ ' + '__Infinity__ __QNaN__ __SNaN__ Inf NaN QNaN SNaN float8 float16 float32 float64 float80m float80e ' + 'float128l float128h __FLOAT_DAZ__ __FLOAT_ROUND__ __FLOAT__' }, contains: [ hljs.COMMENT( ';', '$', { relevance: 0 } ), { className: 'number', variants: [ // Float number and x87 BCD { begin: '\\b(?:([0-9][0-9_]*)?\\.[0-9_]*(?:[eE][+-]?[0-9_]+)?|' + '(0[Xx])?[0-9][0-9_]*\\.?[0-9_]*(?:[pP](?:[+-]?[0-9_]+)?)?)\\b', relevance: 0 }, // Hex number in $ { begin: '\\$[0-9][0-9A-Fa-f]*', relevance: 0 }, // Number in H,D,T,Q,O,B,Y suffix { begin: '\\b(?:[0-9A-Fa-f][0-9A-Fa-f_]*[Hh]|[0-9][0-9_]*[DdTt]?|[0-7][0-7_]*[QqOo]|[0-1][0-1_]*[BbYy])\\b' }, // Number in X,D,T,Q,O,B,Y prefix { begin: '\\b(?:0[Xx][0-9A-Fa-f_]+|0[DdTt][0-9_]+|0[QqOo][0-7_]+|0[BbYy][0-1_]+)\\b'} ] }, // Double quote string hljs.QUOTE_STRING_MODE, { className: 'string', variants: [ // Single-quoted string { begin: '\'', end: '[^\\\\]\'' }, // Backquoted string { begin: '`', end: '[^\\\\]`' } ], relevance: 0 }, { className: 'symbol', variants: [ // Global label and local label { begin: '^\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\s+label)' }, // Macro-local label { begin: '^\\s*%%[A-Za-z0-9_$#@~.?]*:' } ], relevance: 0 }, // Macro parameter { className: 'subst', begin: '%[0-9]+', relevance: 0 }, // Macro parameter { className: 'subst', begin: '%!\S+', relevance: 0 }, { className: 'meta', begin: /^\s*\.[\w_-]+/ } ] }; }; /***/ }, /* 337 */ /***/ function(module, exports) { module.exports = function(hljs) { var BUILTIN_MODULES = 'ObjectLoader Animate MovieCredits Slides Filters Shading Materials LensFlare Mapping VLCAudioVideo ' + 'StereoDecoder PointCloud NetworkAccess RemoteControl RegExp ChromaKey Snowfall NodeJS Speech Charts'; var XL_KEYWORDS = { keyword: 'if then else do while until for loop import with is as where when by data constant ' + 'integer real text name boolean symbol infix prefix postfix block tree', literal: 'true false nil', built_in: 'in mod rem and or xor not abs sign floor ceil sqrt sin cos tan asin ' + 'acos atan exp expm1 log log2 log10 log1p pi at text_length text_range ' + 'text_find text_replace contains page slide basic_slide title_slide ' + 'title subtitle fade_in fade_out fade_at clear_color color line_color ' + 'line_width texture_wrap texture_transform texture scale_?x scale_?y ' + 'scale_?z? translate_?x translate_?y translate_?z? rotate_?x rotate_?y ' + 'rotate_?z? rectangle circle ellipse sphere path line_to move_to ' + 'quad_to curve_to theme background contents locally time mouse_?x ' + 'mouse_?y mouse_buttons ' + BUILTIN_MODULES }; var DOUBLE_QUOTE_TEXT = { className: 'string', begin: '"', end: '"', illegal: '\\n' }; var SINGLE_QUOTE_TEXT = { className: 'string', begin: '\'', end: '\'', illegal: '\\n' }; var LONG_TEXT = { className: 'string', begin: '<<', end: '>>' }; var BASED_NUMBER = { className: 'number', begin: '[0-9]+#[0-9A-Z_]+(\\.[0-9-A-Z_]+)?#?([Ee][+-]?[0-9]+)?' 
}; var IMPORT = { beginKeywords: 'import', end: '$', keywords: XL_KEYWORDS, contains: [DOUBLE_QUOTE_TEXT] }; var FUNCTION_DEFINITION = { className: 'function', begin: /[a-z][^\n]*->/, returnBegin: true, end: /->/, contains: [ hljs.inherit(hljs.TITLE_MODE, {starts: { endsWithParent: true, keywords: XL_KEYWORDS }}) ] }; return { aliases: ['tao'], lexemes: /[a-zA-Z][a-zA-Z0-9_?]*/, keywords: XL_KEYWORDS, contains: [ hljs.C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE, DOUBLE_QUOTE_TEXT, SINGLE_QUOTE_TEXT, LONG_TEXT, FUNCTION_DEFINITION, IMPORT, BASED_NUMBER, hljs.NUMBER_MODE ] }; }; /***/ }, /* 338 */ /***/ function(module, exports) { module.exports = function(hljs) { var KEYWORDS = 'for let if while then else return where group by xquery encoding version' + 'module namespace boundary-space preserve strip default collation base-uri ordering' + 'copy-namespaces order declare import schema namespace function option in allowing empty' + 'at tumbling window sliding window start when only end when previous next stable ascending' + 'descending empty greatest least some every satisfies switch case typeswitch try catch and' + 'or to union intersect instance of treat as castable cast map array delete insert into' + 'replace value rename copy modify update'; var LITERAL = 'false true xs:string xs:integer element item xs:date xs:datetime xs:float xs:double xs:decimal QName xs:anyURI xs:long xs:int xs:short xs:byte attribute'; var VAR = { begin: /\$[a-zA-Z0-9\-]+/ }; var NUMBER = { className: 'number', begin: '(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b', relevance: 0 }; var STRING = { className: 'string', variants: [ {begin: /"/, end: /"/, contains: [{begin: /""/, relevance: 0}]}, {begin: /'/, end: /'/, contains: [{begin: /''/, relevance: 0}]} ] }; var ANNOTATION = { className: 'meta', begin: '%\\w+' }; var COMMENT = { className: 'comment', begin: '\\(:', end: ':\\)', relevance: 10, contains: [ { className: 'doctag', begin: '@\\w+' } ] }; var METHOD = { begin: '{', end: '}' }; var CONTAINS = [ VAR, STRING, NUMBER, COMMENT, ANNOTATION, METHOD ]; METHOD.contains = CONTAINS; return { aliases: ['xpath', 'xq'], case_insensitive: false, lexemes: /[a-zA-Z\$][a-zA-Z0-9_:\-]*/, illegal: /(proc)|(abstract)|(extends)|(until)|(#)/, keywords: { keyword: KEYWORDS, literal: LITERAL }, contains: CONTAINS }; }; /***/ }, /* 339 */ /***/ function(module, exports) { module.exports = function(hljs) { var STRING = { className: 'string', contains: [hljs.BACKSLASH_ESCAPE], variants: [ { begin: 'b"', end: '"' }, { begin: 'b\'', end: '\'' }, hljs.inherit(hljs.APOS_STRING_MODE, {illegal: null}), hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}) ] }; var NUMBER = {variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE]}; return { aliases: ['zep'], case_insensitive: true, keywords: 'and include_once list abstract global private echo interface as static endswitch ' + 'array null if endwhile or const for endforeach self var let while isset public ' + 'protected exit foreach throw elseif include __FILE__ empty require_once do xor ' + 'return parent clone use __CLASS__ __LINE__ else break print eval new ' + 'catch __METHOD__ case exception default die require __FUNCTION__ ' + 'enddeclare final try switch continue endfor endif declare unset true false ' + 'trait goto instanceof insteadof __DIR__ __NAMESPACE__ ' + 'yield finally int uint long ulong char uchar double float bool boolean string' + 'likely unlikely', contains: [ hljs.C_LINE_COMMENT_MODE, hljs.HASH_COMMENT_MODE, hljs.COMMENT( '/\\*', '\\*/', 
{ contains: [ { className: 'doctag', begin: '@[A-Za-z]+' } ] } ), hljs.COMMENT( '__halt_compiler.+?;', false, { endsWithParent: true, keywords: '__halt_compiler', lexemes: hljs.UNDERSCORE_IDENT_RE } ), { className: 'string', begin: '<<<[\'"]?\\w+[\'"]?$', end: '^\\w+;', contains: [hljs.BACKSLASH_ESCAPE] }, { // swallow composed identifiers to avoid parsing them as keywords begin: /(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/ }, { className: 'function', beginKeywords: 'function', end: /[;{]/, excludeEnd: true, illegal: '\\$|\\[|%', contains: [ hljs.UNDERSCORE_TITLE_MODE, { className: 'params', begin: '\\(', end: '\\)', contains: [ 'self', hljs.C_BLOCK_COMMENT_MODE, STRING, NUMBER ] } ] }, { className: 'class', beginKeywords: 'class interface', end: '{', excludeEnd: true, illegal: /[:\(\$"]/, contains: [ {beginKeywords: 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE ] }, { beginKeywords: 'namespace', end: ';', illegal: /[\.']/, contains: [hljs.UNDERSCORE_TITLE_MODE] }, { beginKeywords: 'use', end: ';', contains: [hljs.UNDERSCORE_TITLE_MODE] }, { begin: '=>' // No markup, just a relevance booster }, STRING, NUMBER ] }; }; /***/ }, /* 340 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Default', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 341 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _date_input = __webpack_require__(342); var _date_input2 = _interopRequireDefault(_date_input); var _calendar = __webpack_require__(455); var _calendar2 = _interopRequireDefault(_calendar); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _defer = __webpack_require__(591); var _defer2 = _interopRequireDefault(_defer); var _tether_component = __webpack_require__(601); var _tether_component2 = _interopRequireDefault(_tether_component); var _classnames2 = __webpack_require__(579); var _classnames3 = _interopRequireDefault(_classnames2); var _date_utils = __webpack_require__(454); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); var _reactOnclickoutside = __webpack_require__(580); var _reactOnclickoutside2 = _interopRequireDefault(_reactOnclickoutside); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } var outsideClickIgnoreClass = 'react-datepicker-ignore-onclickoutside'; var WrappedCalendar = (0, _reactOnclickoutside2.default)(_calendar2.default); /** * General datepicker component. */ var DatePicker = _react2.default.createClass({ displayName: 'DatePicker', propTypes: { autoComplete: _react2.default.PropTypes.string, autoFocus: _react2.default.PropTypes.bool, calendarClassName: _react2.default.PropTypes.string, children: _react2.default.PropTypes.node, className: _react2.default.PropTypes.string, customInput: _react2.default.PropTypes.element, dateFormat: _react2.default.PropTypes.oneOfType([_react2.default.PropTypes.string, _react2.default.PropTypes.array]), dateFormatCalendar: _react2.default.PropTypes.string, disabled: _react2.default.PropTypes.bool, disabledKeyboardNavigation: _react2.default.PropTypes.bool, dropdownMode: _react2.default.PropTypes.oneOf(['scroll', 'select']).isRequired, endDate: _react2.default.PropTypes.object, excludeDates: _react2.default.PropTypes.array, filterDate: _react2.default.PropTypes.func, fixedHeight: _react2.default.PropTypes.bool, highlightDates: _react2.default.PropTypes.array, id: _react2.default.PropTypes.string, includeDates: _react2.default.PropTypes.array, inline: _react2.default.PropTypes.bool, isClearable: _react2.default.PropTypes.bool, locale: _react2.default.PropTypes.string, maxDate: _react2.default.PropTypes.object, minDate: _react2.default.PropTypes.object, monthsShown: _react2.default.PropTypes.number, name: _react2.default.PropTypes.string, onBlur: _react2.default.PropTypes.func, onChange: _react2.default.PropTypes.func.isRequired, onChangeRaw: _react2.default.PropTypes.func, onFocus: _react2.default.PropTypes.func, onMonthChange: _react2.default.PropTypes.func, openToDate: _react2.default.PropTypes.object, peekNextMonth: _react2.default.PropTypes.bool, placeholderText: _react2.default.PropTypes.string, popoverAttachment: _react2.default.PropTypes.string, popoverTargetAttachment: _react2.default.PropTypes.string, popoverTargetOffset: _react2.default.PropTypes.string, readOnly: _react2.default.PropTypes.bool, renderCalendarTo: _react2.default.PropTypes.any, required: _react2.default.PropTypes.bool, scrollableYearDropdown: _react2.default.PropTypes.bool, selected: _react2.default.PropTypes.object, selectsEnd: _react2.default.PropTypes.bool, selectsStart: _react2.default.PropTypes.bool, showMonthDropdown: _react2.default.PropTypes.bool, showWeekNumbers: _react2.default.PropTypes.bool, showYearDropdown: _react2.default.PropTypes.bool, forceShowMonthNavigation: _react2.default.PropTypes.bool, startDate: _react2.default.PropTypes.object, tabIndex: _react2.default.PropTypes.number, tetherConstraints: _react2.default.PropTypes.array, title: _react2.default.PropTypes.string, todayButton: _react2.default.PropTypes.string, utcOffset: _react2.default.PropTypes.number, withPortal: _react2.default.PropTypes.bool }, getDefaultProps: function getDefaultProps() { return { dateFormatCalendar: 'MMMM YYYY', onChange: function onChange() {}, disabled: false, disabledKeyboardNavigation: false, dropdownMode: 'scroll', onFocus: function onFocus() {}, onBlur: function onBlur() {}, onMonthChange: function onMonthChange() {}, popoverAttachment: 'top left', popoverTargetAttachment: 'bottom left', popoverTargetOffset: '10px 0', 
tetherConstraints: [{ to: 'window', attachment: 'together' }], utcOffset: (0, _moment2.default)().utcOffset(), monthsShown: 1, withPortal: false }; }, getInitialState: function getInitialState() { return { open: false, preventFocus: false, preSelection: this.props.selected ? (0, _moment2.default)(this.props.selected) : (0, _moment2.default)() }; }, componentWillUnmount: function componentWillUnmount() { this.clearPreventFocusTimeout(); }, clearPreventFocusTimeout: function clearPreventFocusTimeout() { if (this.preventFocusTimeout) { clearTimeout(this.preventFocusTimeout); } }, setFocus: function setFocus() { this.refs.input.focus(); }, setOpen: function setOpen(open) { this.setState({ open: open, preSelection: open && this.state.open ? this.state.preSelection : this.getInitialState().preSelection }); }, handleFocus: function handleFocus(event) { if (!this.state.preventFocus) { this.props.onFocus(event); this.setOpen(true); } }, cancelFocusInput: function cancelFocusInput() { clearTimeout(this.inputFocusTimeout); this.inputFocusTimeout = null; }, deferFocusInput: function deferFocusInput() { var _this = this; this.cancelFocusInput(); this.inputFocusTimeout = (0, _defer2.default)(function () { return _this.setFocus(); }); }, handleDropdownFocus: function handleDropdownFocus() { this.cancelFocusInput(); }, handleBlur: function handleBlur(event) { if (this.state.open) { this.deferFocusInput(); } else { this.props.onBlur(event); } }, handleCalendarClickOutside: function handleCalendarClickOutside(event) { this.setOpen(false); if (this.props.withPortal) { event.preventDefault(); } }, handleSelect: function handleSelect(date, event) { var _this2 = this; // Preventing onFocus event to fix issue // https://github.com/Hacker0x01/react-datepicker/issues/628 this.setState({ preventFocus: true }, function () { _this2.preventFocusTimeout = setTimeout(function () { return _this2.setState({ preventFocus: false }); }, 50); return _this2.preventFocusTimeout; }); this.setSelected(date, event); this.setOpen(false); }, setSelected: function setSelected(date, event) { var changedDate = date; if (changedDate !== null && (0, _date_utils.isDayDisabled)(changedDate, this.props)) { return; } if (!(0, _date_utils.isSameDay)(this.props.selected, changedDate)) { if (changedDate !== null) { if (this.props.selected) { changedDate = (0, _moment2.default)(changedDate).set({ hour: this.props.selected.hour(), minute: this.props.selected.minute(), second: this.props.selected.second() }); } this.setState({ preSelection: changedDate }); } this.props.onChange(changedDate, event); } }, setPreSelection: function setPreSelection(date) { var isDateRangePresent = typeof this.props.minDate !== 'undefined' && typeof this.props.maxDate !== 'undefined'; var isValidDateSelection = isDateRangePresent ? 
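// keyboard navigation only moves the pre-selected day when it stays within the configured minDate/maxDate range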
(0, _date_utils.isDayInRange)(date, this.props.minDate, this.props.maxDate) : true; if (isValidDateSelection) { this.setState({ preSelection: date }); } }, onInputClick: function onInputClick() { if (!this.props.disabled) { this.setOpen(true); } }, onInputKeyDown: function onInputKeyDown(event) { if (!this.state.open && !this.props.inline) { if (/^Arrow/.test(event.key)) { this.onInputClick(); } return; } var copy = (0, _moment2.default)(this.state.preSelection); if (event.key === 'Enter') { event.preventDefault(); this.handleSelect(copy, event); } else if (event.key === 'Escape') { event.preventDefault(); this.setOpen(false); } else if (event.key === 'Tab') { this.setOpen(false); } if (!this.props.disabledKeyboardNavigation) { var newSelection = void 0; switch (event.key) { case 'ArrowLeft': event.preventDefault(); newSelection = copy.subtract(1, 'days'); break; case 'ArrowRight': event.preventDefault(); newSelection = copy.add(1, 'days'); break; case 'ArrowUp': event.preventDefault(); newSelection = copy.subtract(1, 'weeks'); break; case 'ArrowDown': event.preventDefault(); newSelection = copy.add(1, 'weeks'); break; case 'PageUp': event.preventDefault(); newSelection = copy.subtract(1, 'months'); break; case 'PageDown': event.preventDefault(); newSelection = copy.add(1, 'months'); break; case 'Home': event.preventDefault(); newSelection = copy.subtract(1, 'years'); break; case 'End': event.preventDefault(); newSelection = copy.add(1, 'years'); break; } this.setPreSelection(newSelection); } }, onClearClick: function onClearClick(event) { event.preventDefault(); this.props.onChange(null, event); }, renderCalendar: function renderCalendar() { if (!this.props.inline && (!this.state.open || this.props.disabled)) { return null; } return _react2.default.createElement( WrappedCalendar, { ref: 'calendar', locale: this.props.locale, dateFormat: this.props.dateFormatCalendar, dropdownMode: this.props.dropdownMode, selected: this.props.selected, preSelection: this.state.preSelection, onSelect: this.handleSelect, openToDate: this.props.openToDate, minDate: this.props.minDate, maxDate: this.props.maxDate, selectsStart: this.props.selectsStart, selectsEnd: this.props.selectsEnd, startDate: this.props.startDate, endDate: this.props.endDate, excludeDates: this.props.excludeDates, filterDate: this.props.filterDate, onClickOutside: this.handleCalendarClickOutside, highlightDates: this.props.highlightDates, includeDates: this.props.includeDates, peekNextMonth: this.props.peekNextMonth, showMonthDropdown: this.props.showMonthDropdown, showWeekNumbers: this.props.showWeekNumbers, showYearDropdown: this.props.showYearDropdown, forceShowMonthNavigation: this.props.forceShowMonthNavigation, scrollableYearDropdown: this.props.scrollableYearDropdown, todayButton: this.props.todayButton, utcOffset: this.props.utcOffset, outsideClickIgnoreClass: outsideClickIgnoreClass, fixedHeight: this.props.fixedHeight, monthsShown: this.props.monthsShown, onDropdownFocus: this.handleDropdownFocus, onMonthChange: this.props.onMonthChange, className: this.props.calendarClassName }, this.props.children ); }, renderDateInput: function renderDateInput() { var className = (0, _classnames3.default)(this.props.className, _defineProperty({}, outsideClickIgnoreClass, this.state.open)); return _react2.default.createElement(_date_input2.default, { ref: 'input', id: this.props.id, name: this.props.name, autoFocus: this.props.autoFocus, date: this.props.selected, locale: this.props.locale, minDate: this.props.minDate, maxDate: 
this.props.maxDate, excludeDates: this.props.excludeDates, includeDates: this.props.includeDates, filterDate: this.props.filterDate, dateFormat: this.props.dateFormat, onFocus: this.handleFocus, onBlur: this.handleBlur, onClick: this.onInputClick, onChangeRaw: this.props.onChangeRaw, onKeyDown: this.onInputKeyDown, onChangeDate: this.setSelected, placeholder: this.props.placeholderText, disabled: this.props.disabled, autoComplete: this.props.autoComplete, className: className, title: this.props.title, readOnly: this.props.readOnly, required: this.props.required, tabIndex: this.props.tabIndex, customInput: this.props.customInput }); }, renderClearButton: function renderClearButton() { if (this.props.isClearable && this.props.selected != null) { return _react2.default.createElement('a', { className: 'react-datepicker__close-icon', href: '#', onClick: this.onClearClick }); } else { return null; } }, render: function render() { var calendar = this.renderCalendar(); if (this.props.inline) { return calendar; } if (this.props.withPortal) { return _react2.default.createElement( 'div', null, _react2.default.createElement( 'div', { className: 'react-datepicker__input-container' }, this.renderDateInput(), this.renderClearButton() ), this.state.open ? _react2.default.createElement( 'div', { className: 'react-datepicker__portal' }, calendar ) : null ); } return _react2.default.createElement( _tether_component2.default, { classPrefix: 'react-datepicker__tether', attachment: this.props.popoverAttachment, targetAttachment: this.props.popoverTargetAttachment, targetOffset: this.props.popoverTargetOffset, renderElementTo: this.props.renderCalendarTo, constraints: this.props.tetherConstraints }, _react2.default.createElement( 'div', { className: 'react-datepicker__input-container' }, this.renderDateInput(), this.renderClearButton() ), calendar ); } }); module.exports = DatePicker; /***/ }, /* 342 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _date_utils = __webpack_require__(454); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; } var DateInput = _react2.default.createClass({ displayName: 'DateInput', propTypes: { customInput: _react2.default.PropTypes.element, date: _react2.default.PropTypes.object, dateFormat: _react2.default.PropTypes.oneOfType([_react2.default.PropTypes.string, _react2.default.PropTypes.array]), disabled: _react2.default.PropTypes.bool, excludeDates: _react2.default.PropTypes.array, filterDate: _react2.default.PropTypes.func, includeDates: _react2.default.PropTypes.array, locale: _react2.default.PropTypes.string, maxDate: _react2.default.PropTypes.object, minDate: _react2.default.PropTypes.object, onBlur: _react2.default.PropTypes.func, onChange: _react2.default.PropTypes.func, onChangeRaw: _react2.default.PropTypes.func, onChangeDate: _react2.default.PropTypes.func }, getDefaultProps: function getDefaultProps() { return { dateFormat: 'L' }; }, getInitialState: function getInitialState() { return { value: this.safeDateFormat(this.props) }; }, componentWillReceiveProps: function componentWillReceiveProps(newProps) { if (!(0, _date_utils.isSameDay)(newProps.date, this.props.date) || !(0, _date_utils.isSameUtcOffset)(newProps.date, this.props.date) || newProps.locale !== this.props.locale || newProps.dateFormat !== this.props.dateFormat) { this.setState({ value: this.safeDateFormat(newProps) }); } }, handleChange: function handleChange(event) { if (this.props.onChange) { this.props.onChange(event); } if (this.props.onChangeRaw) { this.props.onChangeRaw(event); } if (!event.defaultPrevented) { this.handleChangeDate(event.target.value); } }, handleChangeDate: function handleChangeDate(value) { if (this.props.onChangeDate) { var date = (0, _moment2.default)(value.trim(), this.props.dateFormat, this.props.locale || _moment2.default.locale(), true); if (date.isValid() && !(0, _date_utils.isDayDisabled)(date, this.props)) { this.props.onChangeDate(date); } else if (value === '') { this.props.onChangeDate(null); } } this.setState({ value: value }); }, safeDateFormat: function safeDateFormat(props) { return props.date && props.date.clone().locale(props.locale || _moment2.default.locale()).format(Array.isArray(props.dateFormat) ? 
props.dateFormat[0] : props.dateFormat) || ''; }, handleBlur: function handleBlur(event) { this.setState({ value: this.safeDateFormat(this.props) }); if (this.props.onBlur) { this.props.onBlur(event); } }, focus: function focus() { this.refs.input.focus(); }, render: function render() { var _props = this.props, customInput = _props.customInput, date = _props.date, locale = _props.locale, minDate = _props.minDate, maxDate = _props.maxDate, excludeDates = _props.excludeDates, includeDates = _props.includeDates, filterDate = _props.filterDate, dateFormat = _props.dateFormat, onChangeDate = _props.onChangeDate, onChangeRaw = _props.onChangeRaw, rest = _objectWithoutProperties(_props, ['customInput', 'date', 'locale', 'minDate', 'maxDate', 'excludeDates', 'includeDates', 'filterDate', 'dateFormat', 'onChangeDate', 'onChangeRaw']); // eslint-disable-line no-unused-vars if (customInput) { return _react2.default.cloneElement(customInput, _extends({}, rest, { ref: 'input', value: this.state.value, onBlur: this.handleBlur, onChange: this.handleChange })); } else { return _react2.default.createElement('input', _extends({ ref: 'input', type: 'text' }, rest, { value: this.state.value, onBlur: this.handleBlur, onChange: this.handleChange })); } } }); module.exports = DateInput; /***/ }, /* 343 */ /***/ function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(module) {//! moment.js //! version : 2.17.1 //! authors : Tim Wood, Iskren Chernev, Moment.js contributors //! license : MIT //! momentjs.com ;(function (global, factory) { true ? module.exports = factory() : typeof define === 'function' && define.amd ? define(factory) : global.moment = factory() }(this, (function () { 'use strict'; var hookCallback; function hooks () { return hookCallback.apply(null, arguments); } // This is done to register the method called with moment() // without creating circular dependencies. function setHookCallback (callback) { hookCallback = callback; } function isArray(input) { return input instanceof Array || Object.prototype.toString.call(input) === '[object Array]'; } function isObject(input) { // IE8 will treat undefined and null as object if it wasn't for // input != null return input != null && Object.prototype.toString.call(input) === '[object Object]'; } function isObjectEmpty(obj) { var k; for (k in obj) { // even if its not own property I'd still call it non-empty return false; } return true; } function isNumber(input) { return typeof input === 'number' || Object.prototype.toString.call(input) === '[object Number]'; } function isDate(input) { return input instanceof Date || Object.prototype.toString.call(input) === '[object Date]'; } function map(arr, fn) { var res = [], i; for (i = 0; i < arr.length; ++i) { res.push(fn(arr[i], i)); } return res; } function hasOwnProp(a, b) { return Object.prototype.hasOwnProperty.call(a, b); } function extend(a, b) { for (var i in b) { if (hasOwnProp(b, i)) { a[i] = b[i]; } } if (hasOwnProp(b, 'toString')) { a.toString = b.toString; } if (hasOwnProp(b, 'valueOf')) { a.valueOf = b.valueOf; } return a; } function createUTC (input, format, locale, strict) { return createLocalOrUTC(input, format, locale, strict, true).utc(); } function defaultParsingFlags() { // We need to deep clone this object. 
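// these flags record what happened during parsing (unused input, overflow, invalid month, ...) and are later consulted by isValid()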
return { empty : false, unusedTokens : [], unusedInput : [], overflow : -2, charsLeftOver : 0, nullInput : false, invalidMonth : null, invalidFormat : false, userInvalidated : false, iso : false, parsedDateParts : [], meridiem : null }; } function getParsingFlags(m) { if (m._pf == null) { m._pf = defaultParsingFlags(); } return m._pf; } var some; if (Array.prototype.some) { some = Array.prototype.some; } else { some = function (fun) { var t = Object(this); var len = t.length >>> 0; for (var i = 0; i < len; i++) { if (i in t && fun.call(this, t[i], i, t)) { return true; } } return false; }; } var some$1 = some; function isValid(m) { if (m._isValid == null) { var flags = getParsingFlags(m); var parsedParts = some$1.call(flags.parsedDateParts, function (i) { return i != null; }); var isNowValid = !isNaN(m._d.getTime()) && flags.overflow < 0 && !flags.empty && !flags.invalidMonth && !flags.invalidWeekday && !flags.nullInput && !flags.invalidFormat && !flags.userInvalidated && (!flags.meridiem || (flags.meridiem && parsedParts)); if (m._strict) { isNowValid = isNowValid && flags.charsLeftOver === 0 && flags.unusedTokens.length === 0 && flags.bigHour === undefined; } if (Object.isFrozen == null || !Object.isFrozen(m)) { m._isValid = isNowValid; } else { return isNowValid; } } return m._isValid; } function createInvalid (flags) { var m = createUTC(NaN); if (flags != null) { extend(getParsingFlags(m), flags); } else { getParsingFlags(m).userInvalidated = true; } return m; } function isUndefined(input) { return input === void 0; } // Plugins that add properties should also add the key here (null value), // so we can properly clone ourselves. var momentProperties = hooks.momentProperties = []; function copyConfig(to, from) { var i, prop, val; if (!isUndefined(from._isAMomentObject)) { to._isAMomentObject = from._isAMomentObject; } if (!isUndefined(from._i)) { to._i = from._i; } if (!isUndefined(from._f)) { to._f = from._f; } if (!isUndefined(from._l)) { to._l = from._l; } if (!isUndefined(from._strict)) { to._strict = from._strict; } if (!isUndefined(from._tzm)) { to._tzm = from._tzm; } if (!isUndefined(from._isUTC)) { to._isUTC = from._isUTC; } if (!isUndefined(from._offset)) { to._offset = from._offset; } if (!isUndefined(from._pf)) { to._pf = getParsingFlags(from); } if (!isUndefined(from._locale)) { to._locale = from._locale; } if (momentProperties.length > 0) { for (i in momentProperties) { prop = momentProperties[i]; val = from[prop]; if (!isUndefined(val)) { to[prop] = val; } } } return to; } var updateInProgress = false; // Moment prototype object function Moment(config) { copyConfig(this, config); this._d = new Date(config._d != null ? config._d.getTime() : NaN); if (!this.isValid()) { this._d = new Date(NaN); } // Prevent infinite loop in case updateOffset creates new moment // objects. 
if (updateInProgress === false) { updateInProgress = true; hooks.updateOffset(this); updateInProgress = false; } } function isMoment (obj) { return obj instanceof Moment || (obj != null && obj._isAMomentObject != null); } function absFloor (number) { if (number < 0) { // -0 -> 0 return Math.ceil(number) || 0; } else { return Math.floor(number); } } function toInt(argumentForCoercion) { var coercedNumber = +argumentForCoercion, value = 0; if (coercedNumber !== 0 && isFinite(coercedNumber)) { value = absFloor(coercedNumber); } return value; } // compare two arrays, return the number of differences function compareArrays(array1, array2, dontConvert) { var len = Math.min(array1.length, array2.length), lengthDiff = Math.abs(array1.length - array2.length), diffs = 0, i; for (i = 0; i < len; i++) { if ((dontConvert && array1[i] !== array2[i]) || (!dontConvert && toInt(array1[i]) !== toInt(array2[i]))) { diffs++; } } return diffs + lengthDiff; } function warn(msg) { if (hooks.suppressDeprecationWarnings === false && (typeof console !== 'undefined') && console.warn) { console.warn('Deprecation warning: ' + msg); } } function deprecate(msg, fn) { var firstTime = true; return extend(function () { if (hooks.deprecationHandler != null) { hooks.deprecationHandler(null, msg); } if (firstTime) { var args = []; var arg; for (var i = 0; i < arguments.length; i++) { arg = ''; if (typeof arguments[i] === 'object') { arg += '\n[' + i + '] '; for (var key in arguments[0]) { arg += key + ': ' + arguments[0][key] + ', '; } arg = arg.slice(0, -2); // Remove trailing comma and space } else { arg = arguments[i]; } args.push(arg); } warn(msg + '\nArguments: ' + Array.prototype.slice.call(args).join('') + '\n' + (new Error()).stack); firstTime = false; } return fn.apply(this, arguments); }, fn); } var deprecations = {}; function deprecateSimple(name, msg) { if (hooks.deprecationHandler != null) { hooks.deprecationHandler(name, msg); } if (!deprecations[name]) { warn(msg); deprecations[name] = true; } } hooks.suppressDeprecationWarnings = false; hooks.deprecationHandler = null; function isFunction(input) { return input instanceof Function || Object.prototype.toString.call(input) === '[object Function]'; } function set (config) { var prop, i; for (i in config) { prop = config[i]; if (isFunction(prop)) { this[i] = prop; } else { this['_' + i] = prop; } } this._config = config; // Lenient ordinal parsing accepts just a number in addition to // number + (possibly) stuff coming from _ordinalParseLenient. 
this._ordinalParseLenient = new RegExp(this._ordinalParse.source + '|' + (/\d{1,2}/).source); } function mergeConfigs(parentConfig, childConfig) { var res = extend({}, parentConfig), prop; for (prop in childConfig) { if (hasOwnProp(childConfig, prop)) { if (isObject(parentConfig[prop]) && isObject(childConfig[prop])) { res[prop] = {}; extend(res[prop], parentConfig[prop]); extend(res[prop], childConfig[prop]); } else if (childConfig[prop] != null) { res[prop] = childConfig[prop]; } else { delete res[prop]; } } } for (prop in parentConfig) { if (hasOwnProp(parentConfig, prop) && !hasOwnProp(childConfig, prop) && isObject(parentConfig[prop])) { // make sure changes to properties don't modify parent config res[prop] = extend({}, res[prop]); } } return res; } function Locale(config) { if (config != null) { this.set(config); } } var keys; if (Object.keys) { keys = Object.keys; } else { keys = function (obj) { var i, res = []; for (i in obj) { if (hasOwnProp(obj, i)) { res.push(i); } } return res; }; } var keys$1 = keys; var defaultCalendar = { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[Last] dddd [at] LT', sameElse : 'L' }; function calendar (key, mom, now) { var output = this._calendar[key] || this._calendar['sameElse']; return isFunction(output) ? output.call(mom, now) : output; } var defaultLongDateFormat = { LTS : 'h:mm:ss A', LT : 'h:mm A', L : 'MM/DD/YYYY', LL : 'MMMM D, YYYY', LLL : 'MMMM D, YYYY h:mm A', LLLL : 'dddd, MMMM D, YYYY h:mm A' }; function longDateFormat (key) { var format = this._longDateFormat[key], formatUpper = this._longDateFormat[key.toUpperCase()]; if (format || !formatUpper) { return format; } this._longDateFormat[key] = formatUpper.replace(/MMMM|MM|DD|dddd/g, function (val) { return val.slice(1); }); return this._longDateFormat[key]; } var defaultInvalidDate = 'Invalid date'; function invalidDate () { return this._invalidDate; } var defaultOrdinal = '%d'; var defaultOrdinalParse = /\d{1,2}/; function ordinal (number) { return this._ordinal.replace('%d', number); } var defaultRelativeTime = { future : 'in %s', past : '%s ago', s : 'a few seconds', m : 'a minute', mm : '%d minutes', h : 'an hour', hh : '%d hours', d : 'a day', dd : '%d days', M : 'a month', MM : '%d months', y : 'a year', yy : '%d years' }; function relativeTime (number, withoutSuffix, string, isFuture) { var output = this._relativeTime[string]; return (isFunction(output)) ? output(number, withoutSuffix, string, isFuture) : output.replace(/%d/i, number); } function pastFuture (diff, output) { var format = this._relativeTime[diff > 0 ? 'future' : 'past']; return isFunction(format) ? format(output) : format.replace(/%s/i, output); } var aliases = {}; function addUnitAlias (unit, shorthand) { var lowerCase = unit.toLowerCase(); aliases[lowerCase] = aliases[lowerCase + 's'] = aliases[shorthand] = unit; } function normalizeUnits(units) { return typeof units === 'string' ? 
aliases[units] || aliases[units.toLowerCase()] : undefined; } function normalizeObjectUnits(inputObject) { var normalizedInput = {}, normalizedProp, prop; for (prop in inputObject) { if (hasOwnProp(inputObject, prop)) { normalizedProp = normalizeUnits(prop); if (normalizedProp) { normalizedInput[normalizedProp] = inputObject[prop]; } } } return normalizedInput; } var priorities = {}; function addUnitPriority(unit, priority) { priorities[unit] = priority; } function getPrioritizedUnits(unitsObj) { var units = []; for (var u in unitsObj) { units.push({unit: u, priority: priorities[u]}); } units.sort(function (a, b) { return a.priority - b.priority; }); return units; } function makeGetSet (unit, keepTime) { return function (value) { if (value != null) { set$1(this, unit, value); hooks.updateOffset(this, keepTime); return this; } else { return get(this, unit); } }; } function get (mom, unit) { return mom.isValid() ? mom._d['get' + (mom._isUTC ? 'UTC' : '') + unit]() : NaN; } function set$1 (mom, unit, value) { if (mom.isValid()) { mom._d['set' + (mom._isUTC ? 'UTC' : '') + unit](value); } } // MOMENTS function stringGet (units) { units = normalizeUnits(units); if (isFunction(this[units])) { return this[units](); } return this; } function stringSet (units, value) { if (typeof units === 'object') { units = normalizeObjectUnits(units); var prioritized = getPrioritizedUnits(units); for (var i = 0; i < prioritized.length; i++) { this[prioritized[i].unit](units[prioritized[i].unit]); } } else { units = normalizeUnits(units); if (isFunction(this[units])) { return this[units](value); } } return this; } function zeroFill(number, targetLength, forceSign) { var absNumber = '' + Math.abs(number), zerosToFill = targetLength - absNumber.length, sign = number >= 0; return (sign ? (forceSign ? '+' : '') : '-') + Math.pow(10, Math.max(0, zerosToFill)).toString().substr(1) + absNumber; } var formattingTokens = /(\[[^\[]*\])|(\\)?([Hh]mm(ss)?|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Qo?|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|kk?|mm?|ss?|S{1,9}|x|X|zz?|ZZ?|.)/g; var localFormattingTokens = /(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g; var formatFunctions = {}; var formatTokenFunctions = {}; // token: 'M' // padded: ['MM', 2] // ordinal: 'Mo' // callback: function () { this.month() + 1 } function addFormatToken (token, padded, ordinal, callback) { var func = callback; if (typeof callback === 'string') { func = function () { return this[callback](); }; } if (token) { formatTokenFunctions[token] = func; } if (padded) { formatTokenFunctions[padded[0]] = function () { return zeroFill(func.apply(this, arguments), padded[1], padded[2]); }; } if (ordinal) { formatTokenFunctions[ordinal] = function () { return this.localeData().ordinal(func.apply(this, arguments), token); }; } } function removeFormattingTokens(input) { if (input.match(/\[[\s\S]/)) { return input.replace(/^\[|\]$/g, ''); } return input.replace(/\\/g, ''); } function makeFormatFunction(format) { var array = format.match(formattingTokens), i, length; for (i = 0, length = array.length; i < length; i++) { if (formatTokenFunctions[array[i]]) { array[i] = formatTokenFunctions[array[i]]; } else { array[i] = removeFormattingTokens(array[i]); } } return function (mom) { var output = '', i; for (i = 0; i < length; i++) { output += array[i] instanceof Function ? 
array[i].call(mom, format) : array[i]; } return output; }; } // format date using native date object function formatMoment(m, format) { if (!m.isValid()) { return m.localeData().invalidDate(); } format = expandFormat(format, m.localeData()); formatFunctions[format] = formatFunctions[format] || makeFormatFunction(format); return formatFunctions[format](m); } function expandFormat(format, locale) { var i = 5; function replaceLongDateFormatTokens(input) { return locale.longDateFormat(input) || input; } localFormattingTokens.lastIndex = 0; while (i >= 0 && localFormattingTokens.test(format)) { format = format.replace(localFormattingTokens, replaceLongDateFormatTokens); localFormattingTokens.lastIndex = 0; i -= 1; } return format; } var match1 = /\d/; // 0 - 9 var match2 = /\d\d/; // 00 - 99 var match3 = /\d{3}/; // 000 - 999 var match4 = /\d{4}/; // 0000 - 9999 var match6 = /[+-]?\d{6}/; // -999999 - 999999 var match1to2 = /\d\d?/; // 0 - 99 var match3to4 = /\d\d\d\d?/; // 999 - 9999 var match5to6 = /\d\d\d\d\d\d?/; // 99999 - 999999 var match1to3 = /\d{1,3}/; // 0 - 999 var match1to4 = /\d{1,4}/; // 0 - 9999 var match1to6 = /[+-]?\d{1,6}/; // -999999 - 999999 var matchUnsigned = /\d+/; // 0 - inf var matchSigned = /[+-]?\d+/; // -inf - inf var matchOffset = /Z|[+-]\d\d:?\d\d/gi; // +00:00 -00:00 +0000 -0000 or Z var matchShortOffset = /Z|[+-]\d\d(?::?\d\d)?/gi; // +00 -00 +00:00 -00:00 +0000 -0000 or Z var matchTimestamp = /[+-]?\d+(\.\d{1,3})?/; // 123456789 123456789.123 // any word (or two) characters or numbers including two/three word month in arabic. // includes scottish gaelic two word and hyphenated months var matchWord = /[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i; var regexes = {}; function addRegexToken (token, regex, strictRegex) { regexes[token] = isFunction(regex) ? regex : function (isStrict, localeData) { return (isStrict && strictRegex) ? 
strictRegex : regex; }; } function getParseRegexForToken (token, config) { if (!hasOwnProp(regexes, token)) { return new RegExp(unescapeFormat(token)); } return regexes[token](config._strict, config._locale); } // Code from http://stackoverflow.com/questions/3561493/is-there-a-regexp-escape-function-in-javascript function unescapeFormat(s) { return regexEscape(s.replace('\\', '').replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g, function (matched, p1, p2, p3, p4) { return p1 || p2 || p3 || p4; })); } function regexEscape(s) { return s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&'); } var tokens = {}; function addParseToken (token, callback) { var i, func = callback; if (typeof token === 'string') { token = [token]; } if (isNumber(callback)) { func = function (input, array) { array[callback] = toInt(input); }; } for (i = 0; i < token.length; i++) { tokens[token[i]] = func; } } function addWeekParseToken (token, callback) { addParseToken(token, function (input, array, config, token) { config._w = config._w || {}; callback(input, config._w, config, token); }); } function addTimeToArrayFromToken(token, input, config) { if (input != null && hasOwnProp(tokens, token)) { tokens[token](input, config._a, config, token); } } var YEAR = 0; var MONTH = 1; var DATE = 2; var HOUR = 3; var MINUTE = 4; var SECOND = 5; var MILLISECOND = 6; var WEEK = 7; var WEEKDAY = 8; var indexOf; if (Array.prototype.indexOf) { indexOf = Array.prototype.indexOf; } else { indexOf = function (o) { // I know var i; for (i = 0; i < this.length; ++i) { if (this[i] === o) { return i; } } return -1; }; } var indexOf$1 = indexOf; function daysInMonth(year, month) { return new Date(Date.UTC(year, month + 1, 0)).getUTCDate(); } // FORMATTING addFormatToken('M', ['MM', 2], 'Mo', function () { return this.month() + 1; }); addFormatToken('MMM', 0, 0, function (format) { return this.localeData().monthsShort(this, format); }); addFormatToken('MMMM', 0, 0, function (format) { return this.localeData().months(this, format); }); // ALIASES addUnitAlias('month', 'M'); // PRIORITY addUnitPriority('month', 8); // PARSING addRegexToken('M', match1to2); addRegexToken('MM', match1to2, match2); addRegexToken('MMM', function (isStrict, locale) { return locale.monthsShortRegex(isStrict); }); addRegexToken('MMMM', function (isStrict, locale) { return locale.monthsRegex(isStrict); }); addParseToken(['M', 'MM'], function (input, array) { array[MONTH] = toInt(input) - 1; }); addParseToken(['MMM', 'MMMM'], function (input, array, config, token) { var month = config._locale.monthsParse(input, token, config._strict); // if we didn't find a month name, mark the date as invalid. if (month != null) { array[MONTH] = month; } else { getParsingFlags(config).invalidMonth = input; } }); // LOCALES var MONTHS_IN_FORMAT = /D[oD]?(\[[^\[\]]*\]|\s)+MMMM?/; var defaultLocaleMonths = 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_'); function localeMonths (m, format) { if (!m) { return this._months; } return isArray(this._months) ? this._months[m.month()] : this._months[(this._months.isFormat || MONTHS_IN_FORMAT).test(format) ? 'format' : 'standalone'][m.month()]; } var defaultLocaleMonthsShort = 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'); function localeMonthsShort (m, format) { if (!m) { return this._monthsShort; } return isArray(this._monthsShort) ? this._monthsShort[m.month()] : this._monthsShort[MONTHS_IN_FORMAT.test(format) ? 
'format' : 'standalone'][m.month()]; } function handleStrictParse(monthName, format, strict) { var i, ii, mom, llc = monthName.toLocaleLowerCase(); if (!this._monthsParse) { // this is not used this._monthsParse = []; this._longMonthsParse = []; this._shortMonthsParse = []; for (i = 0; i < 12; ++i) { mom = createUTC([2000, i]); this._shortMonthsParse[i] = this.monthsShort(mom, '').toLocaleLowerCase(); this._longMonthsParse[i] = this.months(mom, '').toLocaleLowerCase(); } } if (strict) { if (format === 'MMM') { ii = indexOf$1.call(this._shortMonthsParse, llc); return ii !== -1 ? ii : null; } else { ii = indexOf$1.call(this._longMonthsParse, llc); return ii !== -1 ? ii : null; } } else { if (format === 'MMM') { ii = indexOf$1.call(this._shortMonthsParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._longMonthsParse, llc); return ii !== -1 ? ii : null; } else { ii = indexOf$1.call(this._longMonthsParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._shortMonthsParse, llc); return ii !== -1 ? ii : null; } } } function localeMonthsParse (monthName, format, strict) { var i, mom, regex; if (this._monthsParseExact) { return handleStrictParse.call(this, monthName, format, strict); } if (!this._monthsParse) { this._monthsParse = []; this._longMonthsParse = []; this._shortMonthsParse = []; } // TODO: add sorting // Sorting makes sure if one month (or abbr) is a prefix of another // see sorting in computeMonthsParse for (i = 0; i < 12; i++) { // make the regex if we don't have it already mom = createUTC([2000, i]); if (strict && !this._longMonthsParse[i]) { this._longMonthsParse[i] = new RegExp('^' + this.months(mom, '').replace('.', '') + '$', 'i'); this._shortMonthsParse[i] = new RegExp('^' + this.monthsShort(mom, '').replace('.', '') + '$', 'i'); } if (!strict && !this._monthsParse[i]) { regex = '^' + this.months(mom, '') + '|^' + this.monthsShort(mom, ''); this._monthsParse[i] = new RegExp(regex.replace('.', ''), 'i'); } // test the regex if (strict && format === 'MMMM' && this._longMonthsParse[i].test(monthName)) { return i; } else if (strict && format === 'MMM' && this._shortMonthsParse[i].test(monthName)) { return i; } else if (!strict && this._monthsParse[i].test(monthName)) { return i; } } } // MOMENTS function setMonth (mom, value) { var dayOfMonth; if (!mom.isValid()) { // No op return mom; } if (typeof value === 'string') { if (/^\d+$/.test(value)) { value = toInt(value); } else { value = mom.localeData().monthsParse(value); // TODO: Another silent failure? if (!isNumber(value)) { return mom; } } } dayOfMonth = Math.min(mom.date(), daysInMonth(mom.year(), value)); mom._d['set' + (mom._isUTC ? 'UTC' : '') + 'Month'](value, dayOfMonth); return mom; } function getSetMonth (value) { if (value != null) { setMonth(this, value); hooks.updateOffset(this, true); return this; } else { return get(this, 'Month'); } } function getDaysInMonth () { return daysInMonth(this.year(), this.month()); } var defaultMonthsShortRegex = matchWord; function monthsShortRegex (isStrict) { if (this._monthsParseExact) { if (!hasOwnProp(this, '_monthsRegex')) { computeMonthsParse.call(this); } if (isStrict) { return this._monthsShortStrictRegex; } else { return this._monthsShortRegex; } } else { if (!hasOwnProp(this, '_monthsShortRegex')) { this._monthsShortRegex = defaultMonthsShortRegex; } return this._monthsShortStrictRegex && isStrict ? 
this._monthsShortStrictRegex : this._monthsShortRegex; } } var defaultMonthsRegex = matchWord; function monthsRegex (isStrict) { if (this._monthsParseExact) { if (!hasOwnProp(this, '_monthsRegex')) { computeMonthsParse.call(this); } if (isStrict) { return this._monthsStrictRegex; } else { return this._monthsRegex; } } else { if (!hasOwnProp(this, '_monthsRegex')) { this._monthsRegex = defaultMonthsRegex; } return this._monthsStrictRegex && isStrict ? this._monthsStrictRegex : this._monthsRegex; } } function computeMonthsParse () { function cmpLenRev(a, b) { return b.length - a.length; } var shortPieces = [], longPieces = [], mixedPieces = [], i, mom; for (i = 0; i < 12; i++) { // make the regex if we don't have it already mom = createUTC([2000, i]); shortPieces.push(this.monthsShort(mom, '')); longPieces.push(this.months(mom, '')); mixedPieces.push(this.months(mom, '')); mixedPieces.push(this.monthsShort(mom, '')); } // Sorting makes sure if one month (or abbr) is a prefix of another it // will match the longer piece. shortPieces.sort(cmpLenRev); longPieces.sort(cmpLenRev); mixedPieces.sort(cmpLenRev); for (i = 0; i < 12; i++) { shortPieces[i] = regexEscape(shortPieces[i]); longPieces[i] = regexEscape(longPieces[i]); } for (i = 0; i < 24; i++) { mixedPieces[i] = regexEscape(mixedPieces[i]); } this._monthsRegex = new RegExp('^(' + mixedPieces.join('|') + ')', 'i'); this._monthsShortRegex = this._monthsRegex; this._monthsStrictRegex = new RegExp('^(' + longPieces.join('|') + ')', 'i'); this._monthsShortStrictRegex = new RegExp('^(' + shortPieces.join('|') + ')', 'i'); } // FORMATTING addFormatToken('Y', 0, 0, function () { var y = this.year(); return y <= 9999 ? '' + y : '+' + y; }); addFormatToken(0, ['YY', 2], 0, function () { return this.year() % 100; }); addFormatToken(0, ['YYYY', 4], 0, 'year'); addFormatToken(0, ['YYYYY', 5], 0, 'year'); addFormatToken(0, ['YYYYYY', 6, true], 0, 'year'); // ALIASES addUnitAlias('year', 'y'); // PRIORITIES addUnitPriority('year', 1); // PARSING addRegexToken('Y', matchSigned); addRegexToken('YY', match1to2, match2); addRegexToken('YYYY', match1to4, match4); addRegexToken('YYYYY', match1to6, match6); addRegexToken('YYYYYY', match1to6, match6); addParseToken(['YYYYY', 'YYYYYY'], YEAR); addParseToken('YYYY', function (input, array) { array[YEAR] = input.length === 2 ? hooks.parseTwoDigitYear(input) : toInt(input); }); addParseToken('YY', function (input, array) { array[YEAR] = hooks.parseTwoDigitYear(input); }); addParseToken('Y', function (input, array) { array[YEAR] = parseInt(input, 10); }); // HELPERS function daysInYear(year) { return isLeapYear(year) ? 366 : 365; } function isLeapYear(year) { return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0; } // HOOKS hooks.parseTwoDigitYear = function (input) { return toInt(input) + (toInt(input) > 68 ? 
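// default two-digit year pivot: values greater than 68 are interpreted as 19xx, the rest as 20xx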
1900 : 2000); }; // MOMENTS var getSetYear = makeGetSet('FullYear', true); function getIsLeapYear () { return isLeapYear(this.year()); } function createDate (y, m, d, h, M, s, ms) { //can't just apply() to create a date: //http://stackoverflow.com/questions/181348/instantiating-a-javascript-object-by-calling-prototype-constructor-apply var date = new Date(y, m, d, h, M, s, ms); //the date constructor remaps years 0-99 to 1900-1999 if (y < 100 && y >= 0 && isFinite(date.getFullYear())) { date.setFullYear(y); } return date; } function createUTCDate (y) { var date = new Date(Date.UTC.apply(null, arguments)); //the Date.UTC function remaps years 0-99 to 1900-1999 if (y < 100 && y >= 0 && isFinite(date.getUTCFullYear())) { date.setUTCFullYear(y); } return date; } // start-of-first-week - start-of-year function firstWeekOffset(year, dow, doy) { var // first-week day -- which january is always in the first week (4 for iso, 1 for other) fwd = 7 + dow - doy, // first-week day local weekday -- which local weekday is fwd fwdlw = (7 + createUTCDate(year, 0, fwd).getUTCDay() - dow) % 7; return -fwdlw + fwd - 1; } //http://en.wikipedia.org/wiki/ISO_week_date#Calculating_a_date_given_the_year.2C_week_number_and_weekday function dayOfYearFromWeeks(year, week, weekday, dow, doy) { var localWeekday = (7 + weekday - dow) % 7, weekOffset = firstWeekOffset(year, dow, doy), dayOfYear = 1 + 7 * (week - 1) + localWeekday + weekOffset, resYear, resDayOfYear; if (dayOfYear <= 0) { resYear = year - 1; resDayOfYear = daysInYear(resYear) + dayOfYear; } else if (dayOfYear > daysInYear(year)) { resYear = year + 1; resDayOfYear = dayOfYear - daysInYear(year); } else { resYear = year; resDayOfYear = dayOfYear; } return { year: resYear, dayOfYear: resDayOfYear }; } function weekOfYear(mom, dow, doy) { var weekOffset = firstWeekOffset(mom.year(), dow, doy), week = Math.floor((mom.dayOfYear() - weekOffset - 1) / 7) + 1, resWeek, resYear; if (week < 1) { resYear = mom.year() - 1; resWeek = week + weeksInYear(resYear, dow, doy); } else if (week > weeksInYear(mom.year(), dow, doy)) { resWeek = week - weeksInYear(mom.year(), dow, doy); resYear = mom.year() + 1; } else { resYear = mom.year(); resWeek = week; } return { week: resWeek, year: resYear }; } function weeksInYear(year, dow, doy) { var weekOffset = firstWeekOffset(year, dow, doy), weekOffsetNext = firstWeekOffset(year + 1, dow, doy); return (daysInYear(year) - weekOffset + weekOffsetNext) / 7; } // FORMATTING addFormatToken('w', ['ww', 2], 'wo', 'week'); addFormatToken('W', ['WW', 2], 'Wo', 'isoWeek'); // ALIASES addUnitAlias('week', 'w'); addUnitAlias('isoWeek', 'W'); // PRIORITIES addUnitPriority('week', 5); addUnitPriority('isoWeek', 5); // PARSING addRegexToken('w', match1to2); addRegexToken('ww', match1to2, match2); addRegexToken('W', match1to2); addRegexToken('WW', match1to2, match2); addWeekParseToken(['w', 'ww', 'W', 'WW'], function (input, week, config, token) { week[token.substr(0, 1)] = toInt(input); }); // HELPERS // LOCALES function localeWeek (mom) { return weekOfYear(mom, this._week.dow, this._week.doy).week; } var defaultLocaleWeek = { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. }; function localeFirstDayOfWeek () { return this._week.dow; } function localeFirstDayOfYear () { return this._week.doy; } // MOMENTS function getSetWeek (input) { var week = this.localeData().week(this); return input == null ? 
week : this.add((input - week) * 7, 'd'); } function getSetISOWeek (input) { var week = weekOfYear(this, 1, 4).week; return input == null ? week : this.add((input - week) * 7, 'd'); } // FORMATTING addFormatToken('d', 0, 'do', 'day'); addFormatToken('dd', 0, 0, function (format) { return this.localeData().weekdaysMin(this, format); }); addFormatToken('ddd', 0, 0, function (format) { return this.localeData().weekdaysShort(this, format); }); addFormatToken('dddd', 0, 0, function (format) { return this.localeData().weekdays(this, format); }); addFormatToken('e', 0, 0, 'weekday'); addFormatToken('E', 0, 0, 'isoWeekday'); // ALIASES addUnitAlias('day', 'd'); addUnitAlias('weekday', 'e'); addUnitAlias('isoWeekday', 'E'); // PRIORITY addUnitPriority('day', 11); addUnitPriority('weekday', 11); addUnitPriority('isoWeekday', 11); // PARSING addRegexToken('d', match1to2); addRegexToken('e', match1to2); addRegexToken('E', match1to2); addRegexToken('dd', function (isStrict, locale) { return locale.weekdaysMinRegex(isStrict); }); addRegexToken('ddd', function (isStrict, locale) { return locale.weekdaysShortRegex(isStrict); }); addRegexToken('dddd', function (isStrict, locale) { return locale.weekdaysRegex(isStrict); }); addWeekParseToken(['dd', 'ddd', 'dddd'], function (input, week, config, token) { var weekday = config._locale.weekdaysParse(input, token, config._strict); // if we didn't get a weekday name, mark the date as invalid if (weekday != null) { week.d = weekday; } else { getParsingFlags(config).invalidWeekday = input; } }); addWeekParseToken(['d', 'e', 'E'], function (input, week, config, token) { week[token] = toInt(input); }); // HELPERS function parseWeekday(input, locale) { if (typeof input !== 'string') { return input; } if (!isNaN(input)) { return parseInt(input, 10); } input = locale.weekdaysParse(input); if (typeof input === 'number') { return input; } return null; } function parseIsoWeekday(input, locale) { if (typeof input === 'string') { return locale.weekdaysParse(input) % 7 || 7; } return isNaN(input) ? null : input; } // LOCALES var defaultLocaleWeekdays = 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_'); function localeWeekdays (m, format) { if (!m) { return this._weekdays; } return isArray(this._weekdays) ? this._weekdays[m.day()] : this._weekdays[this._weekdays.isFormat.test(format) ? 'format' : 'standalone'][m.day()]; } var defaultLocaleWeekdaysShort = 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'); function localeWeekdaysShort (m) { return (m) ? this._weekdaysShort[m.day()] : this._weekdaysShort; } var defaultLocaleWeekdaysMin = 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'); function localeWeekdaysMin (m) { return (m) ? this._weekdaysMin[m.day()] : this._weekdaysMin; } function handleStrictParse$1(weekdayName, format, strict) { var i, ii, mom, llc = weekdayName.toLocaleLowerCase(); if (!this._weekdaysParse) { this._weekdaysParse = []; this._shortWeekdaysParse = []; this._minWeekdaysParse = []; for (i = 0; i < 7; ++i) { mom = createUTC([2000, 1]).day(i); this._minWeekdaysParse[i] = this.weekdaysMin(mom, '').toLocaleLowerCase(); this._shortWeekdaysParse[i] = this.weekdaysShort(mom, '').toLocaleLowerCase(); this._weekdaysParse[i] = this.weekdays(mom, '').toLocaleLowerCase(); } } if (strict) { if (format === 'dddd') { ii = indexOf$1.call(this._weekdaysParse, llc); return ii !== -1 ? ii : null; } else if (format === 'ddd') { ii = indexOf$1.call(this._shortWeekdaysParse, llc); return ii !== -1 ? 
ii : null; } else { ii = indexOf$1.call(this._minWeekdaysParse, llc); return ii !== -1 ? ii : null; } } else { if (format === 'dddd') { ii = indexOf$1.call(this._weekdaysParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._shortWeekdaysParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._minWeekdaysParse, llc); return ii !== -1 ? ii : null; } else if (format === 'ddd') { ii = indexOf$1.call(this._shortWeekdaysParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._weekdaysParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._minWeekdaysParse, llc); return ii !== -1 ? ii : null; } else { ii = indexOf$1.call(this._minWeekdaysParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._weekdaysParse, llc); if (ii !== -1) { return ii; } ii = indexOf$1.call(this._shortWeekdaysParse, llc); return ii !== -1 ? ii : null; } } } function localeWeekdaysParse (weekdayName, format, strict) { var i, mom, regex; if (this._weekdaysParseExact) { return handleStrictParse$1.call(this, weekdayName, format, strict); } if (!this._weekdaysParse) { this._weekdaysParse = []; this._minWeekdaysParse = []; this._shortWeekdaysParse = []; this._fullWeekdaysParse = []; } for (i = 0; i < 7; i++) { // make the regex if we don't have it already mom = createUTC([2000, 1]).day(i); if (strict && !this._fullWeekdaysParse[i]) { this._fullWeekdaysParse[i] = new RegExp('^' + this.weekdays(mom, '').replace('.', '\.?') + '$', 'i'); this._shortWeekdaysParse[i] = new RegExp('^' + this.weekdaysShort(mom, '').replace('.', '\.?') + '$', 'i'); this._minWeekdaysParse[i] = new RegExp('^' + this.weekdaysMin(mom, '').replace('.', '\.?') + '$', 'i'); } if (!this._weekdaysParse[i]) { regex = '^' + this.weekdays(mom, '') + '|^' + this.weekdaysShort(mom, '') + '|^' + this.weekdaysMin(mom, ''); this._weekdaysParse[i] = new RegExp(regex.replace('.', ''), 'i'); } // test the regex if (strict && format === 'dddd' && this._fullWeekdaysParse[i].test(weekdayName)) { return i; } else if (strict && format === 'ddd' && this._shortWeekdaysParse[i].test(weekdayName)) { return i; } else if (strict && format === 'dd' && this._minWeekdaysParse[i].test(weekdayName)) { return i; } else if (!strict && this._weekdaysParse[i].test(weekdayName)) { return i; } } } // MOMENTS function getSetDayOfWeek (input) { if (!this.isValid()) { return input != null ? this : NaN; } var day = this._isUTC ? this._d.getUTCDay() : this._d.getDay(); if (input != null) { input = parseWeekday(input, this.localeData()); return this.add(input - day, 'd'); } else { return day; } } function getSetLocaleDayOfWeek (input) { if (!this.isValid()) { return input != null ? this : NaN; } var weekday = (this.day() + 7 - this.localeData()._week.dow) % 7; return input == null ? weekday : this.add(input - weekday, 'd'); } function getSetISODayOfWeek (input) { if (!this.isValid()) { return input != null ? this : NaN; } // behaves the same as moment#day except // as a getter, returns 7 instead of 0 (1-7 range instead of 0-6) // as a setter, sunday should belong to the previous week. if (input != null) { var weekday = parseIsoWeekday(input, this.localeData()); return this.day(this.day() % 7 ? 
weekday : weekday - 7); } else { return this.day() || 7; } } var defaultWeekdaysRegex = matchWord; function weekdaysRegex (isStrict) { if (this._weekdaysParseExact) { if (!hasOwnProp(this, '_weekdaysRegex')) { computeWeekdaysParse.call(this); } if (isStrict) { return this._weekdaysStrictRegex; } else { return this._weekdaysRegex; } } else { if (!hasOwnProp(this, '_weekdaysRegex')) { this._weekdaysRegex = defaultWeekdaysRegex; } return this._weekdaysStrictRegex && isStrict ? this._weekdaysStrictRegex : this._weekdaysRegex; } } var defaultWeekdaysShortRegex = matchWord; function weekdaysShortRegex (isStrict) { if (this._weekdaysParseExact) { if (!hasOwnProp(this, '_weekdaysRegex')) { computeWeekdaysParse.call(this); } if (isStrict) { return this._weekdaysShortStrictRegex; } else { return this._weekdaysShortRegex; } } else { if (!hasOwnProp(this, '_weekdaysShortRegex')) { this._weekdaysShortRegex = defaultWeekdaysShortRegex; } return this._weekdaysShortStrictRegex && isStrict ? this._weekdaysShortStrictRegex : this._weekdaysShortRegex; } } var defaultWeekdaysMinRegex = matchWord; function weekdaysMinRegex (isStrict) { if (this._weekdaysParseExact) { if (!hasOwnProp(this, '_weekdaysRegex')) { computeWeekdaysParse.call(this); } if (isStrict) { return this._weekdaysMinStrictRegex; } else { return this._weekdaysMinRegex; } } else { if (!hasOwnProp(this, '_weekdaysMinRegex')) { this._weekdaysMinRegex = defaultWeekdaysMinRegex; } return this._weekdaysMinStrictRegex && isStrict ? this._weekdaysMinStrictRegex : this._weekdaysMinRegex; } } function computeWeekdaysParse () { function cmpLenRev(a, b) { return b.length - a.length; } var minPieces = [], shortPieces = [], longPieces = [], mixedPieces = [], i, mom, minp, shortp, longp; for (i = 0; i < 7; i++) { // make the regex if we don't have it already mom = createUTC([2000, 1]).day(i); minp = this.weekdaysMin(mom, ''); shortp = this.weekdaysShort(mom, ''); longp = this.weekdays(mom, ''); minPieces.push(minp); shortPieces.push(shortp); longPieces.push(longp); mixedPieces.push(minp); mixedPieces.push(shortp); mixedPieces.push(longp); } // Sorting makes sure if one weekday (or abbr) is a prefix of another it // will match the longer piece. 
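// Illustration: with pieces like 'Sunday', 'Sun' and 'Su' in mixedPieces, the
// length-descending sort puts 'Sunday' ahead of 'Sun' and 'Su' in the alternation,
// so /^(Sunday|...|Sun|...|Su|...)/i consumes the whole word when parsing 'Sunday'
// instead of stopping at the 'Su' prefix (regex alternation takes the first branch that matches).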
minPieces.sort(cmpLenRev); shortPieces.sort(cmpLenRev); longPieces.sort(cmpLenRev); mixedPieces.sort(cmpLenRev); for (i = 0; i < 7; i++) { shortPieces[i] = regexEscape(shortPieces[i]); longPieces[i] = regexEscape(longPieces[i]); mixedPieces[i] = regexEscape(mixedPieces[i]); } this._weekdaysRegex = new RegExp('^(' + mixedPieces.join('|') + ')', 'i'); this._weekdaysShortRegex = this._weekdaysRegex; this._weekdaysMinRegex = this._weekdaysRegex; this._weekdaysStrictRegex = new RegExp('^(' + longPieces.join('|') + ')', 'i'); this._weekdaysShortStrictRegex = new RegExp('^(' + shortPieces.join('|') + ')', 'i'); this._weekdaysMinStrictRegex = new RegExp('^(' + minPieces.join('|') + ')', 'i'); } // FORMATTING function hFormat() { return this.hours() % 12 || 12; } function kFormat() { return this.hours() || 24; } addFormatToken('H', ['HH', 2], 0, 'hour'); addFormatToken('h', ['hh', 2], 0, hFormat); addFormatToken('k', ['kk', 2], 0, kFormat); addFormatToken('hmm', 0, 0, function () { return '' + hFormat.apply(this) + zeroFill(this.minutes(), 2); }); addFormatToken('hmmss', 0, 0, function () { return '' + hFormat.apply(this) + zeroFill(this.minutes(), 2) + zeroFill(this.seconds(), 2); }); addFormatToken('Hmm', 0, 0, function () { return '' + this.hours() + zeroFill(this.minutes(), 2); }); addFormatToken('Hmmss', 0, 0, function () { return '' + this.hours() + zeroFill(this.minutes(), 2) + zeroFill(this.seconds(), 2); }); function meridiem (token, lowercase) { addFormatToken(token, 0, 0, function () { return this.localeData().meridiem(this.hours(), this.minutes(), lowercase); }); } meridiem('a', true); meridiem('A', false); // ALIASES addUnitAlias('hour', 'h'); // PRIORITY addUnitPriority('hour', 13); // PARSING function matchMeridiem (isStrict, locale) { return locale._meridiemParse; } addRegexToken('a', matchMeridiem); addRegexToken('A', matchMeridiem); addRegexToken('H', match1to2); addRegexToken('h', match1to2); addRegexToken('HH', match1to2, match2); addRegexToken('hh', match1to2, match2); addRegexToken('hmm', match3to4); addRegexToken('hmmss', match5to6); addRegexToken('Hmm', match3to4); addRegexToken('Hmmss', match5to6); addParseToken(['H', 'HH'], HOUR); addParseToken(['a', 'A'], function (input, array, config) { config._isPm = config._locale.isPM(input); config._meridiem = input; }); addParseToken(['h', 'hh'], function (input, array, config) { array[HOUR] = toInt(input); getParsingFlags(config).bigHour = true; }); addParseToken('hmm', function (input, array, config) { var pos = input.length - 2; array[HOUR] = toInt(input.substr(0, pos)); array[MINUTE] = toInt(input.substr(pos)); getParsingFlags(config).bigHour = true; }); addParseToken('hmmss', function (input, array, config) { var pos1 = input.length - 4; var pos2 = input.length - 2; array[HOUR] = toInt(input.substr(0, pos1)); array[MINUTE] = toInt(input.substr(pos1, 2)); array[SECOND] = toInt(input.substr(pos2)); getParsingFlags(config).bigHour = true; }); addParseToken('Hmm', function (input, array, config) { var pos = input.length - 2; array[HOUR] = toInt(input.substr(0, pos)); array[MINUTE] = toInt(input.substr(pos)); }); addParseToken('Hmmss', function (input, array, config) { var pos1 = input.length - 4; var pos2 = input.length - 2; array[HOUR] = toInt(input.substr(0, pos1)); array[MINUTE] = toInt(input.substr(pos1, 2)); array[SECOND] = toInt(input.substr(pos2)); }); // LOCALES function localeIsPM (input) { // IE8 Quirks Mode & IE7 Standards Mode do not allow accessing strings like arrays // Using charAt should be more compatible. 
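// Example: localeIsPM('PM') and localeIsPM('p.m.') return true, localeIsPM('am') returns false;
// this is only the default implementation — locale configs may supply their own isPM.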
return ((input + '').toLowerCase().charAt(0) === 'p'); } var defaultLocaleMeridiemParse = /[ap]\.?m?\.?/i; function localeMeridiem (hours, minutes, isLower) { if (hours > 11) { return isLower ? 'pm' : 'PM'; } else { return isLower ? 'am' : 'AM'; } } // MOMENTS // Setting the hour should keep the time, because the user explicitly // specified which hour he wants. So trying to maintain the same hour (in // a new timezone) makes sense. Adding/subtracting hours does not follow // this rule. var getSetHour = makeGetSet('Hours', true); // months // week // weekdays // meridiem var baseConfig = { calendar: defaultCalendar, longDateFormat: defaultLongDateFormat, invalidDate: defaultInvalidDate, ordinal: defaultOrdinal, ordinalParse: defaultOrdinalParse, relativeTime: defaultRelativeTime, months: defaultLocaleMonths, monthsShort: defaultLocaleMonthsShort, week: defaultLocaleWeek, weekdays: defaultLocaleWeekdays, weekdaysMin: defaultLocaleWeekdaysMin, weekdaysShort: defaultLocaleWeekdaysShort, meridiemParse: defaultLocaleMeridiemParse }; // internal storage for locale config files var locales = {}; var localeFamilies = {}; var globalLocale; function normalizeLocale(key) { return key ? key.toLowerCase().replace('_', '-') : key; } // pick the locale from the array // try ['en-au', 'en-gb'] as 'en-au', 'en-gb', 'en', as in move through the list trying each // substring from most specific to least, but move to the next array item if it's a more specific variant than the current root function chooseLocale(names) { var i = 0, j, next, locale, split; while (i < names.length) { split = normalizeLocale(names[i]).split('-'); j = split.length; next = normalizeLocale(names[i + 1]); next = next ? next.split('-') : null; while (j > 0) { locale = loadLocale(split.slice(0, j).join('-')); if (locale) { return locale; } if (next && next.length >= j && compareArrays(split, next, true) >= j - 1) { //the next array item is better than a shallower substring of this one break; } j--; } i++; } return null; } function loadLocale(name) { var oldLocale = null; // TODO: Find a better way to register and load all the locales in Node if (!locales[name] && (typeof module !== 'undefined') && module && module.exports) { try { oldLocale = globalLocale._abbr; __webpack_require__(345)("./" + name); // because defineLocale currently also sets the global locale, we // want to undo that for lazy loaded locales getSetGlobalLocale(oldLocale); } catch (e) { } } return locales[name]; } // This function will load locale and then set the global locale. If // no arguments are passed in, it will simply return the current global // locale key. function getSetGlobalLocale (key, values) { var data; if (key) { if (isUndefined(values)) { data = getLocale(key); } else { data = defineLocale(key, values); } if (data) { // moment.duration._locale = moment._locale = data; globalLocale = data; } } return globalLocale._abbr; } function defineLocale (name, config) { if (config !== null) { var parentConfig = baseConfig; config.abbr = name; if (locales[name] != null) { deprecateSimple('defineLocaleOverride', 'use moment.updateLocale(localeName, config) to change ' + 'an existing locale. 
moment.defineLocale(localeName, ' + 'config) should only be used for creating a new locale ' + 'See http://momentjs.com/guides/#/warnings/define-locale/ for more info.'); parentConfig = locales[name]._config; } else if (config.parentLocale != null) { if (locales[config.parentLocale] != null) { parentConfig = locales[config.parentLocale]._config; } else { if (!localeFamilies[config.parentLocale]) { localeFamilies[config.parentLocale] = []; } localeFamilies[config.parentLocale].push({ name: name, config: config }); return null; } } locales[name] = new Locale(mergeConfigs(parentConfig, config)); if (localeFamilies[name]) { localeFamilies[name].forEach(function (x) { defineLocale(x.name, x.config); }); } // backwards compat for now: also set the locale // make sure we set the locale AFTER all child locales have been // created, so we won't end up with the child locale set. getSetGlobalLocale(name); return locales[name]; } else { // useful for testing delete locales[name]; return null; } } function updateLocale(name, config) { if (config != null) { var locale, parentConfig = baseConfig; // MERGE if (locales[name] != null) { parentConfig = locales[name]._config; } config = mergeConfigs(parentConfig, config); locale = new Locale(config); locale.parentLocale = locales[name]; locales[name] = locale; // backwards compat for now: also set the locale getSetGlobalLocale(name); } else { // pass null for config to unupdate, useful for tests if (locales[name] != null) { if (locales[name].parentLocale != null) { locales[name] = locales[name].parentLocale; } else if (locales[name] != null) { delete locales[name]; } } } return locales[name]; } // returns locale data function getLocale (key) { var locale; if (key && key._locale && key._locale._abbr) { key = key._locale._abbr; } if (!key) { return globalLocale; } if (!isArray(key)) { //short-circuit everything else locale = loadLocale(key); if (locale) { return locale; } key = [key]; } return chooseLocale(key); } function listLocales() { return keys$1(locales); } function checkOverflow (m) { var overflow; var a = m._a; if (a && getParsingFlags(m).overflow === -2) { overflow = a[MONTH] < 0 || a[MONTH] > 11 ? MONTH : a[DATE] < 1 || a[DATE] > daysInMonth(a[YEAR], a[MONTH]) ? DATE : a[HOUR] < 0 || a[HOUR] > 24 || (a[HOUR] === 24 && (a[MINUTE] !== 0 || a[SECOND] !== 0 || a[MILLISECOND] !== 0)) ? HOUR : a[MINUTE] < 0 || a[MINUTE] > 59 ? MINUTE : a[SECOND] < 0 || a[SECOND] > 59 ? SECOND : a[MILLISECOND] < 0 || a[MILLISECOND] > 999 ? 
MILLISECOND : -1; if (getParsingFlags(m)._overflowDayOfYear && (overflow < YEAR || overflow > DATE)) { overflow = DATE; } if (getParsingFlags(m)._overflowWeeks && overflow === -1) { overflow = WEEK; } if (getParsingFlags(m)._overflowWeekday && overflow === -1) { overflow = WEEKDAY; } getParsingFlags(m).overflow = overflow; } return m; } // iso 8601 regex // 0000-00-00 0000-W00 or 0000-W00-0 + T + 00 or 00:00 or 00:00:00 or 00:00:00.000 + +00:00 or +0000 or +00) var extendedIsoRegex = /^\s*((?:[+-]\d{6}|\d{4})-(?:\d\d-\d\d|W\d\d-\d|W\d\d|\d\d\d|\d\d))(?:(T| )(\d\d(?::\d\d(?::\d\d(?:[.,]\d+)?)?)?)([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/; var basicIsoRegex = /^\s*((?:[+-]\d{6}|\d{4})(?:\d\d\d\d|W\d\d\d|W\d\d|\d\d\d|\d\d))(?:(T| )(\d\d(?:\d\d(?:\d\d(?:[.,]\d+)?)?)?)([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/; var tzRegex = /Z|[+-]\d\d(?::?\d\d)?/; var isoDates = [ ['YYYYYY-MM-DD', /[+-]\d{6}-\d\d-\d\d/], ['YYYY-MM-DD', /\d{4}-\d\d-\d\d/], ['GGGG-[W]WW-E', /\d{4}-W\d\d-\d/], ['GGGG-[W]WW', /\d{4}-W\d\d/, false], ['YYYY-DDD', /\d{4}-\d{3}/], ['YYYY-MM', /\d{4}-\d\d/, false], ['YYYYYYMMDD', /[+-]\d{10}/], ['YYYYMMDD', /\d{8}/], // YYYYMM is NOT allowed by the standard ['GGGG[W]WWE', /\d{4}W\d{3}/], ['GGGG[W]WW', /\d{4}W\d{2}/, false], ['YYYYDDD', /\d{7}/] ]; // iso time formats and regexes var isoTimes = [ ['HH:mm:ss.SSSS', /\d\d:\d\d:\d\d\.\d+/], ['HH:mm:ss,SSSS', /\d\d:\d\d:\d\d,\d+/], ['HH:mm:ss', /\d\d:\d\d:\d\d/], ['HH:mm', /\d\d:\d\d/], ['HHmmss.SSSS', /\d\d\d\d\d\d\.\d+/], ['HHmmss,SSSS', /\d\d\d\d\d\d,\d+/], ['HHmmss', /\d\d\d\d\d\d/], ['HHmm', /\d\d\d\d/], ['HH', /\d\d/] ]; var aspNetJsonRegex = /^\/?Date\((\-?\d+)/i; // date from iso format function configFromISO(config) { var i, l, string = config._i, match = extendedIsoRegex.exec(string) || basicIsoRegex.exec(string), allowTime, dateFormat, timeFormat, tzFormat; if (match) { getParsingFlags(config).iso = true; for (i = 0, l = isoDates.length; i < l; i++) { if (isoDates[i][1].exec(match[1])) { dateFormat = isoDates[i][0]; allowTime = isoDates[i][2] !== false; break; } } if (dateFormat == null) { config._isValid = false; return; } if (match[3]) { for (i = 0, l = isoTimes.length; i < l; i++) { if (isoTimes[i][1].exec(match[3])) { // match[2] should be 'T' or space timeFormat = (match[2] || ' ') + isoTimes[i][0]; break; } } if (timeFormat == null) { config._isValid = false; return; } } if (!allowTime && timeFormat != null) { config._isValid = false; return; } if (match[4]) { if (tzRegex.exec(match[4])) { tzFormat = 'Z'; } else { config._isValid = false; return; } } config._f = dateFormat + (timeFormat || '') + (tzFormat || ''); configFromStringAndFormat(config); } else { config._isValid = false; } } // date from iso format or fallback function configFromString(config) { var matched = aspNetJsonRegex.exec(config._i); if (matched !== null) { config._d = new Date(+matched[1]); return; } configFromISO(config); if (config._isValid === false) { delete config._isValid; hooks.createFromInputFallback(config); } } hooks.createFromInputFallback = deprecate( 'value provided is not in a recognized ISO format. moment construction falls back to js Date(), ' + 'which is not reliable across all browsers and versions. Non ISO date formats are ' + 'discouraged and will be removed in an upcoming major release. Please refer to ' + 'http://momentjs.com/guides/#/warnings/js-date/ for more info.', function (config) { config._d = new Date(config._i + (config._useUTC ? ' UTC' : '')); } ); // Pick the first defined of two or three arguments. 
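// Example: defaults(undefined, null, 3) === 3, while defaults(0, 1, 2) === 0,
// because only null/undefined fall through to the next argument.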
function defaults(a, b, c) { if (a != null) { return a; } if (b != null) { return b; } return c; } function currentDateArray(config) { // hooks is actually the exported moment object var nowValue = new Date(hooks.now()); if (config._useUTC) { return [nowValue.getUTCFullYear(), nowValue.getUTCMonth(), nowValue.getUTCDate()]; } return [nowValue.getFullYear(), nowValue.getMonth(), nowValue.getDate()]; } // convert an array to a date. // the array should mirror the parameters below // note: all values past the year are optional and will default to the lowest possible value. // [year, month, day , hour, minute, second, millisecond] function configFromArray (config) { var i, date, input = [], currentDate, yearToUse; if (config._d) { return; } currentDate = currentDateArray(config); //compute day of the year from weeks and weekdays if (config._w && config._a[DATE] == null && config._a[MONTH] == null) { dayOfYearFromWeekInfo(config); } //if the day of the year is set, figure out what it is if (config._dayOfYear) { yearToUse = defaults(config._a[YEAR], currentDate[YEAR]); if (config._dayOfYear > daysInYear(yearToUse)) { getParsingFlags(config)._overflowDayOfYear = true; } date = createUTCDate(yearToUse, 0, config._dayOfYear); config._a[MONTH] = date.getUTCMonth(); config._a[DATE] = date.getUTCDate(); } // Default to current date. // * if no year, month, day of month are given, default to today // * if day of month is given, default month and year // * if month is given, default only year // * if year is given, don't default anything for (i = 0; i < 3 && config._a[i] == null; ++i) { config._a[i] = input[i] = currentDate[i]; } // Zero out whatever was not defaulted, including time for (; i < 7; i++) { config._a[i] = input[i] = (config._a[i] == null) ? (i === 2 ? 1 : 0) : config._a[i]; } // Check for 24:00:00.000 if (config._a[HOUR] === 24 && config._a[MINUTE] === 0 && config._a[SECOND] === 0 && config._a[MILLISECOND] === 0) { config._nextDay = true; config._a[HOUR] = 0; } config._d = (config._useUTC ? createUTCDate : createDate).apply(null, input); // Apply timezone offset from input. The actual utcOffset can be changed // with parseZone. if (config._tzm != null) { config._d.setUTCMinutes(config._d.getUTCMinutes() - config._tzm); } if (config._nextDay) { config._a[HOUR] = 24; } } function dayOfYearFromWeekInfo(config) { var w, weekYear, week, weekday, dow, doy, temp, weekdayOverflow; w = config._w; if (w.GG != null || w.W != null || w.E != null) { dow = 1; doy = 4; // TODO: We need to take the current isoWeekYear, but that depends on // how we interpret now (local, utc, fixed offset). So create // a now version of current config (take local/utc/offset flags, and // create now). weekYear = defaults(w.GG, config._a[YEAR], weekOfYear(createLocal(), 1, 4).year); week = defaults(w.W, 1); weekday = defaults(w.E, 1); if (weekday < 1 || weekday > 7) { weekdayOverflow = true; } } else { dow = config._locale._week.dow; doy = config._locale._week.doy; var curWeek = weekOfYear(createLocal(), dow, doy); weekYear = defaults(w.gg, config._a[YEAR], curWeek.year); // Default to current week. 
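// (Locale-week branch: gg/w and d/e tokens use the locale's own dow/doy; a missing
// week or week-year falls back to the parsed year or the week containing "now",
// mirroring the ISO branch above, and a missing weekday defaults to the first day of the week.)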
week = defaults(w.w, curWeek.week); if (w.d != null) { // weekday -- low day numbers are considered next week weekday = w.d; if (weekday < 0 || weekday > 6) { weekdayOverflow = true; } } else if (w.e != null) { // local weekday -- counting starts from begining of week weekday = w.e + dow; if (w.e < 0 || w.e > 6) { weekdayOverflow = true; } } else { // default to begining of week weekday = dow; } } if (week < 1 || week > weeksInYear(weekYear, dow, doy)) { getParsingFlags(config)._overflowWeeks = true; } else if (weekdayOverflow != null) { getParsingFlags(config)._overflowWeekday = true; } else { temp = dayOfYearFromWeeks(weekYear, week, weekday, dow, doy); config._a[YEAR] = temp.year; config._dayOfYear = temp.dayOfYear; } } // constant that refers to the ISO standard hooks.ISO_8601 = function () {}; // date from string and format string function configFromStringAndFormat(config) { // TODO: Move this to another part of the creation flow to prevent circular deps if (config._f === hooks.ISO_8601) { configFromISO(config); return; } config._a = []; getParsingFlags(config).empty = true; // This array is used to make a Date, either with `new Date` or `Date.UTC` var string = '' + config._i, i, parsedInput, tokens, token, skipped, stringLength = string.length, totalParsedInputLength = 0; tokens = expandFormat(config._f, config._locale).match(formattingTokens) || []; for (i = 0; i < tokens.length; i++) { token = tokens[i]; parsedInput = (string.match(getParseRegexForToken(token, config)) || [])[0]; // console.log('token', token, 'parsedInput', parsedInput, // 'regex', getParseRegexForToken(token, config)); if (parsedInput) { skipped = string.substr(0, string.indexOf(parsedInput)); if (skipped.length > 0) { getParsingFlags(config).unusedInput.push(skipped); } string = string.slice(string.indexOf(parsedInput) + parsedInput.length); totalParsedInputLength += parsedInput.length; } // don't parse if it's not a known token if (formatTokenFunctions[token]) { if (parsedInput) { getParsingFlags(config).empty = false; } else { getParsingFlags(config).unusedTokens.push(token); } addTimeToArrayFromToken(token, parsedInput, config); } else if (config._strict && !parsedInput) { getParsingFlags(config).unusedTokens.push(token); } } // add remaining unparsed input length to the string getParsingFlags(config).charsLeftOver = stringLength - totalParsedInputLength; if (string.length > 0) { getParsingFlags(config).unusedInput.push(string); } // clear _12h flag if hour is <= 12 if (config._a[HOUR] <= 12 && getParsingFlags(config).bigHour === true && config._a[HOUR] > 0) { getParsingFlags(config).bigHour = undefined; } getParsingFlags(config).parsedDateParts = config._a.slice(0); getParsingFlags(config).meridiem = config._meridiem; // handle meridiem config._a[HOUR] = meridiemFixWrap(config._locale, config._a[HOUR], config._meridiem); configFromArray(config); checkOverflow(config); } function meridiemFixWrap (locale, hour, meridiem) { var isPm; if (meridiem == null) { // nothing to do return hour; } if (locale.meridiemHour != null) { return locale.meridiemHour(hour, meridiem); } else if (locale.isPM != null) { // Fallback isPm = locale.isPM(meridiem); if (isPm && hour < 12) { hour += 12; } if (!isPm && hour === 12) { hour = 0; } return hour; } else { // this is not supposed to happen return hour; } } // date from string and array of format strings function configFromStringAndArray(config) { var tempConfig, bestMoment, scoreToBeat, i, currentScore; if (config._f.length === 0) { getParsingFlags(config).invalidFormat = true; 
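// no candidate formats were supplied, so flag it and produce an invalid date below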
config._d = new Date(NaN); return; } for (i = 0; i < config._f.length; i++) { currentScore = 0; tempConfig = copyConfig({}, config); if (config._useUTC != null) { tempConfig._useUTC = config._useUTC; } tempConfig._f = config._f[i]; configFromStringAndFormat(tempConfig); if (!isValid(tempConfig)) { continue; } // if there is any input that was not parsed add a penalty for that format currentScore += getParsingFlags(tempConfig).charsLeftOver; //or tokens currentScore += getParsingFlags(tempConfig).unusedTokens.length * 10; getParsingFlags(tempConfig).score = currentScore; if (scoreToBeat == null || currentScore < scoreToBeat) { scoreToBeat = currentScore; bestMoment = tempConfig; } } extend(config, bestMoment || tempConfig); } function configFromObject(config) { if (config._d) { return; } var i = normalizeObjectUnits(config._i); config._a = map([i.year, i.month, i.day || i.date, i.hour, i.minute, i.second, i.millisecond], function (obj) { return obj && parseInt(obj, 10); }); configFromArray(config); } function createFromConfig (config) { var res = new Moment(checkOverflow(prepareConfig(config))); if (res._nextDay) { // Adding is smart enough around DST res.add(1, 'd'); res._nextDay = undefined; } return res; } function prepareConfig (config) { var input = config._i, format = config._f; config._locale = config._locale || getLocale(config._l); if (input === null || (format === undefined && input === '')) { return createInvalid({nullInput: true}); } if (typeof input === 'string') { config._i = input = config._locale.preparse(input); } if (isMoment(input)) { return new Moment(checkOverflow(input)); } else if (isDate(input)) { config._d = input; } else if (isArray(format)) { configFromStringAndArray(config); } else if (format) { configFromStringAndFormat(config); } else { configFromInput(config); } if (!isValid(config)) { config._d = null; } return config; } function configFromInput(config) { var input = config._i; if (input === undefined) { config._d = new Date(hooks.now()); } else if (isDate(input)) { config._d = new Date(input.valueOf()); } else if (typeof input === 'string') { configFromString(config); } else if (isArray(input)) { config._a = map(input.slice(0), function (obj) { return parseInt(obj, 10); }); configFromArray(config); } else if (typeof(input) === 'object') { configFromObject(config); } else if (isNumber(input)) { // from milliseconds config._d = new Date(input); } else { hooks.createFromInputFallback(config); } } function createLocalOrUTC (input, format, locale, strict, isUTC) { var c = {}; if (locale === true || locale === false) { strict = locale; locale = undefined; } if ((isObject(input) && isObjectEmpty(input)) || (isArray(input) && input.length === 0)) { input = undefined; } // object construction must be done this way. // https://github.com/moment/moment/issues/1423 c._isAMomentObject = true; c._useUTC = c._isUTC = isUTC; c._l = locale; c._i = input; c._f = format; c._strict = strict; return createFromConfig(c); } function createLocal (input, format, locale, strict) { return createLocalOrUTC(input, format, locale, strict, false); } var prototypeMin = deprecate( 'moment().min is deprecated, use moment.max instead. http://momentjs.com/guides/#/warnings/min-max/', function () { var other = createLocal.apply(null, arguments); if (this.isValid() && other.isValid()) { return other < this ? this : other; } else { return createInvalid(); } } ); var prototypeMax = deprecate( 'moment().max is deprecated, use moment.min instead. 
http://momentjs.com/guides/#/warnings/min-max/', function () { var other = createLocal.apply(null, arguments); if (this.isValid() && other.isValid()) { return other > this ? this : other; } else { return createInvalid(); } } ); // Pick a moment m from moments so that m[fn](other) is true for all // other. This relies on the function fn to be transitive. // // moments should either be an array of moment objects or an array, whose // first element is an array of moment objects. function pickBy(fn, moments) { var res, i; if (moments.length === 1 && isArray(moments[0])) { moments = moments[0]; } if (!moments.length) { return createLocal(); } res = moments[0]; for (i = 1; i < moments.length; ++i) { if (!moments[i].isValid() || moments[i][fn](res)) { res = moments[i]; } } return res; } // TODO: Use [].sort instead? function min () { var args = [].slice.call(arguments, 0); return pickBy('isBefore', args); } function max () { var args = [].slice.call(arguments, 0); return pickBy('isAfter', args); } var now = function () { return Date.now ? Date.now() : +(new Date()); }; function Duration (duration) { var normalizedInput = normalizeObjectUnits(duration), years = normalizedInput.year || 0, quarters = normalizedInput.quarter || 0, months = normalizedInput.month || 0, weeks = normalizedInput.week || 0, days = normalizedInput.day || 0, hours = normalizedInput.hour || 0, minutes = normalizedInput.minute || 0, seconds = normalizedInput.second || 0, milliseconds = normalizedInput.millisecond || 0; // representation for dateAddRemove this._milliseconds = +milliseconds + seconds * 1e3 + // 1000 minutes * 6e4 + // 1000 * 60 hours * 1000 * 60 * 60; //using 1000 * 60 * 60 instead of 36e5 to avoid floating point rounding errors https://github.com/moment/moment/issues/2978 // Because of dateAddRemove treats 24 hours as different from a // day when working around DST, we need to store them separately this._days = +days + weeks * 7; // It is impossible translate months into days without knowing // which months you are are talking about, so we have to store // it separately. this._months = +months + quarters * 3 + years * 12; this._data = {}; this._locale = getLocale(); this._bubble(); } function isDuration (obj) { return obj instanceof Duration; } function absRound (number) { if (number < 0) { return Math.round(-1 * number) * -1; } else { return Math.round(number); } } // FORMATTING function offset (token, separator) { addFormatToken(token, 0, 0, function () { var offset = this.utcOffset(); var sign = '+'; if (offset < 0) { offset = -offset; sign = '-'; } return sign + zeroFill(~~(offset / 60), 2) + separator + zeroFill(~~(offset) % 60, 2); }); } offset('Z', ':'); offset('ZZ', ''); // PARSING addRegexToken('Z', matchShortOffset); addRegexToken('ZZ', matchShortOffset); addParseToken(['Z', 'ZZ'], function (input, array, config) { config._useUTC = true; config._tzm = offsetFromString(matchShortOffset, input); }); // HELPERS // timezone chunker // '+10:00' > ['10', '00'] // '-1530' > ['-15', '30'] var chunkOffset = /([\+\-]|\d\d)/gi; function offsetFromString(matcher, string) { var matches = (string || '').match(matcher); if (matches === null) { return null; } var chunk = matches[matches.length - 1] || []; var parts = (chunk + '').match(chunkOffset) || ['-', 0, 0]; var minutes = +(parts[1] * 60) + toInt(parts[2]); return minutes === 0 ? 0 : parts[0] === '+' ? minutes : -minutes; } // Return a moment from input, that is local/utc/zone equivalent to model. 
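// Illustration: when model is in UTC/offset mode the result is a clone of model shifted
// to input's instant (keeping model's offset handling); otherwise input is simply parsed
// as a plain local moment.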
function cloneWithOffset(input, model) { var res, diff; if (model._isUTC) { res = model.clone(); diff = (isMoment(input) || isDate(input) ? input.valueOf() : createLocal(input).valueOf()) - res.valueOf(); // Use low-level api, because this fn is low-level api. res._d.setTime(res._d.valueOf() + diff); hooks.updateOffset(res, false); return res; } else { return createLocal(input).local(); } } function getDateOffset (m) { // On Firefox.24 Date#getTimezoneOffset returns a floating point. // https://github.com/moment/moment/pull/1871 return -Math.round(m._d.getTimezoneOffset() / 15) * 15; } // HOOKS // This function will be called whenever a moment is mutated. // It is intended to keep the offset in sync with the timezone. hooks.updateOffset = function () {}; // MOMENTS // keepLocalTime = true means only change the timezone, without // affecting the local hour. So 5:31:26 +0300 --[utcOffset(2, true)]--> // 5:31:26 +0200 It is possible that 5:31:26 doesn't exist with offset // +0200, so we adjust the time as needed, to be valid. // // Keeping the time actually adds/subtracts (one hour) // from the actual represented time. That is why we call updateOffset // a second time. In case it wants us to change the offset again // _changeInProgress == true case, then we have to adjust, because // there is no such time in the given timezone. function getSetOffset (input, keepLocalTime) { var offset = this._offset || 0, localAdjust; if (!this.isValid()) { return input != null ? this : NaN; } if (input != null) { if (typeof input === 'string') { input = offsetFromString(matchShortOffset, input); if (input === null) { return this; } } else if (Math.abs(input) < 16) { input = input * 60; } if (!this._isUTC && keepLocalTime) { localAdjust = getDateOffset(this); } this._offset = input; this._isUTC = true; if (localAdjust != null) { this.add(localAdjust, 'm'); } if (offset !== input) { if (!keepLocalTime || this._changeInProgress) { addSubtract(this, createDuration(input - offset, 'm'), 1, false); } else if (!this._changeInProgress) { this._changeInProgress = true; hooks.updateOffset(this, true); this._changeInProgress = null; } } return this; } else { return this._isUTC ? offset : getDateOffset(this); } } function getSetZone (input, keepLocalTime) { if (input != null) { if (typeof input !== 'string') { input = -input; } this.utcOffset(input, keepLocalTime); return this; } else { return -this.utcOffset(); } } function setOffsetToUTC (keepLocalTime) { return this.utcOffset(0, keepLocalTime); } function setOffsetToLocal (keepLocalTime) { if (this._isUTC) { this.utcOffset(0, keepLocalTime); this._isUTC = false; if (keepLocalTime) { this.subtract(getDateOffset(this), 'm'); } } return this; } function setOffsetToParsedOffset () { if (this._tzm != null) { this.utcOffset(this._tzm); } else if (typeof this._i === 'string') { var tZone = offsetFromString(matchOffset, this._i); if (tZone != null) { this.utcOffset(tZone); } else { this.utcOffset(0, true); } } return this; } function hasAlignedHourOffset (input) { if (!this.isValid()) { return false; } input = input ? createLocal(input).utcOffset() : 0; return (this.utcOffset() - input) % 60 === 0; } function isDaylightSavingTime () { return ( this.utcOffset() > this.clone().month(0).utcOffset() || this.utcOffset() > this.clone().month(5).utcOffset() ); } function isDaylightSavingTimeShifted () { if (!isUndefined(this._isDSTShifted)) { return this._isDSTShifted; } var c = {}; copyConfig(c, this); c = prepareConfig(c); if (c._a) { var other = c._isUTC ? 
createUTC(c._a) : createLocal(c._a); this._isDSTShifted = this.isValid() && compareArrays(c._a, other.toArray()) > 0; } else { this._isDSTShifted = false; } return this._isDSTShifted; } function isLocal () { return this.isValid() ? !this._isUTC : false; } function isUtcOffset () { return this.isValid() ? this._isUTC : false; } function isUtc () { return this.isValid() ? this._isUTC && this._offset === 0 : false; } // ASP.NET json date format regex var aspNetRegex = /^(\-)?(?:(\d*)[. ])?(\d+)\:(\d+)(?:\:(\d+)(\.\d*)?)?$/; // from http://docs.closure-library.googlecode.com/git/closure_goog_date_date.js.source.html // somewhat more in line with 4.4.3.2 2004 spec, but allows decimal anywhere // and further modified to allow for strings containing both week and day var isoRegex = /^(-)?P(?:(-?[0-9,.]*)Y)?(?:(-?[0-9,.]*)M)?(?:(-?[0-9,.]*)W)?(?:(-?[0-9,.]*)D)?(?:T(?:(-?[0-9,.]*)H)?(?:(-?[0-9,.]*)M)?(?:(-?[0-9,.]*)S)?)?$/; function createDuration (input, key) { var duration = input, // matching against regexp is expensive, do it on demand match = null, sign, ret, diffRes; if (isDuration(input)) { duration = { ms : input._milliseconds, d : input._days, M : input._months }; } else if (isNumber(input)) { duration = {}; if (key) { duration[key] = input; } else { duration.milliseconds = input; } } else if (!!(match = aspNetRegex.exec(input))) { sign = (match[1] === '-') ? -1 : 1; duration = { y : 0, d : toInt(match[DATE]) * sign, h : toInt(match[HOUR]) * sign, m : toInt(match[MINUTE]) * sign, s : toInt(match[SECOND]) * sign, ms : toInt(absRound(match[MILLISECOND] * 1000)) * sign // the millisecond decimal point is included in the match }; } else if (!!(match = isoRegex.exec(input))) { sign = (match[1] === '-') ? -1 : 1; duration = { y : parseIso(match[2], sign), M : parseIso(match[3], sign), w : parseIso(match[4], sign), d : parseIso(match[5], sign), h : parseIso(match[6], sign), m : parseIso(match[7], sign), s : parseIso(match[8], sign) }; } else if (duration == null) {// checks for null or undefined duration = {}; } else if (typeof duration === 'object' && ('from' in duration || 'to' in duration)) { diffRes = momentsDifference(createLocal(duration.from), createLocal(duration.to)); duration = {}; duration.ms = diffRes.milliseconds; duration.M = diffRes.months; } ret = new Duration(duration); if (isDuration(input) && hasOwnProp(input, '_locale')) { ret._locale = input._locale; } return ret; } createDuration.fn = Duration.prototype; function parseIso (inp, sign) { // We'd normally use ~~inp for this, but unfortunately it also // converts floats to ints. // inp may be undefined, so careful calling replace on it. var res = inp && parseFloat(inp.replace(',', '.')); // apply sign while we're at it return (isNaN(res) ? 
0 : res) * sign; } function positiveMomentsDifference(base, other) { var res = {milliseconds: 0, months: 0}; res.months = other.month() - base.month() + (other.year() - base.year()) * 12; if (base.clone().add(res.months, 'M').isAfter(other)) { --res.months; } res.milliseconds = +other - +(base.clone().add(res.months, 'M')); return res; } function momentsDifference(base, other) { var res; if (!(base.isValid() && other.isValid())) { return {milliseconds: 0, months: 0}; } other = cloneWithOffset(other, base); if (base.isBefore(other)) { res = positiveMomentsDifference(base, other); } else { res = positiveMomentsDifference(other, base); res.milliseconds = -res.milliseconds; res.months = -res.months; } return res; } // TODO: remove 'name' arg after deprecation is removed function createAdder(direction, name) { return function (val, period) { var dur, tmp; //invert the arguments, but complain about it if (period !== null && !isNaN(+period)) { deprecateSimple(name, 'moment().' + name + '(period, number) is deprecated. Please use moment().' + name + '(number, period). ' + 'See http://momentjs.com/guides/#/warnings/add-inverted-param/ for more info.'); tmp = val; val = period; period = tmp; } val = typeof val === 'string' ? +val : val; dur = createDuration(val, period); addSubtract(this, dur, direction); return this; }; } function addSubtract (mom, duration, isAdding, updateOffset) { var milliseconds = duration._milliseconds, days = absRound(duration._days), months = absRound(duration._months); if (!mom.isValid()) { // No op return; } updateOffset = updateOffset == null ? true : updateOffset; if (milliseconds) { mom._d.setTime(mom._d.valueOf() + milliseconds * isAdding); } if (days) { set$1(mom, 'Date', get(mom, 'Date') + days * isAdding); } if (months) { setMonth(mom, get(mom, 'Month') + months * isAdding); } if (updateOffset) { hooks.updateOffset(mom, days || months); } } var add = createAdder(1, 'add'); var subtract = createAdder(-1, 'subtract'); function getCalendarFormat(myMoment, now) { var diff = myMoment.diff(now, 'days', true); return diff < -6 ? 'sameElse' : diff < -1 ? 'lastWeek' : diff < 0 ? 'lastDay' : diff < 1 ? 'sameDay' : diff < 2 ? 'nextDay' : diff < 7 ? 'nextWeek' : 'sameElse'; } function calendar$1 (time, formats) { // We want to compare the start of today, vs this. // Getting start-of-today depends on whether we're local/utc/offset or not. var now = time || createLocal(), sod = cloneWithOffset(now, this).startOf('day'), format = hooks.calendarFormat(this, sod) || 'sameElse'; var output = formats && (isFunction(formats[format]) ? formats[format].call(this, now) : formats[format]); return this.format(output || this.localeData().calendar(format, this, createLocal(now))); } function clone () { return new Moment(this); } function isAfter (input, units) { var localInput = isMoment(input) ? input : createLocal(input); if (!(this.isValid() && localInput.isValid())) { return false; } units = normalizeUnits(!isUndefined(units) ? units : 'millisecond'); if (units === 'millisecond') { return this.valueOf() > localInput.valueOf(); } else { return localInput.valueOf() < this.clone().startOf(units).valueOf(); } } function isBefore (input, units) { var localInput = isMoment(input) ? input : createLocal(input); if (!(this.isValid() && localInput.isValid())) { return false; } units = normalizeUnits(!isUndefined(units) ? 
units : 'millisecond'); if (units === 'millisecond') { return this.valueOf() < localInput.valueOf(); } else { return this.clone().endOf(units).valueOf() < localInput.valueOf(); } } function isBetween (from, to, units, inclusivity) { inclusivity = inclusivity || '()'; return (inclusivity[0] === '(' ? this.isAfter(from, units) : !this.isBefore(from, units)) && (inclusivity[1] === ')' ? this.isBefore(to, units) : !this.isAfter(to, units)); } function isSame (input, units) { var localInput = isMoment(input) ? input : createLocal(input), inputMs; if (!(this.isValid() && localInput.isValid())) { return false; } units = normalizeUnits(units || 'millisecond'); if (units === 'millisecond') { return this.valueOf() === localInput.valueOf(); } else { inputMs = localInput.valueOf(); return this.clone().startOf(units).valueOf() <= inputMs && inputMs <= this.clone().endOf(units).valueOf(); } } function isSameOrAfter (input, units) { return this.isSame(input, units) || this.isAfter(input,units); } function isSameOrBefore (input, units) { return this.isSame(input, units) || this.isBefore(input,units); } function diff (input, units, asFloat) { var that, zoneDelta, delta, output; if (!this.isValid()) { return NaN; } that = cloneWithOffset(input, this); if (!that.isValid()) { return NaN; } zoneDelta = (that.utcOffset() - this.utcOffset()) * 6e4; units = normalizeUnits(units); if (units === 'year' || units === 'month' || units === 'quarter') { output = monthDiff(this, that); if (units === 'quarter') { output = output / 3; } else if (units === 'year') { output = output / 12; } } else { delta = this - that; output = units === 'second' ? delta / 1e3 : // 1000 units === 'minute' ? delta / 6e4 : // 1000 * 60 units === 'hour' ? delta / 36e5 : // 1000 * 60 * 60 units === 'day' ? (delta - zoneDelta) / 864e5 : // 1000 * 60 * 60 * 24, negate dst units === 'week' ? (delta - zoneDelta) / 6048e5 : // 1000 * 60 * 60 * 24 * 7, negate dst delta; } return asFloat ? 
output : absFloor(output); } function monthDiff (a, b) { // difference in months var wholeMonthDiff = ((b.year() - a.year()) * 12) + (b.month() - a.month()), // b is in (anchor - 1 month, anchor + 1 month) anchor = a.clone().add(wholeMonthDiff, 'months'), anchor2, adjust; if (b - anchor < 0) { anchor2 = a.clone().add(wholeMonthDiff - 1, 'months'); // linear across the month adjust = (b - anchor) / (anchor - anchor2); } else { anchor2 = a.clone().add(wholeMonthDiff + 1, 'months'); // linear across the month adjust = (b - anchor) / (anchor2 - anchor); } //check for negative zero, return zero if negative zero return -(wholeMonthDiff + adjust) || 0; } hooks.defaultFormat = 'YYYY-MM-DDTHH:mm:ssZ'; hooks.defaultFormatUtc = 'YYYY-MM-DDTHH:mm:ss[Z]'; function toString () { return this.clone().locale('en').format('ddd MMM DD YYYY HH:mm:ss [GMT]ZZ'); } function toISOString () { var m = this.clone().utc(); if (0 < m.year() && m.year() <= 9999) { if (isFunction(Date.prototype.toISOString)) { // native implementation is ~50x faster, use it when we can return this.toDate().toISOString(); } else { return formatMoment(m, 'YYYY-MM-DD[T]HH:mm:ss.SSS[Z]'); } } else { return formatMoment(m, 'YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]'); } } /** * Return a human readable representation of a moment that can * also be evaluated to get a new moment which is the same * * @link https://nodejs.org/dist/latest/docs/api/util.html#util_custom_inspect_function_on_objects */ function inspect () { if (!this.isValid()) { return 'moment.invalid(/* ' + this._i + ' */)'; } var func = 'moment'; var zone = ''; if (!this.isLocal()) { func = this.utcOffset() === 0 ? 'moment.utc' : 'moment.parseZone'; zone = 'Z'; } var prefix = '[' + func + '("]'; var year = (0 < this.year() && this.year() <= 9999) ? 'YYYY' : 'YYYYYY'; var datetime = '-MM-DD[T]HH:mm:ss.SSS'; var suffix = zone + '[")]'; return this.format(prefix + year + datetime + suffix); } function format (inputString) { if (!inputString) { inputString = this.isUtc() ? hooks.defaultFormatUtc : hooks.defaultFormat; } var output = formatMoment(this, inputString); return this.localeData().postformat(output); } function from (time, withoutSuffix) { if (this.isValid() && ((isMoment(time) && time.isValid()) || createLocal(time).isValid())) { return createDuration({to: this, from: time}).locale(this.locale()).humanize(!withoutSuffix); } else { return this.localeData().invalidDate(); } } function fromNow (withoutSuffix) { return this.from(createLocal(), withoutSuffix); } function to (time, withoutSuffix) { if (this.isValid() && ((isMoment(time) && time.isValid()) || createLocal(time).isValid())) { return createDuration({from: this, to: time}).locale(this.locale()).humanize(!withoutSuffix); } else { return this.localeData().invalidDate(); } } function toNow (withoutSuffix) { return this.to(createLocal(), withoutSuffix); } // If passed a locale key, it will set the locale for this // instance. Otherwise, it will return the locale configuration // variables for this instance. function locale (key) { var newLocaleData; if (key === undefined) { return this._locale._abbr; } else { newLocaleData = getLocale(key); if (newLocaleData != null) { this._locale = newLocaleData; } return this; } } var lang = deprecate( 'moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. 
Use moment().locale() to change languages.', function (key) { if (key === undefined) { return this.localeData(); } else { return this.locale(key); } } ); function localeData () { return this._locale; } function startOf (units) { units = normalizeUnits(units); // the following switch intentionally omits break keywords // to utilize falling through the cases. switch (units) { case 'year': this.month(0); /* falls through */ case 'quarter': case 'month': this.date(1); /* falls through */ case 'week': case 'isoWeek': case 'day': case 'date': this.hours(0); /* falls through */ case 'hour': this.minutes(0); /* falls through */ case 'minute': this.seconds(0); /* falls through */ case 'second': this.milliseconds(0); } // weeks are a special case if (units === 'week') { this.weekday(0); } if (units === 'isoWeek') { this.isoWeekday(1); } // quarters are also special if (units === 'quarter') { this.month(Math.floor(this.month() / 3) * 3); } return this; } function endOf (units) { units = normalizeUnits(units); if (units === undefined || units === 'millisecond') { return this; } // 'date' is an alias for 'day', so it should be considered as such. if (units === 'date') { units = 'day'; } return this.startOf(units).add(1, (units === 'isoWeek' ? 'week' : units)).subtract(1, 'ms'); } function valueOf () { return this._d.valueOf() - ((this._offset || 0) * 60000); } function unix () { return Math.floor(this.valueOf() / 1000); } function toDate () { return new Date(this.valueOf()); } function toArray () { var m = this; return [m.year(), m.month(), m.date(), m.hour(), m.minute(), m.second(), m.millisecond()]; } function toObject () { var m = this; return { years: m.year(), months: m.month(), date: m.date(), hours: m.hours(), minutes: m.minutes(), seconds: m.seconds(), milliseconds: m.milliseconds() }; } function toJSON () { // new Date(NaN).toJSON() === null return this.isValid() ? 
this.toISOString() : null; } function isValid$1 () { return isValid(this); } function parsingFlags () { return extend({}, getParsingFlags(this)); } function invalidAt () { return getParsingFlags(this).overflow; } function creationData() { return { input: this._i, format: this._f, locale: this._locale, isUTC: this._isUTC, strict: this._strict }; } // FORMATTING addFormatToken(0, ['gg', 2], 0, function () { return this.weekYear() % 100; }); addFormatToken(0, ['GG', 2], 0, function () { return this.isoWeekYear() % 100; }); function addWeekYearFormatToken (token, getter) { addFormatToken(0, [token, token.length], 0, getter); } addWeekYearFormatToken('gggg', 'weekYear'); addWeekYearFormatToken('ggggg', 'weekYear'); addWeekYearFormatToken('GGGG', 'isoWeekYear'); addWeekYearFormatToken('GGGGG', 'isoWeekYear'); // ALIASES addUnitAlias('weekYear', 'gg'); addUnitAlias('isoWeekYear', 'GG'); // PRIORITY addUnitPriority('weekYear', 1); addUnitPriority('isoWeekYear', 1); // PARSING addRegexToken('G', matchSigned); addRegexToken('g', matchSigned); addRegexToken('GG', match1to2, match2); addRegexToken('gg', match1to2, match2); addRegexToken('GGGG', match1to4, match4); addRegexToken('gggg', match1to4, match4); addRegexToken('GGGGG', match1to6, match6); addRegexToken('ggggg', match1to6, match6); addWeekParseToken(['gggg', 'ggggg', 'GGGG', 'GGGGG'], function (input, week, config, token) { week[token.substr(0, 2)] = toInt(input); }); addWeekParseToken(['gg', 'GG'], function (input, week, config, token) { week[token] = hooks.parseTwoDigitYear(input); }); // MOMENTS function getSetWeekYear (input) { return getSetWeekYearHelper.call(this, input, this.week(), this.weekday(), this.localeData()._week.dow, this.localeData()._week.doy); } function getSetISOWeekYear (input) { return getSetWeekYearHelper.call(this, input, this.isoWeek(), this.isoWeekday(), 1, 4); } function getISOWeeksInYear () { return weeksInYear(this.year(), 1, 4); } function getWeeksInYear () { var weekInfo = this.localeData()._week; return weeksInYear(this.year(), weekInfo.dow, weekInfo.doy); } function getSetWeekYearHelper(input, week, weekday, dow, doy) { var weeksTarget; if (input == null) { return weekOfYear(this, dow, doy).year; } else { weeksTarget = weeksInYear(input, dow, doy); if (week > weeksTarget) { week = weeksTarget; } return setWeekAll.call(this, input, week, weekday, dow, doy); } } function setWeekAll(weekYear, week, weekday, dow, doy) { var dayOfYearData = dayOfYearFromWeeks(weekYear, week, weekday, dow, doy), date = createUTCDate(dayOfYearData.year, 0, dayOfYearData.dayOfYear); this.year(date.getUTCFullYear()); this.month(date.getUTCMonth()); this.date(date.getUTCDate()); return this; } // FORMATTING addFormatToken('Q', 0, 'Qo', 'quarter'); // ALIASES addUnitAlias('quarter', 'Q'); // PRIORITY addUnitPriority('quarter', 7); // PARSING addRegexToken('Q', match1); addParseToken('Q', function (input, array) { array[MONTH] = (toInt(input) - 1) * 3; }); // MOMENTS function getSetQuarter (input) { return input == null ? Math.ceil((this.month() + 1) / 3) : this.month((input - 1) * 3 + this.month() % 3); } // FORMATTING addFormatToken('D', ['DD', 2], 'Do', 'date'); // ALIASES addUnitAlias('date', 'D'); // PRIOROITY addUnitPriority('date', 9); // PARSING addRegexToken('D', match1to2); addRegexToken('DD', match1to2, match2); addRegexToken('Do', function (isStrict, locale) { return isStrict ? 
locale._ordinalParse : locale._ordinalParseLenient; }); addParseToken(['D', 'DD'], DATE); addParseToken('Do', function (input, array) { array[DATE] = toInt(input.match(match1to2)[0], 10); }); // MOMENTS var getSetDayOfMonth = makeGetSet('Date', true); // FORMATTING addFormatToken('DDD', ['DDDD', 3], 'DDDo', 'dayOfYear'); // ALIASES addUnitAlias('dayOfYear', 'DDD'); // PRIORITY addUnitPriority('dayOfYear', 4); // PARSING addRegexToken('DDD', match1to3); addRegexToken('DDDD', match3); addParseToken(['DDD', 'DDDD'], function (input, array, config) { config._dayOfYear = toInt(input); }); // HELPERS // MOMENTS function getSetDayOfYear (input) { var dayOfYear = Math.round((this.clone().startOf('day') - this.clone().startOf('year')) / 864e5) + 1; return input == null ? dayOfYear : this.add((input - dayOfYear), 'd'); } // FORMATTING addFormatToken('m', ['mm', 2], 0, 'minute'); // ALIASES addUnitAlias('minute', 'm'); // PRIORITY addUnitPriority('minute', 14); // PARSING addRegexToken('m', match1to2); addRegexToken('mm', match1to2, match2); addParseToken(['m', 'mm'], MINUTE); // MOMENTS var getSetMinute = makeGetSet('Minutes', false); // FORMATTING addFormatToken('s', ['ss', 2], 0, 'second'); // ALIASES addUnitAlias('second', 's'); // PRIORITY addUnitPriority('second', 15); // PARSING addRegexToken('s', match1to2); addRegexToken('ss', match1to2, match2); addParseToken(['s', 'ss'], SECOND); // MOMENTS var getSetSecond = makeGetSet('Seconds', false); // FORMATTING addFormatToken('S', 0, 0, function () { return ~~(this.millisecond() / 100); }); addFormatToken(0, ['SS', 2], 0, function () { return ~~(this.millisecond() / 10); }); addFormatToken(0, ['SSS', 3], 0, 'millisecond'); addFormatToken(0, ['SSSS', 4], 0, function () { return this.millisecond() * 10; }); addFormatToken(0, ['SSSSS', 5], 0, function () { return this.millisecond() * 100; }); addFormatToken(0, ['SSSSSS', 6], 0, function () { return this.millisecond() * 1000; }); addFormatToken(0, ['SSSSSSS', 7], 0, function () { return this.millisecond() * 10000; }); addFormatToken(0, ['SSSSSSSS', 8], 0, function () { return this.millisecond() * 100000; }); addFormatToken(0, ['SSSSSSSSS', 9], 0, function () { return this.millisecond() * 1000000; }); // ALIASES addUnitAlias('millisecond', 'ms'); // PRIORITY addUnitPriority('millisecond', 16); // PARSING addRegexToken('S', match1to3, match1); addRegexToken('SS', match1to3, match2); addRegexToken('SSS', match1to3, match3); var token; for (token = 'SSSS'; token.length <= 9; token += 'S') { addRegexToken(token, matchUnsigned); } function parseMs(input, array) { array[MILLISECOND] = toInt(('0.' + input) * 1000); } for (token = 'S'; token.length <= 9; token += 'S') { addParseToken(token, parseMs); } // MOMENTS var getSetMillisecond = makeGetSet('Milliseconds', false); // FORMATTING addFormatToken('z', 0, 0, 'zoneAbbr'); addFormatToken('zz', 0, 0, 'zoneName'); // MOMENTS function getZoneAbbr () { return this._isUTC ? 'UTC' : ''; } function getZoneName () { return this._isUTC ? 
'Coordinated Universal Time' : ''; } var proto = Moment.prototype; proto.add = add; proto.calendar = calendar$1; proto.clone = clone; proto.diff = diff; proto.endOf = endOf; proto.format = format; proto.from = from; proto.fromNow = fromNow; proto.to = to; proto.toNow = toNow; proto.get = stringGet; proto.invalidAt = invalidAt; proto.isAfter = isAfter; proto.isBefore = isBefore; proto.isBetween = isBetween; proto.isSame = isSame; proto.isSameOrAfter = isSameOrAfter; proto.isSameOrBefore = isSameOrBefore; proto.isValid = isValid$1; proto.lang = lang; proto.locale = locale; proto.localeData = localeData; proto.max = prototypeMax; proto.min = prototypeMin; proto.parsingFlags = parsingFlags; proto.set = stringSet; proto.startOf = startOf; proto.subtract = subtract; proto.toArray = toArray; proto.toObject = toObject; proto.toDate = toDate; proto.toISOString = toISOString; proto.inspect = inspect; proto.toJSON = toJSON; proto.toString = toString; proto.unix = unix; proto.valueOf = valueOf; proto.creationData = creationData; // Year proto.year = getSetYear; proto.isLeapYear = getIsLeapYear; // Week Year proto.weekYear = getSetWeekYear; proto.isoWeekYear = getSetISOWeekYear; // Quarter proto.quarter = proto.quarters = getSetQuarter; // Month proto.month = getSetMonth; proto.daysInMonth = getDaysInMonth; // Week proto.week = proto.weeks = getSetWeek; proto.isoWeek = proto.isoWeeks = getSetISOWeek; proto.weeksInYear = getWeeksInYear; proto.isoWeeksInYear = getISOWeeksInYear; // Day proto.date = getSetDayOfMonth; proto.day = proto.days = getSetDayOfWeek; proto.weekday = getSetLocaleDayOfWeek; proto.isoWeekday = getSetISODayOfWeek; proto.dayOfYear = getSetDayOfYear; // Hour proto.hour = proto.hours = getSetHour; // Minute proto.minute = proto.minutes = getSetMinute; // Second proto.second = proto.seconds = getSetSecond; // Millisecond proto.millisecond = proto.milliseconds = getSetMillisecond; // Offset proto.utcOffset = getSetOffset; proto.utc = setOffsetToUTC; proto.local = setOffsetToLocal; proto.parseZone = setOffsetToParsedOffset; proto.hasAlignedHourOffset = hasAlignedHourOffset; proto.isDST = isDaylightSavingTime; proto.isLocal = isLocal; proto.isUtcOffset = isUtcOffset; proto.isUtc = isUtc; proto.isUTC = isUtc; // Timezone proto.zoneAbbr = getZoneAbbr; proto.zoneName = getZoneName; // Deprecations proto.dates = deprecate('dates accessor is deprecated. Use date instead.', getSetDayOfMonth); proto.months = deprecate('months accessor is deprecated. Use month instead', getSetMonth); proto.years = deprecate('years accessor is deprecated. Use year instead', getSetYear); proto.zone = deprecate('moment().zone is deprecated, use moment().utcOffset instead. http://momentjs.com/guides/#/warnings/zone/', getSetZone); proto.isDSTShifted = deprecate('isDSTShifted is deprecated. 
See http://momentjs.com/guides/#/warnings/dst-shifted/ for more information', isDaylightSavingTimeShifted); function createUnix (input) { return createLocal(input * 1000); } function createInZone () { return createLocal.apply(null, arguments).parseZone(); } function preParsePostFormat (string) { return string; } var proto$1 = Locale.prototype; proto$1.calendar = calendar; proto$1.longDateFormat = longDateFormat; proto$1.invalidDate = invalidDate; proto$1.ordinal = ordinal; proto$1.preparse = preParsePostFormat; proto$1.postformat = preParsePostFormat; proto$1.relativeTime = relativeTime; proto$1.pastFuture = pastFuture; proto$1.set = set; // Month proto$1.months = localeMonths; proto$1.monthsShort = localeMonthsShort; proto$1.monthsParse = localeMonthsParse; proto$1.monthsRegex = monthsRegex; proto$1.monthsShortRegex = monthsShortRegex; // Week proto$1.week = localeWeek; proto$1.firstDayOfYear = localeFirstDayOfYear; proto$1.firstDayOfWeek = localeFirstDayOfWeek; // Day of Week proto$1.weekdays = localeWeekdays; proto$1.weekdaysMin = localeWeekdaysMin; proto$1.weekdaysShort = localeWeekdaysShort; proto$1.weekdaysParse = localeWeekdaysParse; proto$1.weekdaysRegex = weekdaysRegex; proto$1.weekdaysShortRegex = weekdaysShortRegex; proto$1.weekdaysMinRegex = weekdaysMinRegex; // Hours proto$1.isPM = localeIsPM; proto$1.meridiem = localeMeridiem; function get$1 (format, index, field, setter) { var locale = getLocale(); var utc = createUTC().set(setter, index); return locale[field](utc, format); } function listMonthsImpl (format, index, field) { if (isNumber(format)) { index = format; format = undefined; } format = format || ''; if (index != null) { return get$1(format, index, field, 'month'); } var i; var out = []; for (i = 0; i < 12; i++) { out[i] = get$1(format, i, field, 'month'); } return out; } // () // (5) // (fmt, 5) // (fmt) // (true) // (true, 5) // (true, fmt, 5) // (true, fmt) function listWeekdaysImpl (localeSorted, format, index, field) { if (typeof localeSorted === 'boolean') { if (isNumber(format)) { index = format; format = undefined; } format = format || ''; } else { format = localeSorted; index = format; localeSorted = false; if (isNumber(format)) { index = format; format = undefined; } format = format || ''; } var locale = getLocale(), shift = localeSorted ? locale._week.dow : 0; if (index != null) { return get$1(format, (index + shift) % 7, field, 'day'); } var i; var out = []; for (i = 0; i < 7; i++) { out[i] = get$1(format, (i + shift) % 7, field, 'day'); } return out; } function listMonths (format, index) { return listMonthsImpl(format, index, 'months'); } function listMonthsShort (format, index) { return listMonthsImpl(format, index, 'monthsShort'); } function listWeekdays (localeSorted, format, index) { return listWeekdaysImpl(localeSorted, format, index, 'weekdays'); } function listWeekdaysShort (localeSorted, format, index) { return listWeekdaysImpl(localeSorted, format, index, 'weekdaysShort'); } function listWeekdaysMin (localeSorted, format, index) { return listWeekdaysImpl(localeSorted, format, index, 'weekdaysMin'); } getSetGlobalLocale('en', { ordinalParse: /\d{1,2}(th|st|nd|rd)/, ordinal : function (number) { var b = number % 10, output = (toInt(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; } }); // Side effect imports hooks.lang = deprecate('moment.lang is deprecated. Use moment.locale instead.', getSetGlobalLocale); hooks.langData = deprecate('moment.langData is deprecated. 
Use moment.localeData instead.', getLocale); var mathAbs = Math.abs; function abs () { var data = this._data; this._milliseconds = mathAbs(this._milliseconds); this._days = mathAbs(this._days); this._months = mathAbs(this._months); data.milliseconds = mathAbs(data.milliseconds); data.seconds = mathAbs(data.seconds); data.minutes = mathAbs(data.minutes); data.hours = mathAbs(data.hours); data.months = mathAbs(data.months); data.years = mathAbs(data.years); return this; } function addSubtract$1 (duration, input, value, direction) { var other = createDuration(input, value); duration._milliseconds += direction * other._milliseconds; duration._days += direction * other._days; duration._months += direction * other._months; return duration._bubble(); } // supports only 2.0-style add(1, 's') or add(duration) function add$1 (input, value) { return addSubtract$1(this, input, value, 1); } // supports only 2.0-style subtract(1, 's') or subtract(duration) function subtract$1 (input, value) { return addSubtract$1(this, input, value, -1); } function absCeil (number) { if (number < 0) { return Math.floor(number); } else { return Math.ceil(number); } } function bubble () { var milliseconds = this._milliseconds; var days = this._days; var months = this._months; var data = this._data; var seconds, minutes, hours, years, monthsFromDays; // if we have a mix of positive and negative values, bubble down first // check: https://github.com/moment/moment/issues/2166 if (!((milliseconds >= 0 && days >= 0 && months >= 0) || (milliseconds <= 0 && days <= 0 && months <= 0))) { milliseconds += absCeil(monthsToDays(months) + days) * 864e5; days = 0; months = 0; } // The following code bubbles up values, see the tests for // examples of what that means. data.milliseconds = milliseconds % 1000; seconds = absFloor(milliseconds / 1000); data.seconds = seconds % 60; minutes = absFloor(seconds / 60); data.minutes = minutes % 60; hours = absFloor(minutes / 60); data.hours = hours % 24; days += absFloor(hours / 24); // convert days to months monthsFromDays = absFloor(daysToMonths(days)); months += monthsFromDays; days -= absCeil(monthsToDays(monthsFromDays)); // 12 months -> 1 year years = absFloor(months / 12); months %= 12; data.days = days; data.months = months; data.years = years; return this; } function daysToMonths (days) { // 400 years have 146097 days (taking into account leap year rules) // 400 years have 12 months === 4800 return days * 4800 / 146097; } function monthsToDays (months) { // the reverse of daysToMonths return months * 146097 / 4800; } function as (units) { var days; var months; var milliseconds = this._milliseconds; units = normalizeUnits(units); if (units === 'month' || units === 'year') { days = this._days + milliseconds / 864e5; months = this._months + daysToMonths(days); return units === 'month' ? months : months / 12; } else { // handle milliseconds separately because of floating point math errors (issue #1867) days = this._days + Math.round(monthsToDays(this._months)); switch (units) { case 'week' : return days / 7 + milliseconds / 6048e5; case 'day' : return days + milliseconds / 864e5; case 'hour' : return days * 24 + milliseconds / 36e5; case 'minute' : return days * 1440 + milliseconds / 6e4; case 'second' : return days * 86400 + milliseconds / 1000; // Math.floor prevents floating point math errors here case 'millisecond': return Math.floor(days * 864e5) + milliseconds; default: throw new Error('Unknown unit ' + units); } } } // TODO: Use this.as('ms')? 
function valueOf$1 () { return ( this._milliseconds + this._days * 864e5 + (this._months % 12) * 2592e6 + toInt(this._months / 12) * 31536e6 ); } function makeAs (alias) { return function () { return this.as(alias); }; } var asMilliseconds = makeAs('ms'); var asSeconds = makeAs('s'); var asMinutes = makeAs('m'); var asHours = makeAs('h'); var asDays = makeAs('d'); var asWeeks = makeAs('w'); var asMonths = makeAs('M'); var asYears = makeAs('y'); function get$2 (units) { units = normalizeUnits(units); return this[units + 's'](); } function makeGetter(name) { return function () { return this._data[name]; }; } var milliseconds = makeGetter('milliseconds'); var seconds = makeGetter('seconds'); var minutes = makeGetter('minutes'); var hours = makeGetter('hours'); var days = makeGetter('days'); var months = makeGetter('months'); var years = makeGetter('years'); function weeks () { return absFloor(this.days() / 7); } var round = Math.round; var thresholds = { s: 45, // seconds to minute m: 45, // minutes to hour h: 22, // hours to day d: 26, // days to month M: 11 // months to year }; // helper function for moment.fn.from, moment.fn.fromNow, and moment.duration.fn.humanize function substituteTimeAgo(string, number, withoutSuffix, isFuture, locale) { return locale.relativeTime(number || 1, !!withoutSuffix, string, isFuture); } function relativeTime$1 (posNegDuration, withoutSuffix, locale) { var duration = createDuration(posNegDuration).abs(); var seconds = round(duration.as('s')); var minutes = round(duration.as('m')); var hours = round(duration.as('h')); var days = round(duration.as('d')); var months = round(duration.as('M')); var years = round(duration.as('y')); var a = seconds < thresholds.s && ['s', seconds] || minutes <= 1 && ['m'] || minutes < thresholds.m && ['mm', minutes] || hours <= 1 && ['h'] || hours < thresholds.h && ['hh', hours] || days <= 1 && ['d'] || days < thresholds.d && ['dd', days] || months <= 1 && ['M'] || months < thresholds.M && ['MM', months] || years <= 1 && ['y'] || ['yy', years]; a[2] = withoutSuffix; a[3] = +posNegDuration > 0; a[4] = locale; return substituteTimeAgo.apply(null, a); } // This function allows you to set the rounding function for relative time strings function getSetRelativeTimeRounding (roundingFunction) { if (roundingFunction === undefined) { return round; } if (typeof(roundingFunction) === 'function') { round = roundingFunction; return true; } return false; } // This function allows you to set a threshold for relative time strings function getSetRelativeTimeThreshold (threshold, limit) { if (thresholds[threshold] === undefined) { return false; } if (limit === undefined) { return thresholds[threshold]; } thresholds[threshold] = limit; return true; } function humanize (withSuffix) { var locale = this.localeData(); var output = relativeTime$1(this, !withSuffix, locale); if (withSuffix) { output = locale.pastFuture(+this, output); } return locale.postformat(output); } var abs$1 = Math.abs; function toISOString$1() { // for ISO strings we do not use the normal bubbling rules: // * milliseconds bubble up until they become hours // * days do not bubble at all // * months bubble up until they become years // This is because there is no context-free conversion between hours and days // (think of clock changes) // and also not between days and months (28-31 days per month) var seconds = abs$1(this._milliseconds) / 1000; var days = abs$1(this._days); var months = abs$1(this._months); var minutes, hours, years; // 3600 seconds -> 60 minutes -> 1 hour minutes = 
absFloor(seconds / 60); hours = absFloor(minutes / 60); seconds %= 60; minutes %= 60; // 12 months -> 1 year years = absFloor(months / 12); months %= 12; // inspired by https://github.com/dordille/moment-isoduration/blob/master/moment.isoduration.js var Y = years; var M = months; var D = days; var h = hours; var m = minutes; var s = seconds; var total = this.asSeconds(); if (!total) { // this is the same as C#'s (Noda) and python (isodate)... // but not other JS (goog.date) return 'P0D'; } return (total < 0 ? '-' : '') + 'P' + (Y ? Y + 'Y' : '') + (M ? M + 'M' : '') + (D ? D + 'D' : '') + ((h || m || s) ? 'T' : '') + (h ? h + 'H' : '') + (m ? m + 'M' : '') + (s ? s + 'S' : ''); } var proto$2 = Duration.prototype; proto$2.abs = abs; proto$2.add = add$1; proto$2.subtract = subtract$1; proto$2.as = as; proto$2.asMilliseconds = asMilliseconds; proto$2.asSeconds = asSeconds; proto$2.asMinutes = asMinutes; proto$2.asHours = asHours; proto$2.asDays = asDays; proto$2.asWeeks = asWeeks; proto$2.asMonths = asMonths; proto$2.asYears = asYears; proto$2.valueOf = valueOf$1; proto$2._bubble = bubble; proto$2.get = get$2; proto$2.milliseconds = milliseconds; proto$2.seconds = seconds; proto$2.minutes = minutes; proto$2.hours = hours; proto$2.days = days; proto$2.weeks = weeks; proto$2.months = months; proto$2.years = years; proto$2.humanize = humanize; proto$2.toISOString = toISOString$1; proto$2.toString = toISOString$1; proto$2.toJSON = toISOString$1; proto$2.locale = locale; proto$2.localeData = localeData; // Deprecations proto$2.toIsoString = deprecate('toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)', toISOString$1); proto$2.lang = lang; // Side effect imports // FORMATTING addFormatToken('X', 0, 0, 'unix'); addFormatToken('x', 0, 0, 'valueOf'); // PARSING addRegexToken('x', matchSigned); addRegexToken('X', matchTimestamp); addParseToken('X', function (input, array, config) { config._d = new Date(parseFloat(input, 10) * 1000); }); addParseToken('x', function (input, array, config) { config._d = new Date(toInt(input)); }); // Side effect imports hooks.version = '2.17.1'; setHookCallback(createLocal); hooks.fn = proto; hooks.min = min; hooks.max = max; hooks.now = now; hooks.utc = createUTC; hooks.unix = createUnix; hooks.months = listMonths; hooks.isDate = isDate; hooks.locale = getSetGlobalLocale; hooks.invalid = createInvalid; hooks.duration = createDuration; hooks.isMoment = isMoment; hooks.weekdays = listWeekdays; hooks.parseZone = createInZone; hooks.localeData = getLocale; hooks.isDuration = isDuration; hooks.monthsShort = listMonthsShort; hooks.weekdaysMin = listWeekdaysMin; hooks.defineLocale = defineLocale; hooks.updateLocale = updateLocale; hooks.locales = listLocales; hooks.weekdaysShort = listWeekdaysShort; hooks.normalizeUnits = normalizeUnits; hooks.relativeTimeRounding = getSetRelativeTimeRounding; hooks.relativeTimeThreshold = getSetRelativeTimeThreshold; hooks.calendarFormat = getCalendarFormat; hooks.prototype = proto; return hooks; }))); /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(344)(module))) /***/ }, /* 344 */ /***/ function(module, exports) { module.exports = function(module) { if(!module.webpackPolyfill) { module.deprecate = function() {}; module.paths = []; // module.parent = undefined by default module.children = []; module.webpackPolyfill = 1; } return module; } /***/ }, /* 345 */ /***/ function(module, exports, __webpack_require__) { var map = { "./af": 346, "./af.js": 346, "./ar": 347, "./ar-dz": 348, "./ar-dz.js": 
348, "./ar-ly": 349, "./ar-ly.js": 349, "./ar-ma": 350, "./ar-ma.js": 350, "./ar-sa": 351, "./ar-sa.js": 351, "./ar-tn": 352, "./ar-tn.js": 352, "./ar.js": 347, "./az": 353, "./az.js": 353, "./be": 354, "./be.js": 354, "./bg": 355, "./bg.js": 355, "./bn": 356, "./bn.js": 356, "./bo": 357, "./bo.js": 357, "./br": 358, "./br.js": 358, "./bs": 359, "./bs.js": 359, "./ca": 360, "./ca.js": 360, "./cs": 361, "./cs.js": 361, "./cv": 362, "./cv.js": 362, "./cy": 363, "./cy.js": 363, "./da": 364, "./da.js": 364, "./de": 365, "./de-at": 366, "./de-at.js": 366, "./de.js": 365, "./dv": 367, "./dv.js": 367, "./el": 368, "./el.js": 368, "./en-au": 369, "./en-au.js": 369, "./en-ca": 370, "./en-ca.js": 370, "./en-gb": 371, "./en-gb.js": 371, "./en-ie": 372, "./en-ie.js": 372, "./en-nz": 373, "./en-nz.js": 373, "./eo": 374, "./eo.js": 374, "./es": 375, "./es-do": 376, "./es-do.js": 376, "./es.js": 375, "./et": 377, "./et.js": 377, "./eu": 378, "./eu.js": 378, "./fa": 379, "./fa.js": 379, "./fi": 380, "./fi.js": 380, "./fo": 381, "./fo.js": 381, "./fr": 382, "./fr-ca": 383, "./fr-ca.js": 383, "./fr-ch": 384, "./fr-ch.js": 384, "./fr.js": 382, "./fy": 385, "./fy.js": 385, "./gd": 386, "./gd.js": 386, "./gl": 387, "./gl.js": 387, "./he": 388, "./he.js": 388, "./hi": 389, "./hi.js": 389, "./hr": 390, "./hr.js": 390, "./hu": 391, "./hu.js": 391, "./hy-am": 392, "./hy-am.js": 392, "./id": 393, "./id.js": 393, "./is": 394, "./is.js": 394, "./it": 395, "./it.js": 395, "./ja": 396, "./ja.js": 396, "./jv": 397, "./jv.js": 397, "./ka": 398, "./ka.js": 398, "./kk": 399, "./kk.js": 399, "./km": 400, "./km.js": 400, "./ko": 401, "./ko.js": 401, "./ky": 402, "./ky.js": 402, "./lb": 403, "./lb.js": 403, "./lo": 404, "./lo.js": 404, "./lt": 405, "./lt.js": 405, "./lv": 406, "./lv.js": 406, "./me": 407, "./me.js": 407, "./mi": 408, "./mi.js": 408, "./mk": 409, "./mk.js": 409, "./ml": 410, "./ml.js": 410, "./mr": 411, "./mr.js": 411, "./ms": 412, "./ms-my": 413, "./ms-my.js": 413, "./ms.js": 412, "./my": 414, "./my.js": 414, "./nb": 415, "./nb.js": 415, "./ne": 416, "./ne.js": 416, "./nl": 417, "./nl-be": 418, "./nl-be.js": 418, "./nl.js": 417, "./nn": 419, "./nn.js": 419, "./pa-in": 420, "./pa-in.js": 420, "./pl": 421, "./pl.js": 421, "./pt": 422, "./pt-br": 423, "./pt-br.js": 423, "./pt.js": 422, "./ro": 424, "./ro.js": 424, "./ru": 425, "./ru.js": 425, "./se": 426, "./se.js": 426, "./si": 427, "./si.js": 427, "./sk": 428, "./sk.js": 428, "./sl": 429, "./sl.js": 429, "./sq": 430, "./sq.js": 430, "./sr": 431, "./sr-cyrl": 432, "./sr-cyrl.js": 432, "./sr.js": 431, "./ss": 433, "./ss.js": 433, "./sv": 434, "./sv.js": 434, "./sw": 435, "./sw.js": 435, "./ta": 436, "./ta.js": 436, "./te": 437, "./te.js": 437, "./tet": 438, "./tet.js": 438, "./th": 439, "./th.js": 439, "./tl-ph": 440, "./tl-ph.js": 440, "./tlh": 441, "./tlh.js": 441, "./tr": 442, "./tr.js": 442, "./tzl": 443, "./tzl.js": 443, "./tzm": 444, "./tzm-latn": 445, "./tzm-latn.js": 445, "./tzm.js": 444, "./uk": 446, "./uk.js": 446, "./uz": 447, "./uz.js": 447, "./vi": 448, "./vi.js": 448, "./x-pseudo": 449, "./x-pseudo.js": 449, "./yo": 450, "./yo.js": 450, "./zh-cn": 451, "./zh-cn.js": 451, "./zh-hk": 452, "./zh-hk.js": 452, "./zh-tw": 453, "./zh-tw.js": 453 }; function webpackContext(req) { return __webpack_require__(webpackContextResolve(req)); }; function webpackContextResolve(req) { return map[req] || (function() { throw new Error("Cannot find module '" + req + "'.") }()); }; webpackContext.keys = function webpackContextKeys() { return Object.keys(map); }; 
webpackContext.resolve = webpackContextResolve; module.exports = webpackContext; webpackContext.id = 345; /***/ }, /* 346 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Afrikaans [af] //! author : Werner Mollentze : https://github.com/wernerm ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var af = moment.defineLocale('af', { months : 'Januarie_Februarie_Maart_April_Mei_Junie_Julie_Augustus_September_Oktober_November_Desember'.split('_'), monthsShort : 'Jan_Feb_Mrt_Apr_Mei_Jun_Jul_Aug_Sep_Okt_Nov_Des'.split('_'), weekdays : 'Sondag_Maandag_Dinsdag_Woensdag_Donderdag_Vrydag_Saterdag'.split('_'), weekdaysShort : 'Son_Maa_Din_Woe_Don_Vry_Sat'.split('_'), weekdaysMin : 'So_Ma_Di_Wo_Do_Vr_Sa'.split('_'), meridiemParse: /vm|nm/i, isPM : function (input) { return /^nm$/i.test(input); }, meridiem : function (hours, minutes, isLower) { if (hours < 12) { return isLower ? 'vm' : 'VM'; } else { return isLower ? 'nm' : 'NM'; } }, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Vandag om] LT', nextDay : '[Môre om] LT', nextWeek : 'dddd [om] LT', lastDay : '[Gister om] LT', lastWeek : '[Laas] dddd [om] LT', sameElse : 'L' }, relativeTime : { future : 'oor %s', past : '%s gelede', s : '\'n paar sekondes', m : '\'n minuut', mm : '%d minute', h : '\'n uur', hh : '%d ure', d : '\'n dag', dd : '%d dae', M : '\'n maand', MM : '%d maande', y : '\'n jaar', yy : '%d jaar' }, ordinalParse: /\d{1,2}(ste|de)/, ordinal : function (number) { return number + ((number === 1 || number === 8 || number >= 20) ? 'ste' : 'de'); // Thanks to Joris Röling : https://github.com/jjupiter }, week : { dow : 1, // Maandag is die eerste dag van die week. doy : 4 // Die week wat die 4de Januarie bevat is die eerste week van die jaar. } }); return af; }))); /***/ }, /* 347 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Arabic [ar] //! author : Abdel Said: https://github.com/abdelsaid //! author : Ahmed Elkhatib //! author : forabi https://github.com/forabi ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '١', '2': '٢', '3': '٣', '4': '٤', '5': '٥', '6': '٦', '7': '٧', '8': '٨', '9': '٩', '0': '٠' }; var numberMap = { '١': '1', '٢': '2', '٣': '3', '٤': '4', '٥': '5', '٦': '6', '٧': '7', '٨': '8', '٩': '9', '٠': '0' }; var pluralForm = function (n) { return n === 0 ? 0 : n === 1 ? 1 : n === 2 ? 2 : n % 100 >= 3 && n % 100 <= 10 ? 3 : n % 100 >= 11 ? 
4 : 5; }; var plurals = { s : ['أقل من ثانية', 'ثانية واحدة', ['ثانيتان', 'ثانيتين'], '%d ثوان', '%d ثانية', '%d ثانية'], m : ['أقل من دقيقة', 'دقيقة واحدة', ['دقيقتان', 'دقيقتين'], '%d دقائق', '%d دقيقة', '%d دقيقة'], h : ['أقل من ساعة', 'ساعة واحدة', ['ساعتان', 'ساعتين'], '%d ساعات', '%d ساعة', '%d ساعة'], d : ['أقل من يوم', 'يوم واحد', ['يومان', 'يومين'], '%d أيام', '%d يومًا', '%d يوم'], M : ['أقل من شهر', 'شهر واحد', ['شهران', 'شهرين'], '%d أشهر', '%d شهرا', '%d شهر'], y : ['أقل من عام', 'عام واحد', ['عامان', 'عامين'], '%d أعوام', '%d عامًا', '%d عام'] }; var pluralize = function (u) { return function (number, withoutSuffix, string, isFuture) { var f = pluralForm(number), str = plurals[u][pluralForm(number)]; if (f === 2) { str = str[withoutSuffix ? 0 : 1]; } return str.replace(/%d/i, number); }; }; var months = [ 'كانون الثاني يناير', 'شباط فبراير', 'آذار مارس', 'نيسان أبريل', 'أيار مايو', 'حزيران يونيو', 'تموز يوليو', 'آب أغسطس', 'أيلول سبتمبر', 'تشرين الأول أكتوبر', 'تشرين الثاني نوفمبر', 'كانون الأول ديسمبر' ]; var ar = moment.defineLocale('ar', { months : months, monthsShort : months, weekdays : 'الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'), weekdaysShort : 'أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت'.split('_'), weekdaysMin : 'ح_ن_ث_ر_خ_ج_س'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'D/\u200FM/\u200FYYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, meridiemParse: /ص|م/, isPM : function (input) { return 'م' === input; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'ص'; } else { return 'م'; } }, calendar : { sameDay: '[اليوم عند الساعة] LT', nextDay: '[غدًا عند الساعة] LT', nextWeek: 'dddd [عند الساعة] LT', lastDay: '[أمس عند الساعة] LT', lastWeek: 'dddd [عند الساعة] LT', sameElse: 'L' }, relativeTime : { future : 'بعد %s', past : 'منذ %s', s : pluralize('s'), m : pluralize('m'), mm : pluralize('m'), h : pluralize('h'), hh : pluralize('h'), d : pluralize('d'), dd : pluralize('d'), M : pluralize('M'), MM : pluralize('M'), y : pluralize('y'), yy : pluralize('y') }, preparse: function (string) { return string.replace(/\u200f/g, '').replace(/[١٢٣٤٥٦٧٨٩٠]/g, function (match) { return numberMap[match]; }).replace(/،/g, ','); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }).replace(/,/g, '،'); }, week : { dow : 6, // Saturday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return ar; }))); /***/ }, /* 348 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Arabic (Algeria) [ar-dz] //! author : Noureddine LOUAHEDJ : https://github.com/noureddineme ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var arDz = moment.defineLocale('ar-dz', { months : 'جانفي_فيفري_مارس_أفريل_ماي_جوان_جويلية_أوت_سبتمبر_أكتوبر_نوفمبر_ديسمبر'.split('_'), monthsShort : 'جانفي_فيفري_مارس_أفريل_ماي_جوان_جويلية_أوت_سبتمبر_أكتوبر_نوفمبر_ديسمبر'.split('_'), weekdays : 'الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'), weekdaysShort : 'احد_اثنين_ثلاثاء_اربعاء_خميس_جمعة_سبت'.split('_'), weekdaysMin : 'أح_إث_ثلا_أر_خم_جم_سب'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[اليوم على الساعة] LT', nextDay: '[غدا على الساعة] LT', nextWeek: 'dddd [على الساعة] LT', lastDay: '[أمس على الساعة] LT', lastWeek: 'dddd [على الساعة] LT', sameElse: 'L' }, relativeTime : { future : 'في %s', past : 'منذ %s', s : 'ثوان', m : 'دقيقة', mm : '%d دقائق', h : 'ساعة', hh : '%d ساعات', d : 'يوم', dd : '%d أيام', M : 'شهر', MM : '%d أشهر', y : 'سنة', yy : '%d سنوات' }, week : { dow : 0, // Sunday is the first day of the week. doy : 4 // The week that contains Jan 1st is the first week of the year. } }); return arDz; }))); /***/ }, /* 349 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Arabic (Lybia) [ar-ly] //! author : Ali Hmer: https://github.com/kikoanis ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '1', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', '0': '0' }; var pluralForm = function (n) { return n === 0 ? 0 : n === 1 ? 1 : n === 2 ? 2 : n % 100 >= 3 && n % 100 <= 10 ? 3 : n % 100 >= 11 ? 4 : 5; }; var plurals = { s : ['أقل من ثانية', 'ثانية واحدة', ['ثانيتان', 'ثانيتين'], '%d ثوان', '%d ثانية', '%d ثانية'], m : ['أقل من دقيقة', 'دقيقة واحدة', ['دقيقتان', 'دقيقتين'], '%d دقائق', '%d دقيقة', '%d دقيقة'], h : ['أقل من ساعة', 'ساعة واحدة', ['ساعتان', 'ساعتين'], '%d ساعات', '%d ساعة', '%d ساعة'], d : ['أقل من يوم', 'يوم واحد', ['يومان', 'يومين'], '%d أيام', '%d يومًا', '%d يوم'], M : ['أقل من شهر', 'شهر واحد', ['شهران', 'شهرين'], '%d أشهر', '%d شهرا', '%d شهر'], y : ['أقل من عام', 'عام واحد', ['عامان', 'عامين'], '%d أعوام', '%d عامًا', '%d عام'] }; var pluralize = function (u) { return function (number, withoutSuffix, string, isFuture) { var f = pluralForm(number), str = plurals[u][pluralForm(number)]; if (f === 2) { str = str[withoutSuffix ? 
0 : 1]; } return str.replace(/%d/i, number); }; }; var months = [ 'يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر' ]; var arLy = moment.defineLocale('ar-ly', { months : months, monthsShort : months, weekdays : 'الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'), weekdaysShort : 'أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت'.split('_'), weekdaysMin : 'ح_ن_ث_ر_خ_ج_س'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'D/\u200FM/\u200FYYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, meridiemParse: /ص|م/, isPM : function (input) { return 'م' === input; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'ص'; } else { return 'م'; } }, calendar : { sameDay: '[اليوم عند الساعة] LT', nextDay: '[غدًا عند الساعة] LT', nextWeek: 'dddd [عند الساعة] LT', lastDay: '[أمس عند الساعة] LT', lastWeek: 'dddd [عند الساعة] LT', sameElse: 'L' }, relativeTime : { future : 'بعد %s', past : 'منذ %s', s : pluralize('s'), m : pluralize('m'), mm : pluralize('m'), h : pluralize('h'), hh : pluralize('h'), d : pluralize('d'), dd : pluralize('d'), M : pluralize('M'), MM : pluralize('M'), y : pluralize('y'), yy : pluralize('y') }, preparse: function (string) { return string.replace(/\u200f/g, '').replace(/،/g, ','); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }).replace(/,/g, '،'); }, week : { dow : 6, // Saturday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return arLy; }))); /***/ }, /* 350 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Arabic (Morocco) [ar-ma] //! author : ElFadili Yassine : https://github.com/ElFadiliY //! author : Abdel Said : https://github.com/abdelsaid ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var arMa = moment.defineLocale('ar-ma', { months : 'يناير_فبراير_مارس_أبريل_ماي_يونيو_يوليوز_غشت_شتنبر_أكتوبر_نونبر_دجنبر'.split('_'), monthsShort : 'يناير_فبراير_مارس_أبريل_ماي_يونيو_يوليوز_غشت_شتنبر_أكتوبر_نونبر_دجنبر'.split('_'), weekdays : 'الأحد_الإتنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'), weekdaysShort : 'احد_اتنين_ثلاثاء_اربعاء_خميس_جمعة_سبت'.split('_'), weekdaysMin : 'ح_ن_ث_ر_خ_ج_س'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[اليوم على الساعة] LT', nextDay: '[غدا على الساعة] LT', nextWeek: 'dddd [على الساعة] LT', lastDay: '[أمس على الساعة] LT', lastWeek: 'dddd [على الساعة] LT', sameElse: 'L' }, relativeTime : { future : 'في %s', past : 'منذ %s', s : 'ثوان', m : 'دقيقة', mm : '%d دقائق', h : 'ساعة', hh : '%d ساعات', d : 'يوم', dd : '%d أيام', M : 'شهر', MM : '%d أشهر', y : 'سنة', yy : '%d سنوات' }, week : { dow : 6, // Saturday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return arMa; }))); /***/ }, /* 351 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Arabic (Saudi Arabia) [ar-sa] //! author : Suhail Alkowaileet : https://github.com/xsoh ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '١', '2': '٢', '3': '٣', '4': '٤', '5': '٥', '6': '٦', '7': '٧', '8': '٨', '9': '٩', '0': '٠' }; var numberMap = { '١': '1', '٢': '2', '٣': '3', '٤': '4', '٥': '5', '٦': '6', '٧': '7', '٨': '8', '٩': '9', '٠': '0' }; var arSa = moment.defineLocale('ar-sa', { months : 'يناير_فبراير_مارس_أبريل_مايو_يونيو_يوليو_أغسطس_سبتمبر_أكتوبر_نوفمبر_ديسمبر'.split('_'), monthsShort : 'يناير_فبراير_مارس_أبريل_مايو_يونيو_يوليو_أغسطس_سبتمبر_أكتوبر_نوفمبر_ديسمبر'.split('_'), weekdays : 'الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'), weekdaysShort : 'أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت'.split('_'), weekdaysMin : 'ح_ن_ث_ر_خ_ج_س'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, meridiemParse: /ص|م/, isPM : function (input) { return 'م' === input; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'ص'; } else { return 'م'; } }, calendar : { sameDay: '[اليوم على الساعة] LT', nextDay: '[غدا على الساعة] LT', nextWeek: 'dddd [على الساعة] LT', lastDay: '[أمس على الساعة] LT', lastWeek: 'dddd [على الساعة] LT', sameElse: 'L' }, relativeTime : { future : 'في %s', past : 'منذ %s', s : 'ثوان', m : 'دقيقة', mm : '%d دقائق', h : 'ساعة', hh : '%d ساعات', d : 'يوم', dd : '%d أيام', M : 'شهر', MM : '%d أشهر', y : 'سنة', yy : '%d سنوات' }, preparse: function (string) { return string.replace(/[١٢٣٤٥٦٧٨٩٠]/g, function (match) { return numberMap[match]; }).replace(/،/g, ','); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }).replace(/,/g, '،'); }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return arSa; }))); /***/ }, /* 352 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Arabic (Tunisia) [ar-tn] //! author : Nader Toukabri : https://github.com/naderio ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var arTn = moment.defineLocale('ar-tn', { months: 'جانفي_فيفري_مارس_أفريل_ماي_جوان_جويلية_أوت_سبتمبر_أكتوبر_نوفمبر_ديسمبر'.split('_'), monthsShort: 'جانفي_فيفري_مارس_أفريل_ماي_جوان_جويلية_أوت_سبتمبر_أكتوبر_نوفمبر_ديسمبر'.split('_'), weekdays: 'الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'), weekdaysShort: 'أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت'.split('_'), weekdaysMin: 'ح_ن_ث_ر_خ_ج_س'.split('_'), weekdaysParseExact : true, longDateFormat: { LT: 'HH:mm', LTS: 'HH:mm:ss', L: 'DD/MM/YYYY', LL: 'D MMMM YYYY', LLL: 'D MMMM YYYY HH:mm', LLLL: 'dddd D MMMM YYYY HH:mm' }, calendar: { sameDay: '[اليوم على الساعة] LT', nextDay: '[غدا على الساعة] LT', nextWeek: 'dddd [على الساعة] LT', lastDay: '[أمس على الساعة] LT', lastWeek: 'dddd [على الساعة] LT', sameElse: 'L' }, relativeTime: { future: 'في %s', past: 'منذ %s', s: 'ثوان', m: 'دقيقة', mm: '%d دقائق', h: 'ساعة', hh: '%d ساعات', d: 'يوم', dd: '%d أيام', M: 'شهر', MM: '%d أشهر', y: 'سنة', yy: '%d سنوات' }, week: { dow: 1, // Monday is the first day of the week. 
doy: 4 // The week that contains Jan 4th is the first week of the year. } }); return arTn; }))); /***/ }, /* 353 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Azerbaijani [az] //! author : topchiyev : https://github.com/topchiyev ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var suffixes = { 1: '-inci', 5: '-inci', 8: '-inci', 70: '-inci', 80: '-inci', 2: '-nci', 7: '-nci', 20: '-nci', 50: '-nci', 3: '-üncü', 4: '-üncü', 100: '-üncü', 6: '-ncı', 9: '-uncu', 10: '-uncu', 30: '-uncu', 60: '-ıncı', 90: '-ıncı' }; var az = moment.defineLocale('az', { months : 'yanvar_fevral_mart_aprel_may_iyun_iyul_avqust_sentyabr_oktyabr_noyabr_dekabr'.split('_'), monthsShort : 'yan_fev_mar_apr_may_iyn_iyl_avq_sen_okt_noy_dek'.split('_'), weekdays : 'Bazar_Bazar ertəsi_Çərşənbə axşamı_Çərşənbə_Cümə axşamı_Cümə_Şənbə'.split('_'), weekdaysShort : 'Baz_BzE_ÇAx_Çər_CAx_Cüm_Şən'.split('_'), weekdaysMin : 'Bz_BE_ÇA_Çə_CA_Cü_Şə'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[bugün saat] LT', nextDay : '[sabah saat] LT', nextWeek : '[gələn həftə] dddd [saat] LT', lastDay : '[dünən] LT', lastWeek : '[keçən həftə] dddd [saat] LT', sameElse : 'L' }, relativeTime : { future : '%s sonra', past : '%s əvvəl', s : 'birneçə saniyyə', m : 'bir dəqiqə', mm : '%d dəqiqə', h : 'bir saat', hh : '%d saat', d : 'bir gün', dd : '%d gün', M : 'bir ay', MM : '%d ay', y : 'bir il', yy : '%d il' }, meridiemParse: /gecə|səhər|gündüz|axşam/, isPM : function (input) { return /^(gündüz|axşam)$/.test(input); }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'gecə'; } else if (hour < 12) { return 'səhər'; } else if (hour < 17) { return 'gündüz'; } else { return 'axşam'; } }, ordinalParse: /\d{1,2}-(ıncı|inci|nci|üncü|ncı|uncu)/, ordinal : function (number) { if (number === 0) { // special case for zero return number + '-ıncı'; } var a = number % 10, b = number % 100 - a, c = number >= 100 ? 100 : null; return number + (suffixes[a] || suffixes[b] || suffixes[c]); }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return az; }))); /***/ }, /* 354 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Belarusian [be] //! author : Dmitry Demidov : https://github.com/demidov91 //! author: Praleska: http://praleska.pro/ //! Author : Menelion Elensúle : https://github.com/Oire ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function plural(word, num) { var forms = word.split('_'); return num % 10 === 1 && num % 100 !== 11 ? forms[0] : (num % 10 >= 2 && num % 10 <= 4 && (num % 100 < 10 || num % 100 >= 20) ? forms[1] : forms[2]); } function relativeTimeWithPlural(number, withoutSuffix, key) { var format = { 'mm': withoutSuffix ? 'хвіліна_хвіліны_хвілін' : 'хвіліну_хвіліны_хвілін', 'hh': withoutSuffix ? 
'гадзіна_гадзіны_гадзін' : 'гадзіну_гадзіны_гадзін', 'dd': 'дзень_дні_дзён', 'MM': 'месяц_месяцы_месяцаў', 'yy': 'год_гады_гадоў' }; if (key === 'm') { return withoutSuffix ? 'хвіліна' : 'хвіліну'; } else if (key === 'h') { return withoutSuffix ? 'гадзіна' : 'гадзіну'; } else { return number + ' ' + plural(format[key], +number); } } var be = moment.defineLocale('be', { months : { format: 'студзеня_лютага_сакавіка_красавіка_траўня_чэрвеня_ліпеня_жніўня_верасня_кастрычніка_лістапада_снежня'.split('_'), standalone: 'студзень_люты_сакавік_красавік_травень_чэрвень_ліпень_жнівень_верасень_кастрычнік_лістапад_снежань'.split('_') }, monthsShort : 'студ_лют_сак_крас_трав_чэрв_ліп_жнів_вер_каст_ліст_снеж'.split('_'), weekdays : { format: 'нядзелю_панядзелак_аўторак_сераду_чацвер_пятніцу_суботу'.split('_'), standalone: 'нядзеля_панядзелак_аўторак_серада_чацвер_пятніца_субота'.split('_'), isFormat: /\[ ?[Вв] ?(?:мінулую|наступную)? ?\] ?dddd/ }, weekdaysShort : 'нд_пн_ат_ср_чц_пт_сб'.split('_'), weekdaysMin : 'нд_пн_ат_ср_чц_пт_сб'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY г.', LLL : 'D MMMM YYYY г., HH:mm', LLLL : 'dddd, D MMMM YYYY г., HH:mm' }, calendar : { sameDay: '[Сёння ў] LT', nextDay: '[Заўтра ў] LT', lastDay: '[Учора ў] LT', nextWeek: function () { return '[У] dddd [ў] LT'; }, lastWeek: function () { switch (this.day()) { case 0: case 3: case 5: case 6: return '[У мінулую] dddd [ў] LT'; case 1: case 2: case 4: return '[У мінулы] dddd [ў] LT'; } }, sameElse: 'L' }, relativeTime : { future : 'праз %s', past : '%s таму', s : 'некалькі секунд', m : relativeTimeWithPlural, mm : relativeTimeWithPlural, h : relativeTimeWithPlural, hh : relativeTimeWithPlural, d : 'дзень', dd : relativeTimeWithPlural, M : 'месяц', MM : relativeTimeWithPlural, y : 'год', yy : relativeTimeWithPlural }, meridiemParse: /ночы|раніцы|дня|вечара/, isPM : function (input) { return /^(дня|вечара)$/.test(input); }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'ночы'; } else if (hour < 12) { return 'раніцы'; } else if (hour < 17) { return 'дня'; } else { return 'вечара'; } }, ordinalParse: /\d{1,2}-(і|ы|га)/, ordinal: function (number, period) { switch (period) { case 'M': case 'd': case 'DDD': case 'w': case 'W': return (number % 10 === 2 || number % 10 === 3) && (number % 100 !== 12 && number % 100 !== 13) ? number + '-і' : number + '-ы'; case 'D': return number + '-га'; default: return number; } }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return be; }))); /***/ }, /* 355 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Bulgarian [bg] //! author : Krasen Borisov : https://github.com/kraz ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var bg = moment.defineLocale('bg', { months : 'януари_февруари_март_април_май_юни_юли_август_септември_октомври_ноември_декември'.split('_'), monthsShort : 'янр_фев_мар_апр_май_юни_юли_авг_сеп_окт_ное_дек'.split('_'), weekdays : 'неделя_понеделник_вторник_сряда_четвъртък_петък_събота'.split('_'), weekdaysShort : 'нед_пон_вто_сря_чет_пет_съб'.split('_'), weekdaysMin : 'нд_пн_вт_ср_чт_пт_сб'.split('_'), longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'D.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY H:mm', LLLL : 'dddd, D MMMM YYYY H:mm' }, calendar : { sameDay : '[Днес в] LT', nextDay : '[Утре в] LT', nextWeek : 'dddd [в] LT', lastDay : '[Вчера в] LT', lastWeek : function () { switch (this.day()) { case 0: case 3: case 6: return '[В изминалата] dddd [в] LT'; case 1: case 2: case 4: case 5: return '[В изминалия] dddd [в] LT'; } }, sameElse : 'L' }, relativeTime : { future : 'след %s', past : 'преди %s', s : 'няколко секунди', m : 'минута', mm : '%d минути', h : 'час', hh : '%d часа', d : 'ден', dd : '%d дни', M : 'месец', MM : '%d месеца', y : 'година', yy : '%d години' }, ordinalParse: /\d{1,2}-(ев|ен|ти|ви|ри|ми)/, ordinal : function (number) { var lastDigit = number % 10, last2Digits = number % 100; if (number === 0) { return number + '-ев'; } else if (last2Digits === 0) { return number + '-ен'; } else if (last2Digits > 10 && last2Digits < 20) { return number + '-ти'; } else if (lastDigit === 1) { return number + '-ви'; } else if (lastDigit === 2) { return number + '-ри'; } else if (lastDigit === 7 || lastDigit === 8) { return number + '-ми'; } else { return number + '-ти'; } }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return bg; }))); /***/ }, /* 356 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Bengali [bn] //! author : Kaushik Gandhi : https://github.com/kaushikgandhi ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '১', '2': '২', '3': '৩', '4': '৪', '5': '৫', '6': '৬', '7': '৭', '8': '৮', '9': '৯', '0': '০' }; var numberMap = { '১': '1', '২': '2', '৩': '3', '৪': '4', '৫': '5', '৬': '6', '৭': '7', '৮': '8', '৯': '9', '০': '0' }; var bn = moment.defineLocale('bn', { months : 'জানুয়ারী_ফেব্রুয়ারি_মার্চ_এপ্রিল_মে_জুন_জুলাই_আগস্ট_সেপ্টেম্বর_অক্টোবর_নভেম্বর_ডিসেম্বর'.split('_'), monthsShort : 'জানু_ফেব_মার্চ_এপ্র_মে_জুন_জুল_আগ_সেপ্ট_অক্টো_নভে_ডিসে'.split('_'), weekdays : 'রবিবার_সোমবার_মঙ্গলবার_বুধবার_বৃহস্পতিবার_শুক্রবার_শনিবার'.split('_'), weekdaysShort : 'রবি_সোম_মঙ্গল_বুধ_বৃহস্পতি_শুক্র_শনি'.split('_'), weekdaysMin : 'রবি_সোম_মঙ্গ_বুধ_বৃহঃ_শুক্র_শনি'.split('_'), longDateFormat : { LT : 'A h:mm সময়', LTS : 'A h:mm:ss সময়', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm সময়', LLLL : 'dddd, D MMMM YYYY, A h:mm সময়' }, calendar : { sameDay : '[আজ] LT', nextDay : '[আগামীকাল] LT', nextWeek : 'dddd, LT', lastDay : '[গতকাল] LT', lastWeek : '[গত] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s পরে', past : '%s আগে', s : 'কয়েক সেকেন্ড', m : 'এক মিনিট', mm : '%d মিনিট', h : 'এক ঘন্টা', hh : '%d ঘন্টা', d : 'এক দিন', dd : '%d দিন', M : 'এক মাস', MM : '%d মাস', y : 'এক বছর', yy : '%d বছর' }, preparse: function (string) { return string.replace(/[১২৩৪৫৬৭৮৯০]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, meridiemParse: /রাত|সকাল|দুপুর|বিকাল|রাত/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if ((meridiem === 'রাত' && hour >= 4) || (meridiem === 'দুপুর' && hour < 5) || meridiem === 'বিকাল') { return hour + 12; } else { return hour; } }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'রাত'; } else if (hour < 10) { return 'সকাল'; } else if (hour < 17) { return 'দুপুর'; } else if (hour < 20) { return 'বিকাল'; } else { return 'রাত'; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return bn; }))); /***/ }, /* 357 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Tibetan [bo] //! author : Thupten N. Chakrishar : https://github.com/vajradog ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '༡', '2': '༢', '3': '༣', '4': '༤', '5': '༥', '6': '༦', '7': '༧', '8': '༨', '9': '༩', '0': '༠' }; var numberMap = { '༡': '1', '༢': '2', '༣': '3', '༤': '4', '༥': '5', '༦': '6', '༧': '7', '༨': '8', '༩': '9', '༠': '0' }; var bo = moment.defineLocale('bo', { months : 'ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ'.split('_'), monthsShort : 'ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ'.split('_'), weekdays : 'གཟའ་ཉི་མ་_གཟའ་ཟླ་བ་_གཟའ་མིག་དམར་_གཟའ་ལྷག་པ་_གཟའ་ཕུར་བུ_གཟའ་པ་སངས་_གཟའ་སྤེན་པ་'.split('_'), weekdaysShort : 'ཉི་མ་_ཟླ་བ་_མིག་དམར་_ལྷག་པ་_ཕུར་བུ_པ་སངས་_སྤེན་པ་'.split('_'), weekdaysMin : 'ཉི་མ་_ཟླ་བ་_མིག་དམར་_ལྷག་པ་_ཕུར་བུ_པ་སངས་_སྤེན་པ་'.split('_'), longDateFormat : { LT : 'A h:mm', LTS : 'A h:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm', LLLL : 'dddd, D MMMM YYYY, A h:mm' }, calendar : { sameDay : '[དི་རིང] LT', nextDay : '[སང་ཉིན] LT', nextWeek : '[བདུན་ཕྲག་རྗེས་མ], LT', lastDay : '[ཁ་སང] LT', lastWeek : '[བདུན་ཕྲག་མཐའ་མ] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s ལ་', past : '%s སྔན་ལ', s : 'ལམ་སང', m : 'སྐར་མ་གཅིག', mm : '%d སྐར་མ', h : 'ཆུ་ཚོད་གཅིག', hh : '%d ཆུ་ཚོད', d : 'ཉིན་གཅིག', dd : '%d ཉིན་', M : 'ཟླ་བ་གཅིག', MM : '%d ཟླ་བ', y : 'ལོ་གཅིག', yy : '%d ལོ' }, preparse: function (string) { return string.replace(/[༡༢༣༤༥༦༧༨༩༠]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, meridiemParse: /མཚན་མོ|ཞོགས་ཀས|ཉིན་གུང|དགོང་དག|མཚན་མོ/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if ((meridiem === 'མཚན་མོ' && hour >= 4) || (meridiem === 'ཉིན་གུང' && hour < 5) || meridiem === 'དགོང་དག') { return hour + 12; } else { return hour; } }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'མཚན་མོ'; } else if (hour < 10) { return 'ཞོགས་ཀས'; } else if (hour < 17) { return 'ཉིན་གུང'; } else if (hour < 20) { return 'དགོང་དག'; } else { return 'མཚན་མོ'; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return bo; }))); /***/ }, /* 358 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Breton [br] //! author : Jean-Baptiste Le Duigou : https://github.com/jbleduigou ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function relativeTimeWithMutation(number, withoutSuffix, key) { var format = { 'mm': 'munutenn', 'MM': 'miz', 'dd': 'devezh' }; return number + ' ' + mutation(format[key], number); } function specialMutationForYears(number) { switch (lastNumber(number)) { case 1: case 3: case 4: case 5: case 9: return number + ' bloaz'; default: return number + ' vloaz'; } } function lastNumber(number) { if (number > 9) { return lastNumber(number % 10); } return number; } function mutation(text, number) { if (number === 2) { return softMutation(text); } return text; } function softMutation(text) { var mutationTable = { 'm': 'v', 'b': 'v', 'd': 'z' }; if (mutationTable[text.charAt(0)] === undefined) { return text; } return mutationTable[text.charAt(0)] + text.substring(1); } var br = moment.defineLocale('br', { months : 'Genver_C\'hwevrer_Meurzh_Ebrel_Mae_Mezheven_Gouere_Eost_Gwengolo_Here_Du_Kerzu'.split('_'), monthsShort : 'Gen_C\'hwe_Meu_Ebr_Mae_Eve_Gou_Eos_Gwe_Her_Du_Ker'.split('_'), weekdays : 'Sul_Lun_Meurzh_Merc\'her_Yaou_Gwener_Sadorn'.split('_'), weekdaysShort : 'Sul_Lun_Meu_Mer_Yao_Gwe_Sad'.split('_'), weekdaysMin : 'Su_Lu_Me_Mer_Ya_Gw_Sa'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'h[e]mm A', LTS : 'h[e]mm:ss A', L : 'DD/MM/YYYY', LL : 'D [a viz] MMMM YYYY', LLL : 'D [a viz] MMMM YYYY h[e]mm A', LLLL : 'dddd, D [a viz] MMMM YYYY h[e]mm A' }, calendar : { sameDay : '[Hiziv da] LT', nextDay : '[Warc\'hoazh da] LT', nextWeek : 'dddd [da] LT', lastDay : '[Dec\'h da] LT', lastWeek : 'dddd [paset da] LT', sameElse : 'L' }, relativeTime : { future : 'a-benn %s', past : '%s \'zo', s : 'un nebeud segondennoù', m : 'ur vunutenn', mm : relativeTimeWithMutation, h : 'un eur', hh : '%d eur', d : 'un devezh', dd : relativeTimeWithMutation, M : 'ur miz', MM : relativeTimeWithMutation, y : 'ur bloaz', yy : specialMutationForYears }, ordinalParse: /\d{1,2}(añ|vet)/, ordinal : function (number) { var output = (number === 1) ? 'añ' : 'vet'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return br; }))); /***/ }, /* 359 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Bosnian [bs] //! author : Nedim Cholich : https://github.com/frontyard //! based on (hr) translation by Bojan Marković ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function translate(number, withoutSuffix, key) { var result = number + ' '; switch (key) { case 'm': return withoutSuffix ? 'jedna minuta' : 'jedne minute'; case 'mm': if (number === 1) { result += 'minuta'; } else if (number === 2 || number === 3 || number === 4) { result += 'minute'; } else { result += 'minuta'; } return result; case 'h': return withoutSuffix ? 
'jedan sat' : 'jednog sata'; case 'hh': if (number === 1) { result += 'sat'; } else if (number === 2 || number === 3 || number === 4) { result += 'sata'; } else { result += 'sati'; } return result; case 'dd': if (number === 1) { result += 'dan'; } else { result += 'dana'; } return result; case 'MM': if (number === 1) { result += 'mjesec'; } else if (number === 2 || number === 3 || number === 4) { result += 'mjeseca'; } else { result += 'mjeseci'; } return result; case 'yy': if (number === 1) { result += 'godina'; } else if (number === 2 || number === 3 || number === 4) { result += 'godine'; } else { result += 'godina'; } return result; } } var bs = moment.defineLocale('bs', { months : 'januar_februar_mart_april_maj_juni_juli_august_septembar_oktobar_novembar_decembar'.split('_'), monthsShort : 'jan._feb._mar._apr._maj._jun._jul._aug._sep._okt._nov._dec.'.split('_'), monthsParseExact: true, weekdays : 'nedjelja_ponedjeljak_utorak_srijeda_četvrtak_petak_subota'.split('_'), weekdaysShort : 'ned._pon._uto._sri._čet._pet._sub.'.split('_'), weekdaysMin : 'ne_po_ut_sr_če_pe_su'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd, D. MMMM YYYY H:mm' }, calendar : { sameDay : '[danas u] LT', nextDay : '[sutra u] LT', nextWeek : function () { switch (this.day()) { case 0: return '[u] [nedjelju] [u] LT'; case 3: return '[u] [srijedu] [u] LT'; case 6: return '[u] [subotu] [u] LT'; case 1: case 2: case 4: case 5: return '[u] dddd [u] LT'; } }, lastDay : '[jučer u] LT', lastWeek : function () { switch (this.day()) { case 0: case 3: return '[prošlu] dddd [u] LT'; case 6: return '[prošle] [subote] [u] LT'; case 1: case 2: case 4: case 5: return '[prošli] dddd [u] LT'; } }, sameElse : 'L' }, relativeTime : { future : 'za %s', past : 'prije %s', s : 'par sekundi', m : translate, mm : translate, h : translate, hh : translate, d : 'dan', dd : translate, M : 'mjesec', MM : translate, y : 'godinu', yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return bs; }))); /***/ }, /* 360 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Catalan [ca] //! author : Juan G. Hurtado : https://github.com/juanghurtado ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ca = moment.defineLocale('ca', { months : 'gener_febrer_març_abril_maig_juny_juliol_agost_setembre_octubre_novembre_desembre'.split('_'), monthsShort : 'gen._febr._mar._abr._mai._jun._jul._ag._set._oct._nov._des.'.split('_'), monthsParseExact : true, weekdays : 'diumenge_dilluns_dimarts_dimecres_dijous_divendres_dissabte'.split('_'), weekdaysShort : 'dg._dl._dt._dc._dj._dv._ds.'.split('_'), weekdaysMin : 'Dg_Dl_Dt_Dc_Dj_Dv_Ds'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY H:mm', LLLL : 'dddd D MMMM YYYY H:mm' }, calendar : { sameDay : function () { return '[avui a ' + ((this.hours() !== 1) ? 'les' : 'la') + '] LT'; }, nextDay : function () { return '[demà a ' + ((this.hours() !== 1) ? 'les' : 'la') + '] LT'; }, nextWeek : function () { return 'dddd [a ' + ((this.hours() !== 1) ? 
'les' : 'la') + '] LT'; }, lastDay : function () { return '[ahir a ' + ((this.hours() !== 1) ? 'les' : 'la') + '] LT'; }, lastWeek : function () { return '[el] dddd [passat a ' + ((this.hours() !== 1) ? 'les' : 'la') + '] LT'; }, sameElse : 'L' }, relativeTime : { future : 'd\'aquí %s', past : 'fa %s', s : 'uns segons', m : 'un minut', mm : '%d minuts', h : 'una hora', hh : '%d hores', d : 'un dia', dd : '%d dies', M : 'un mes', MM : '%d mesos', y : 'un any', yy : '%d anys' }, ordinalParse: /\d{1,2}(r|n|t|è|a)/, ordinal : function (number, period) { var output = (number === 1) ? 'r' : (number === 2) ? 'n' : (number === 3) ? 'r' : (number === 4) ? 't' : 'è'; if (period === 'w' || period === 'W') { output = 'a'; } return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return ca; }))); /***/ }, /* 361 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Czech [cs] //! author : petrbela : https://github.com/petrbela ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var months = 'leden_únor_březen_duben_květen_červen_červenec_srpen_září_říjen_listopad_prosinec'.split('_'); var monthsShort = 'led_úno_bře_dub_kvě_čvn_čvc_srp_zář_říj_lis_pro'.split('_'); function plural(n) { return (n > 1) && (n < 5) && (~~(n / 10) !== 1); } function translate(number, withoutSuffix, key, isFuture) { var result = number + ' '; switch (key) { case 's': // a few seconds / in a few seconds / a few seconds ago return (withoutSuffix || isFuture) ? 'pár sekund' : 'pár sekundami'; case 'm': // a minute / in a minute / a minute ago return withoutSuffix ? 'minuta' : (isFuture ? 'minutu' : 'minutou'); case 'mm': // 9 minutes / in 9 minutes / 9 minutes ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'minuty' : 'minut'); } else { return result + 'minutami'; } break; case 'h': // an hour / in an hour / an hour ago return withoutSuffix ? 'hodina' : (isFuture ? 'hodinu' : 'hodinou'); case 'hh': // 9 hours / in 9 hours / 9 hours ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'hodiny' : 'hodin'); } else { return result + 'hodinami'; } break; case 'd': // a day / in a day / a day ago return (withoutSuffix || isFuture) ? 'den' : 'dnem'; case 'dd': // 9 days / in 9 days / 9 days ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'dny' : 'dní'); } else { return result + 'dny'; } break; case 'M': // a month / in a month / a month ago return (withoutSuffix || isFuture) ? 'měsíc' : 'měsícem'; case 'MM': // 9 months / in 9 months / 9 months ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'měsíce' : 'měsíců'); } else { return result + 'měsíci'; } break; case 'y': // a year / in a year / a year ago return (withoutSuffix || isFuture) ? 'rok' : 'rokem'; case 'yy': // 9 years / in 9 years / 9 years ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 
'roky' : 'let'); } else { return result + 'lety'; } break; } } var cs = moment.defineLocale('cs', { months : months, monthsShort : monthsShort, monthsParse : (function (months, monthsShort) { var i, _monthsParse = []; for (i = 0; i < 12; i++) { // use custom parser to solve problem with July (červenec) _monthsParse[i] = new RegExp('^' + months[i] + '$|^' + monthsShort[i] + '$', 'i'); } return _monthsParse; }(months, monthsShort)), shortMonthsParse : (function (monthsShort) { var i, _shortMonthsParse = []; for (i = 0; i < 12; i++) { _shortMonthsParse[i] = new RegExp('^' + monthsShort[i] + '$', 'i'); } return _shortMonthsParse; }(monthsShort)), longMonthsParse : (function (months) { var i, _longMonthsParse = []; for (i = 0; i < 12; i++) { _longMonthsParse[i] = new RegExp('^' + months[i] + '$', 'i'); } return _longMonthsParse; }(months)), weekdays : 'neděle_pondělí_úterý_středa_čtvrtek_pátek_sobota'.split('_'), weekdaysShort : 'ne_po_út_st_čt_pá_so'.split('_'), weekdaysMin : 'ne_po_út_st_čt_pá_so'.split('_'), longDateFormat : { LT: 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd D. MMMM YYYY H:mm', l : 'D. M. YYYY' }, calendar : { sameDay: '[dnes v] LT', nextDay: '[zítra v] LT', nextWeek: function () { switch (this.day()) { case 0: return '[v neděli v] LT'; case 1: case 2: return '[v] dddd [v] LT'; case 3: return '[ve středu v] LT'; case 4: return '[ve čtvrtek v] LT'; case 5: return '[v pátek v] LT'; case 6: return '[v sobotu v] LT'; } }, lastDay: '[včera v] LT', lastWeek: function () { switch (this.day()) { case 0: return '[minulou neděli v] LT'; case 1: case 2: return '[minulé] dddd [v] LT'; case 3: return '[minulou středu v] LT'; case 4: case 5: return '[minulý] dddd [v] LT'; case 6: return '[minulou sobotu v] LT'; } }, sameElse: 'L' }, relativeTime : { future : 'za %s', past : 'před %s', s : translate, m : translate, mm : translate, h : translate, hh : translate, d : translate, dd : translate, M : translate, MM : translate, y : translate, yy : translate }, ordinalParse : /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return cs; }))); /***/ }, /* 362 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Chuvash [cv] //! author : Anatoly Mironov : https://github.com/mirontoli ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var cv = moment.defineLocale('cv', { months : 'кӑрлач_нарӑс_пуш_ака_май_ҫӗртме_утӑ_ҫурла_авӑн_юпа_чӳк_раштав'.split('_'), monthsShort : 'кӑр_нар_пуш_ака_май_ҫӗр_утӑ_ҫур_авн_юпа_чӳк_раш'.split('_'), weekdays : 'вырсарникун_тунтикун_ытларикун_юнкун_кӗҫнерникун_эрнекун_шӑматкун'.split('_'), weekdaysShort : 'выр_тун_ытл_юн_кӗҫ_эрн_шӑм'.split('_'), weekdaysMin : 'вр_тн_ыт_юн_кҫ_эр_шм'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD-MM-YYYY', LL : 'YYYY [ҫулхи] MMMM [уйӑхӗн] D[-мӗшӗ]', LLL : 'YYYY [ҫулхи] MMMM [уйӑхӗн] D[-мӗшӗ], HH:mm', LLLL : 'dddd, YYYY [ҫулхи] MMMM [уйӑхӗн] D[-мӗшӗ], HH:mm' }, calendar : { sameDay: '[Паян] LT [сехетре]', nextDay: '[Ыран] LT [сехетре]', lastDay: '[Ӗнер] LT [сехетре]', nextWeek: '[Ҫитес] dddd LT [сехетре]', lastWeek: '[Иртнӗ] dddd LT [сехетре]', sameElse: 'L' }, relativeTime : { future : function (output) { var affix = /сехет$/i.exec(output) ? 'рен' : /ҫул$/i.exec(output) ? 'тан' : 'ран'; return output + affix; }, past : '%s каялла', s : 'пӗр-ик ҫеккунт', m : 'пӗр минут', mm : '%d минут', h : 'пӗр сехет', hh : '%d сехет', d : 'пӗр кун', dd : '%d кун', M : 'пӗр уйӑх', MM : '%d уйӑх', y : 'пӗр ҫул', yy : '%d ҫул' }, ordinalParse: /\d{1,2}-мӗш/, ordinal : '%d-мӗш', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return cv; }))); /***/ }, /* 363 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Welsh [cy] //! author : Robert Allen : https://github.com/robgallen //! author : https://github.com/ryangreaves ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var cy = moment.defineLocale('cy', { months: 'Ionawr_Chwefror_Mawrth_Ebrill_Mai_Mehefin_Gorffennaf_Awst_Medi_Hydref_Tachwedd_Rhagfyr'.split('_'), monthsShort: 'Ion_Chwe_Maw_Ebr_Mai_Meh_Gor_Aws_Med_Hyd_Tach_Rhag'.split('_'), weekdays: 'Dydd Sul_Dydd Llun_Dydd Mawrth_Dydd Mercher_Dydd Iau_Dydd Gwener_Dydd Sadwrn'.split('_'), weekdaysShort: 'Sul_Llun_Maw_Mer_Iau_Gwe_Sad'.split('_'), weekdaysMin: 'Su_Ll_Ma_Me_Ia_Gw_Sa'.split('_'), weekdaysParseExact : true, // time formats are the same as en-gb longDateFormat: { LT: 'HH:mm', LTS : 'HH:mm:ss', L: 'DD/MM/YYYY', LL: 'D MMMM YYYY', LLL: 'D MMMM YYYY HH:mm', LLLL: 'dddd, D MMMM YYYY HH:mm' }, calendar: { sameDay: '[Heddiw am] LT', nextDay: '[Yfory am] LT', nextWeek: 'dddd [am] LT', lastDay: '[Ddoe am] LT', lastWeek: 'dddd [diwethaf am] LT', sameElse: 'L' }, relativeTime: { future: 'mewn %s', past: '%s yn ôl', s: 'ychydig eiliadau', m: 'munud', mm: '%d munud', h: 'awr', hh: '%d awr', d: 'diwrnod', dd: '%d diwrnod', M: 'mis', MM: '%d mis', y: 'blwyddyn', yy: '%d flynedd' }, ordinalParse: /\d{1,2}(fed|ain|af|il|ydd|ed|eg)/, // traditional ordinal numbers above 31 are not commonly used in colloquial Welsh ordinal: function (number) { var b = number, output = '', lookup = [ '', 'af', 'il', 'ydd', 'ydd', 'ed', 'ed', 'ed', 'fed', 'fed', 'fed', // 1af to 10fed 'eg', 'fed', 'eg', 'eg', 'fed', 'eg', 'eg', 'fed', 'eg', 'fed' // 11eg to 20fed ]; if (b > 20) { if (b === 40 || b === 50 || b === 60 || b === 80 || b === 100) { output = 'fed'; // not 30ain, 70ain or 90ain } else { output = 'ain'; } } else if (b > 0) { output = lookup[b]; } return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return cy; }))); /***/ }, /* 364 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Danish [da] //! author : Ulrik Nielsen : https://github.com/mrbase ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var da = moment.defineLocale('da', { months : 'januar_februar_marts_april_maj_juni_juli_august_september_oktober_november_december'.split('_'), monthsShort : 'jan_feb_mar_apr_maj_jun_jul_aug_sep_okt_nov_dec'.split('_'), weekdays : 'søndag_mandag_tirsdag_onsdag_torsdag_fredag_lørdag'.split('_'), weekdaysShort : 'søn_man_tir_ons_tor_fre_lør'.split('_'), weekdaysMin : 'sø_ma_ti_on_to_fr_lø'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY HH:mm', LLLL : 'dddd [d.] D. MMMM YYYY HH:mm' }, calendar : { sameDay : '[I dag kl.] LT', nextDay : '[I morgen kl.] LT', nextWeek : 'dddd [kl.] LT', lastDay : '[I går kl.] LT', lastWeek : '[sidste] dddd [kl] LT', sameElse : 'L' }, relativeTime : { future : 'om %s', past : '%s siden', s : 'få sekunder', m : 'et minut', mm : '%d minutter', h : 'en time', hh : '%d timer', d : 'en dag', dd : '%d dage', M : 'en måned', MM : '%d måneder', y : 'et år', yy : '%d år' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return da; }))); /***/ }, /* 365 */ /***/ function(module, exports, __webpack_require__) { //! 
moment.js locale configuration //! locale : German [de] //! author : lluchs : https://github.com/lluchs //! author: Menelion Elensúle: https://github.com/Oire //! author : Mikolaj Dadela : https://github.com/mik01aj ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function processRelativeTime(number, withoutSuffix, key, isFuture) { var format = { 'm': ['eine Minute', 'einer Minute'], 'h': ['eine Stunde', 'einer Stunde'], 'd': ['ein Tag', 'einem Tag'], 'dd': [number + ' Tage', number + ' Tagen'], 'M': ['ein Monat', 'einem Monat'], 'MM': [number + ' Monate', number + ' Monaten'], 'y': ['ein Jahr', 'einem Jahr'], 'yy': [number + ' Jahre', number + ' Jahren'] }; return withoutSuffix ? format[key][0] : format[key][1]; } var de = moment.defineLocale('de', { months : 'Januar_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'), monthsShort : 'Jan._Febr._Mrz._Apr._Mai_Jun._Jul._Aug._Sept._Okt._Nov._Dez.'.split('_'), monthsParseExact : true, weekdays : 'Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag'.split('_'), weekdaysShort : 'So._Mo._Di._Mi._Do._Fr._Sa.'.split('_'), weekdaysMin : 'So_Mo_Di_Mi_Do_Fr_Sa'.split('_'), weekdaysParseExact : true, longDateFormat : { LT: 'HH:mm', LTS: 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY HH:mm', LLLL : 'dddd, D. MMMM YYYY HH:mm' }, calendar : { sameDay: '[heute um] LT [Uhr]', sameElse: 'L', nextDay: '[morgen um] LT [Uhr]', nextWeek: 'dddd [um] LT [Uhr]', lastDay: '[gestern um] LT [Uhr]', lastWeek: '[letzten] dddd [um] LT [Uhr]' }, relativeTime : { future : 'in %s', past : 'vor %s', s : 'ein paar Sekunden', m : processRelativeTime, mm : '%d Minuten', h : processRelativeTime, hh : '%d Stunden', d : processRelativeTime, dd : processRelativeTime, M : processRelativeTime, MM : processRelativeTime, y : processRelativeTime, yy : processRelativeTime }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return de; }))); /***/ }, /* 366 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : German (Austria) [de-at] //! author : lluchs : https://github.com/lluchs //! author: Menelion Elensúle: https://github.com/Oire //! author : Martin Groller : https://github.com/MadMG //! author : Mikolaj Dadela : https://github.com/mik01aj ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function processRelativeTime(number, withoutSuffix, key, isFuture) { var format = { 'm': ['eine Minute', 'einer Minute'], 'h': ['eine Stunde', 'einer Stunde'], 'd': ['ein Tag', 'einem Tag'], 'dd': [number + ' Tage', number + ' Tagen'], 'M': ['ein Monat', 'einem Monat'], 'MM': [number + ' Monate', number + ' Monaten'], 'y': ['ein Jahr', 'einem Jahr'], 'yy': [number + ' Jahre', number + ' Jahren'] }; return withoutSuffix ? 
format[key][0] : format[key][1]; } var deAt = moment.defineLocale('de-at', { months : 'Jänner_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'), monthsShort : 'Jän._Febr._Mrz._Apr._Mai_Jun._Jul._Aug._Sept._Okt._Nov._Dez.'.split('_'), monthsParseExact : true, weekdays : 'Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag'.split('_'), weekdaysShort : 'So._Mo._Di._Mi._Do._Fr._Sa.'.split('_'), weekdaysMin : 'So_Mo_Di_Mi_Do_Fr_Sa'.split('_'), weekdaysParseExact : true, longDateFormat : { LT: 'HH:mm', LTS: 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY HH:mm', LLLL : 'dddd, D. MMMM YYYY HH:mm' }, calendar : { sameDay: '[heute um] LT [Uhr]', sameElse: 'L', nextDay: '[morgen um] LT [Uhr]', nextWeek: 'dddd [um] LT [Uhr]', lastDay: '[gestern um] LT [Uhr]', lastWeek: '[letzten] dddd [um] LT [Uhr]' }, relativeTime : { future : 'in %s', past : 'vor %s', s : 'ein paar Sekunden', m : processRelativeTime, mm : '%d Minuten', h : processRelativeTime, hh : '%d Stunden', d : processRelativeTime, dd : processRelativeTime, M : processRelativeTime, MM : processRelativeTime, y : processRelativeTime, yy : processRelativeTime }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return deAt; }))); /***/ }, /* 367 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Maldivian [dv] //! author : Jawish Hameed : https://github.com/jawish ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var months = [ 'ޖެނުއަރީ', 'ފެބްރުއަރީ', 'މާރިޗު', 'އޭޕްރީލު', 'މޭ', 'ޖޫން', 'ޖުލައި', 'އޯގަސްޓު', 'ސެޕްޓެމްބަރު', 'އޮކްޓޯބަރު', 'ނޮވެމްބަރު', 'ޑިސެމްބަރު' ]; var weekdays = [ 'އާދިއްތަ', 'ހޯމަ', 'އަންގާރަ', 'ބުދަ', 'ބުރާސްފަތި', 'ހުކުރު', 'ހޮނިހިރު' ]; var dv = moment.defineLocale('dv', { months : months, monthsShort : months, weekdays : weekdays, weekdaysShort : weekdays, weekdaysMin : 'އާދި_ހޯމަ_އަން_ބުދަ_ބުރާ_ހުކު_ހޮނި'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'D/M/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, meridiemParse: /މކ|މފ/, isPM : function (input) { return 'މފ' === input; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'މކ'; } else { return 'މފ'; } }, calendar : { sameDay : '[މިއަދު] LT', nextDay : '[މާދަމާ] LT', nextWeek : 'dddd LT', lastDay : '[އިއްޔެ] LT', lastWeek : '[ފާއިތުވި] dddd LT', sameElse : 'L' }, relativeTime : { future : 'ތެރޭގައި %s', past : 'ކުރިން %s', s : 'ސިކުންތުކޮޅެއް', m : 'މިނިޓެއް', mm : 'މިނިޓު %d', h : 'ގަޑިއިރެއް', hh : 'ގަޑިއިރު %d', d : 'ދުވަހެއް', dd : 'ދުވަސް %d', M : 'މަހެއް', MM : 'މަސް %d', y : 'އަހަރެއް', yy : 'އަހަރު %d' }, preparse: function (string) { return string.replace(/،/g, ','); }, postformat: function (string) { return string.replace(/,/g, '،'); }, week : { dow : 7, // Sunday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return dv; }))); /***/ }, /* 368 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Greek [el] //! author : Aggelos Karalias : https://github.com/mehiel ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function isFunction(input) { return input instanceof Function || Object.prototype.toString.call(input) === '[object Function]'; } var el = moment.defineLocale('el', { monthsNominativeEl : 'Ιανουάριος_Φεβρουάριος_Μάρτιος_Απρίλιος_Μάιος_Ιούνιος_Ιούλιος_Αύγουστος_Σεπτέμβριος_Οκτώβριος_Νοέμβριος_Δεκέμβριος'.split('_'), monthsGenitiveEl : 'Ιανουαρίου_Φεβρουαρίου_Μαρτίου_Απριλίου_Μαΐου_Ιουνίου_Ιουλίου_Αυγούστου_Σεπτεμβρίου_Οκτωβρίου_Νοεμβρίου_Δεκεμβρίου'.split('_'), months : function (momentToFormat, format) { if (/D/.test(format.substring(0, format.indexOf('MMMM')))) { // if there is a day number before 'MMMM' return this._monthsGenitiveEl[momentToFormat.month()]; } else { return this._monthsNominativeEl[momentToFormat.month()]; } }, monthsShort : 'Ιαν_Φεβ_Μαρ_Απρ_Μαϊ_Ιουν_Ιουλ_Αυγ_Σεπ_Οκτ_Νοε_Δεκ'.split('_'), weekdays : 'Κυριακή_Δευτέρα_Τρίτη_Τετάρτη_Πέμπτη_Παρασκευή_Σάββατο'.split('_'), weekdaysShort : 'Κυρ_Δευ_Τρι_Τετ_Πεμ_Παρ_Σαβ'.split('_'), weekdaysMin : 'Κυ_Δε_Τρ_Τε_Πε_Πα_Σα'.split('_'), meridiem : function (hours, minutes, isLower) { if (hours > 11) { return isLower ? 'μμ' : 'ΜΜ'; } else { return isLower ? 'πμ' : 'ΠΜ'; } }, isPM : function (input) { return ((input + '').toLowerCase()[0] === 'μ'); }, meridiemParse : /[ΠΜ]\.?Μ?\.?/i, longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY h:mm A', LLLL : 'dddd, D MMMM YYYY h:mm A' }, calendarEl : { sameDay : '[Σήμερα {}] LT', nextDay : '[Αύριο {}] LT', nextWeek : 'dddd [{}] LT', lastDay : '[Χθες {}] LT', lastWeek : function () { switch (this.day()) { case 6: return '[το προηγούμενο] dddd [{}] LT'; default: return '[την προηγούμενη] dddd [{}] LT'; } }, sameElse : 'L' }, calendar : function (key, mom) { var output = this._calendarEl[key], hours = mom && mom.hours(); if (isFunction(output)) { output = output.apply(mom); } return output.replace('{}', (hours % 12 === 1 ? 'στη' : 'στις')); }, relativeTime : { future : 'σε %s', past : '%s πριν', s : 'λίγα δευτερόλεπτα', m : 'ένα λεπτό', mm : '%d λεπτά', h : 'μία ώρα', hh : '%d ώρες', d : 'μία μέρα', dd : '%d μέρες', M : 'ένας μήνας', MM : '%d μήνες', y : 'ένας χρόνος', yy : '%d χρόνια' }, ordinalParse: /\d{1,2}η/, ordinal: '%dη', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return el; }))); /***/ }, /* 369 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : English (Australia) [en-au] //! author : Jared Morse : https://github.com/jarcoal ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ?
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var enAu = moment.defineLocale('en-au', { months : 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'), weekdays : 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_'), weekdaysShort : 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'), weekdaysMin : 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'), longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY h:mm A', LLLL : 'dddd, D MMMM YYYY h:mm A' }, calendar : { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[Last] dddd [at] LT', sameElse : 'L' }, relativeTime : { future : 'in %s', past : '%s ago', s : 'a few seconds', m : 'a minute', mm : '%d minutes', h : 'an hour', hh : '%d hours', d : 'a day', dd : '%d days', M : 'a month', MM : '%d months', y : 'a year', yy : '%d years' }, ordinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return enAu; }))); /***/ }, /* 370 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : English (Canada) [en-ca] //! author : Jonathan Abourbih : https://github.com/jonbca ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var enCa = moment.defineLocale('en-ca', { months : 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'), weekdays : 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_'), weekdaysShort : 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'), weekdaysMin : 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'), longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'YYYY-MM-DD', LL : 'MMMM D, YYYY', LLL : 'MMMM D, YYYY h:mm A', LLLL : 'dddd, MMMM D, YYYY h:mm A' }, calendar : { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[Last] dddd [at] LT', sameElse : 'L' }, relativeTime : { future : 'in %s', past : '%s ago', s : 'a few seconds', m : 'a minute', mm : '%d minutes', h : 'an hour', hh : '%d hours', d : 'a day', dd : '%d days', M : 'a month', MM : '%d months', y : 'a year', yy : '%d years' }, ordinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; } }); return enCa; }))); /***/ }, /* 371 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : English (United Kingdom) [en-gb] //! author : Chris Gedrim : https://github.com/chrisgedrim ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var enGb = moment.defineLocale('en-gb', { months : 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'), weekdays : 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_'), weekdaysShort : 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'), weekdaysMin : 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[Last] dddd [at] LT', sameElse : 'L' }, relativeTime : { future : 'in %s', past : '%s ago', s : 'a few seconds', m : 'a minute', mm : '%d minutes', h : 'an hour', hh : '%d hours', d : 'a day', dd : '%d days', M : 'a month', MM : '%d months', y : 'a year', yy : '%d years' }, ordinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return enGb; }))); /***/ }, /* 372 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : English (Ireland) [en-ie] //! author : Chris Cartlidge : https://github.com/chriscartlidge ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var enIe = moment.defineLocale('en-ie', { months : 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'), weekdays : 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_'), weekdaysShort : 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'), weekdaysMin : 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD-MM-YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[Last] dddd [at] LT', sameElse : 'L' }, relativeTime : { future : 'in %s', past : '%s ago', s : 'a few seconds', m : 'a minute', mm : '%d minutes', h : 'an hour', hh : '%d hours', d : 'a day', dd : '%d days', M : 'a month', MM : '%d months', y : 'a year', yy : '%d years' }, ordinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return enIe; }))); /***/ }, /* 373 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : English (New Zealand) [en-nz] //! author : Luke McGregor : https://github.com/lukemcgregor ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var enNz = moment.defineLocale('en-nz', { months : 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'), weekdays : 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_'), weekdaysShort : 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'), weekdaysMin : 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'), longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY h:mm A', LLLL : 'dddd, D MMMM YYYY h:mm A' }, calendar : { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[Last] dddd [at] LT', sameElse : 'L' }, relativeTime : { future : 'in %s', past : '%s ago', s : 'a few seconds', m : 'a minute', mm : '%d minutes', h : 'an hour', hh : '%d hours', d : 'a day', dd : '%d days', M : 'a month', MM : '%d months', y : 'a year', yy : '%d years' }, ordinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return enNz; }))); /***/ }, /* 374 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Esperanto [eo] //! author : Colin Dean : https://github.com/colindean //! komento: Mi estas malcerta se mi korekte traktis akuzativojn en tiu traduko. //! Se ne, bonvolu korekti kaj avizi min por ke mi povas lerni! ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var eo = moment.defineLocale('eo', { months : 'januaro_februaro_marto_aprilo_majo_junio_julio_aŭgusto_septembro_oktobro_novembro_decembro'.split('_'), monthsShort : 'jan_feb_mar_apr_maj_jun_jul_aŭg_sep_okt_nov_dec'.split('_'), weekdays : 'Dimanĉo_Lundo_Mardo_Merkredo_Ĵaŭdo_Vendredo_Sabato'.split('_'), weekdaysShort : 'Dim_Lun_Mard_Merk_Ĵaŭ_Ven_Sab'.split('_'), weekdaysMin : 'Di_Lu_Ma_Me_Ĵa_Ve_Sa'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'YYYY-MM-DD', LL : 'D[-an de] MMMM, YYYY', LLL : 'D[-an de] MMMM, YYYY HH:mm', LLLL : 'dddd, [la] D[-an de] MMMM, YYYY HH:mm' }, meridiemParse: /[ap]\.t\.m/i, isPM: function (input) { return input.charAt(0).toLowerCase() === 'p'; }, meridiem : function (hours, minutes, isLower) { if (hours > 11) { return isLower ? 'p.t.m.' : 'P.T.M.'; } else { return isLower ? 'a.t.m.' : 'A.T.M.'; } }, calendar : { sameDay : '[Hodiaŭ je] LT', nextDay : '[Morgaŭ je] LT', nextWeek : 'dddd [je] LT', lastDay : '[Hieraŭ je] LT', lastWeek : '[pasinta] dddd [je] LT', sameElse : 'L' }, relativeTime : { future : 'je %s', past : 'antaŭ %s', s : 'sekundoj', m : 'minuto', mm : '%d minutoj', h : 'horo', hh : '%d horoj', d : 'tago',//ne 'diurno', ĉar estas uzita por proksimumo dd : '%d tagoj', M : 'monato', MM : '%d monatoj', y : 'jaro', yy : '%d jaroj' }, ordinalParse: /\d{1,2}a/, ordinal : '%da', week : { dow : 1, // Monday is the first day of the week. 
doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return eo; }))); /***/ }, /* 375 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Spanish [es] //! author : Julio Napurí : https://github.com/julionc ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var monthsShortDot = 'ene._feb._mar._abr._may._jun._jul._ago._sep._oct._nov._dic.'.split('_'); var monthsShort = 'ene_feb_mar_abr_may_jun_jul_ago_sep_oct_nov_dic'.split('_'); var es = moment.defineLocale('es', { months : 'enero_febrero_marzo_abril_mayo_junio_julio_agosto_septiembre_octubre_noviembre_diciembre'.split('_'), monthsShort : function (m, format) { if (/-MMM-/.test(format)) { return monthsShort[m.month()]; } else { return monthsShortDot[m.month()]; } }, monthsParseExact : true, weekdays : 'domingo_lunes_martes_miércoles_jueves_viernes_sábado'.split('_'), weekdaysShort : 'dom._lun._mar._mié._jue._vie._sáb.'.split('_'), weekdaysMin : 'do_lu_ma_mi_ju_vi_sá'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD/MM/YYYY', LL : 'D [de] MMMM [de] YYYY', LLL : 'D [de] MMMM [de] YYYY H:mm', LLLL : 'dddd, D [de] MMMM [de] YYYY H:mm' }, calendar : { sameDay : function () { return '[hoy a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, nextDay : function () { return '[mañana a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, nextWeek : function () { return 'dddd [a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, lastDay : function () { return '[ayer a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, lastWeek : function () { return '[el] dddd [pasado a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, sameElse : 'L' }, relativeTime : { future : 'en %s', past : 'hace %s', s : 'unos segundos', m : 'un minuto', mm : '%d minutos', h : 'una hora', hh : '%d horas', d : 'un día', dd : '%d días', M : 'un mes', MM : '%d meses', y : 'un año', yy : '%d años' }, ordinalParse : /\d{1,2}º/, ordinal : '%dº', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return es; }))); /***/ }, /* 376 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Spanish (Dominican Republic) [es-do] ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var monthsShortDot = 'ene._feb._mar._abr._may._jun._jul._ago._sep._oct._nov._dic.'.split('_'); var monthsShort = 'ene_feb_mar_abr_may_jun_jul_ago_sep_oct_nov_dic'.split('_'); var esDo = moment.defineLocale('es-do', { months : 'enero_febrero_marzo_abril_mayo_junio_julio_agosto_septiembre_octubre_noviembre_diciembre'.split('_'), monthsShort : function (m, format) { if (/-MMM-/.test(format)) { return monthsShort[m.month()]; } else { return monthsShortDot[m.month()]; } }, monthsParseExact : true, weekdays : 'domingo_lunes_martes_miércoles_jueves_viernes_sábado'.split('_'), weekdaysShort : 'dom._lun._mar._mié._jue._vie._sáb.'.split('_'), weekdaysMin : 'do_lu_ma_mi_ju_vi_sá'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D [de] MMMM [de] YYYY', LLL : 'D [de] MMMM [de] YYYY h:mm A', LLLL : 'dddd, D [de] MMMM [de] YYYY h:mm A' }, calendar : { sameDay : function () { return '[hoy a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, nextDay : function () { return '[mañana a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, nextWeek : function () { return 'dddd [a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, lastDay : function () { return '[ayer a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, lastWeek : function () { return '[el] dddd [pasado a la' + ((this.hours() !== 1) ? 's' : '') + '] LT'; }, sameElse : 'L' }, relativeTime : { future : 'en %s', past : 'hace %s', s : 'unos segundos', m : 'un minuto', mm : '%d minutos', h : 'una hora', hh : '%d horas', d : 'un día', dd : '%d días', M : 'un mes', MM : '%d meses', y : 'un año', yy : '%d años' }, ordinalParse : /\d{1,2}º/, ordinal : '%dº', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return esDo; }))); /***/ }, /* 377 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Estonian [et] //! author : Henry Kehlmann : https://github.com/madhenry //! improvements : Illimar Tambek : https://github.com/ragulka ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function processRelativeTime(number, withoutSuffix, key, isFuture) { var format = { 's' : ['mõne sekundi', 'mõni sekund', 'paar sekundit'], 'm' : ['ühe minuti', 'üks minut'], 'mm': [number + ' minuti', number + ' minutit'], 'h' : ['ühe tunni', 'tund aega', 'üks tund'], 'hh': [number + ' tunni', number + ' tundi'], 'd' : ['ühe päeva', 'üks päev'], 'M' : ['kuu aja', 'kuu aega', 'üks kuu'], 'MM': [number + ' kuu', number + ' kuud'], 'y' : ['ühe aasta', 'aasta', 'üks aasta'], 'yy': [number + ' aasta', number + ' aastat'] }; if (withoutSuffix) { return format[key][2] ? format[key][2] : format[key][1]; } return isFuture ? 
format[key][0] : format[key][1]; } var et = moment.defineLocale('et', { months : 'jaanuar_veebruar_märts_aprill_mai_juuni_juuli_august_september_oktoober_november_detsember'.split('_'), monthsShort : 'jaan_veebr_märts_apr_mai_juuni_juuli_aug_sept_okt_nov_dets'.split('_'), weekdays : 'pühapäev_esmaspäev_teisipäev_kolmapäev_neljapäev_reede_laupäev'.split('_'), weekdaysShort : 'P_E_T_K_N_R_L'.split('_'), weekdaysMin : 'P_E_T_K_N_R_L'.split('_'), longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd, D. MMMM YYYY H:mm' }, calendar : { sameDay : '[Täna,] LT', nextDay : '[Homme,] LT', nextWeek : '[Järgmine] dddd LT', lastDay : '[Eile,] LT', lastWeek : '[Eelmine] dddd LT', sameElse : 'L' }, relativeTime : { future : '%s pärast', past : '%s tagasi', s : processRelativeTime, m : processRelativeTime, mm : processRelativeTime, h : processRelativeTime, hh : processRelativeTime, d : processRelativeTime, dd : '%d päeva', M : processRelativeTime, MM : processRelativeTime, y : processRelativeTime, yy : processRelativeTime }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return et; }))); /***/ }, /* 378 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Basque [eu] //! author : Eneko Illarramendi : https://github.com/eillarra ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var eu = moment.defineLocale('eu', { months : 'urtarrila_otsaila_martxoa_apirila_maiatza_ekaina_uztaila_abuztua_iraila_urria_azaroa_abendua'.split('_'), monthsShort : 'urt._ots._mar._api._mai._eka._uzt._abu._ira._urr._aza._abe.'.split('_'), monthsParseExact : true, weekdays : 'igandea_astelehena_asteartea_asteazkena_osteguna_ostirala_larunbata'.split('_'), weekdaysShort : 'ig._al._ar._az._og._ol._lr.'.split('_'), weekdaysMin : 'ig_al_ar_az_og_ol_lr'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'YYYY-MM-DD', LL : 'YYYY[ko] MMMM[ren] D[a]', LLL : 'YYYY[ko] MMMM[ren] D[a] HH:mm', LLLL : 'dddd, YYYY[ko] MMMM[ren] D[a] HH:mm', l : 'YYYY-M-D', ll : 'YYYY[ko] MMM D[a]', lll : 'YYYY[ko] MMM D[a] HH:mm', llll : 'ddd, YYYY[ko] MMM D[a] HH:mm' }, calendar : { sameDay : '[gaur] LT[etan]', nextDay : '[bihar] LT[etan]', nextWeek : 'dddd LT[etan]', lastDay : '[atzo] LT[etan]', lastWeek : '[aurreko] dddd LT[etan]', sameElse : 'L' }, relativeTime : { future : '%s barru', past : 'duela %s', s : 'segundo batzuk', m : 'minutu bat', mm : '%d minutu', h : 'ordu bat', hh : '%d ordu', d : 'egun bat', dd : '%d egun', M : 'hilabete bat', MM : '%d hilabete', y : 'urte bat', yy : '%d urte' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return eu; }))); /***/ }, /* 379 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Persian [fa] //! author : Ebrahim Byagowi : https://github.com/ebraminio ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '۱', '2': '۲', '3': '۳', '4': '۴', '5': '۵', '6': '۶', '7': '۷', '8': '۸', '9': '۹', '0': '۰' }; var numberMap = { '۱': '1', '۲': '2', '۳': '3', '۴': '4', '۵': '5', '۶': '6', '۷': '7', '۸': '8', '۹': '9', '۰': '0' }; var fa = moment.defineLocale('fa', { months : 'ژانویه_فوریه_مارس_آوریل_مه_ژوئن_ژوئیه_اوت_سپتامبر_اکتبر_نوامبر_دسامبر'.split('_'), monthsShort : 'ژانویه_فوریه_مارس_آوریل_مه_ژوئن_ژوئیه_اوت_سپتامبر_اکتبر_نوامبر_دسامبر'.split('_'), weekdays : 'یک\u200cشنبه_دوشنبه_سه\u200cشنبه_چهارشنبه_پنج\u200cشنبه_جمعه_شنبه'.split('_'), weekdaysShort : 'یک\u200cشنبه_دوشنبه_سه\u200cشنبه_چهارشنبه_پنج\u200cشنبه_جمعه_شنبه'.split('_'), weekdaysMin : 'ی_د_س_چ_پ_ج_ش'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, meridiemParse: /قبل از ظهر|بعد از ظهر/, isPM: function (input) { return /بعد از ظهر/.test(input); }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'قبل از ظهر'; } else { return 'بعد از ظهر'; } }, calendar : { sameDay : '[امروز ساعت] LT', nextDay : '[فردا ساعت] LT', nextWeek : 'dddd [ساعت] LT', lastDay : '[دیروز ساعت] LT', lastWeek : 'dddd [پیش] [ساعت] LT', sameElse : 'L' }, relativeTime : { future : 'در %s', past : '%s پیش', s : 'چندین ثانیه', m : 'یک دقیقه', mm : '%d دقیقه', h : 'یک ساعت', hh : '%d ساعت', d : 'یک روز', dd : '%d روز', M : 'یک ماه', MM : '%d ماه', y : 'یک سال', yy : '%d سال' }, preparse: function (string) { return string.replace(/[۰-۹]/g, function (match) { return numberMap[match]; }).replace(/،/g, ','); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }).replace(/,/g, '،'); }, ordinalParse: /\d{1,2}م/, ordinal : '%dم', week : { dow : 6, // Saturday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return fa; }))); /***/ }, /* 380 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Finnish [fi] //! author : Tarmo Aidantausta : https://github.com/bleadof ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var numbersPast = 'nolla yksi kaksi kolme neljä viisi kuusi seitsemän kahdeksan yhdeksän'.split(' '); var numbersFuture = [ 'nolla', 'yhden', 'kahden', 'kolmen', 'neljän', 'viiden', 'kuuden', numbersPast[7], numbersPast[8], numbersPast[9] ]; function translate(number, withoutSuffix, key, isFuture) { var result = ''; switch (key) { case 's': return isFuture ? 'muutaman sekunnin' : 'muutama sekunti'; case 'm': return isFuture ? 'minuutin' : 'minuutti'; case 'mm': result = isFuture ? 'minuutin' : 'minuuttia'; break; case 'h': return isFuture ? 'tunnin' : 'tunti'; case 'hh': result = isFuture ? 'tunnin' : 'tuntia'; break; case 'd': return isFuture ? 'päivän' : 'päivä'; case 'dd': result = isFuture ? 'päivän' : 'päivää'; break; case 'M': return isFuture ? 'kuukauden' : 'kuukausi'; case 'MM': result = isFuture ? 'kuukauden' : 'kuukautta'; break; case 'y': return isFuture ? 'vuoden' : 'vuosi'; case 'yy': result = isFuture ? 
'vuoden' : 'vuotta'; break; } result = verbalNumber(number, isFuture) + ' ' + result; return result; } function verbalNumber(number, isFuture) { return number < 10 ? (isFuture ? numbersFuture[number] : numbersPast[number]) : number; } var fi = moment.defineLocale('fi', { months : 'tammikuu_helmikuu_maaliskuu_huhtikuu_toukokuu_kesäkuu_heinäkuu_elokuu_syyskuu_lokakuu_marraskuu_joulukuu'.split('_'), monthsShort : 'tammi_helmi_maalis_huhti_touko_kesä_heinä_elo_syys_loka_marras_joulu'.split('_'), weekdays : 'sunnuntai_maanantai_tiistai_keskiviikko_torstai_perjantai_lauantai'.split('_'), weekdaysShort : 'su_ma_ti_ke_to_pe_la'.split('_'), weekdaysMin : 'su_ma_ti_ke_to_pe_la'.split('_'), longDateFormat : { LT : 'HH.mm', LTS : 'HH.mm.ss', L : 'DD.MM.YYYY', LL : 'Do MMMM[ta] YYYY', LLL : 'Do MMMM[ta] YYYY, [klo] HH.mm', LLLL : 'dddd, Do MMMM[ta] YYYY, [klo] HH.mm', l : 'D.M.YYYY', ll : 'Do MMM YYYY', lll : 'Do MMM YYYY, [klo] HH.mm', llll : 'ddd, Do MMM YYYY, [klo] HH.mm' }, calendar : { sameDay : '[tänään] [klo] LT', nextDay : '[huomenna] [klo] LT', nextWeek : 'dddd [klo] LT', lastDay : '[eilen] [klo] LT', lastWeek : '[viime] dddd[na] [klo] LT', sameElse : 'L' }, relativeTime : { future : '%s päästä', past : '%s sitten', s : translate, m : translate, mm : translate, h : translate, hh : translate, d : translate, dd : translate, M : translate, MM : translate, y : translate, yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return fi; }))); /***/ }, /* 381 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Faroese [fo] //! author : Ragnar Johannesen : https://github.com/ragnar123 ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var fo = moment.defineLocale('fo', { months : 'januar_februar_mars_apríl_mai_juni_juli_august_september_oktober_november_desember'.split('_'), monthsShort : 'jan_feb_mar_apr_mai_jun_jul_aug_sep_okt_nov_des'.split('_'), weekdays : 'sunnudagur_mánadagur_týsdagur_mikudagur_hósdagur_fríggjadagur_leygardagur'.split('_'), weekdaysShort : 'sun_mán_týs_mik_hós_frí_ley'.split('_'), weekdaysMin : 'su_má_tý_mi_hó_fr_le'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D. MMMM, YYYY HH:mm' }, calendar : { sameDay : '[Í dag kl.] LT', nextDay : '[Í morgin kl.] LT', nextWeek : 'dddd [kl.] LT', lastDay : '[Í gjár kl.] LT', lastWeek : '[síðstu] dddd [kl] LT', sameElse : 'L' }, relativeTime : { future : 'um %s', past : '%s síðani', s : 'fá sekund', m : 'ein minutt', mm : '%d minuttir', h : 'ein tími', hh : '%d tímar', d : 'ein dagur', dd : '%d dagar', M : 'ein mánaði', MM : '%d mánaðir', y : 'eitt ár', yy : '%d ár' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return fo; }))); /***/ }, /* 382 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : French [fr] //! author : John Fischer : https://github.com/jfroffice ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var fr = moment.defineLocale('fr', { months : 'janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre'.split('_'), monthsShort : 'janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.'.split('_'), monthsParseExact : true, weekdays : 'dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi'.split('_'), weekdaysShort : 'dim._lun._mar._mer._jeu._ven._sam.'.split('_'), weekdaysMin : 'Di_Lu_Ma_Me_Je_Ve_Sa'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[Aujourd\'hui à] LT', nextDay: '[Demain à] LT', nextWeek: 'dddd [à] LT', lastDay: '[Hier à] LT', lastWeek: 'dddd [dernier à] LT', sameElse: 'L' }, relativeTime : { future : 'dans %s', past : 'il y a %s', s : 'quelques secondes', m : 'une minute', mm : '%d minutes', h : 'une heure', hh : '%d heures', d : 'un jour', dd : '%d jours', M : 'un mois', MM : '%d mois', y : 'un an', yy : '%d ans' }, ordinalParse: /\d{1,2}(er|)/, ordinal : function (number) { return number + (number === 1 ? 'er' : ''); }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return fr; }))); /***/ }, /* 383 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : French (Canada) [fr-ca] //! author : Jonathan Abourbih : https://github.com/jonbca ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var frCa = moment.defineLocale('fr-ca', { months : 'janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre'.split('_'), monthsShort : 'janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.'.split('_'), monthsParseExact : true, weekdays : 'dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi'.split('_'), weekdaysShort : 'dim._lun._mar._mer._jeu._ven._sam.'.split('_'), weekdaysMin : 'Di_Lu_Ma_Me_Je_Ve_Sa'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'YYYY-MM-DD', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[Aujourd\'hui à] LT', nextDay: '[Demain à] LT', nextWeek: 'dddd [à] LT', lastDay: '[Hier à] LT', lastWeek: 'dddd [dernier à] LT', sameElse: 'L' }, relativeTime : { future : 'dans %s', past : 'il y a %s', s : 'quelques secondes', m : 'une minute', mm : '%d minutes', h : 'une heure', hh : '%d heures', d : 'un jour', dd : '%d jours', M : 'un mois', MM : '%d mois', y : 'un an', yy : '%d ans' }, ordinalParse: /\d{1,2}(er|e)/, ordinal : function (number) { return number + (number === 1 ? 'er' : 'e'); } }); return frCa; }))); /***/ }, /* 384 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : French (Switzerland) [fr-ch] //! author : Gaspard Bucher : https://github.com/gaspard ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var frCh = moment.defineLocale('fr-ch', { months : 'janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre'.split('_'), monthsShort : 'janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.'.split('_'), monthsParseExact : true, weekdays : 'dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi'.split('_'), weekdaysShort : 'dim._lun._mar._mer._jeu._ven._sam.'.split('_'), weekdaysMin : 'Di_Lu_Ma_Me_Je_Ve_Sa'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[Aujourd\'hui à] LT', nextDay: '[Demain à] LT', nextWeek: 'dddd [à] LT', lastDay: '[Hier à] LT', lastWeek: 'dddd [dernier à] LT', sameElse: 'L' }, relativeTime : { future : 'dans %s', past : 'il y a %s', s : 'quelques secondes', m : 'une minute', mm : '%d minutes', h : 'une heure', hh : '%d heures', d : 'un jour', dd : '%d jours', M : 'un mois', MM : '%d mois', y : 'un an', yy : '%d ans' }, ordinalParse: /\d{1,2}(er|e)/, ordinal : function (number) { return number + (number === 1 ? 'er' : 'e'); }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return frCh; }))); /***/ }, /* 385 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Frisian [fy] //! author : Robin van der Vliet : https://github.com/robin0van0der0v ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var monthsShortWithDots = 'jan._feb._mrt._apr._mai_jun._jul._aug._sep._okt._nov._des.'.split('_'); var monthsShortWithoutDots = 'jan_feb_mrt_apr_mai_jun_jul_aug_sep_okt_nov_des'.split('_'); var fy = moment.defineLocale('fy', { months : 'jannewaris_febrewaris_maart_april_maaie_juny_july_augustus_septimber_oktober_novimber_desimber'.split('_'), monthsShort : function (m, format) { if (/-MMM-/.test(format)) { return monthsShortWithoutDots[m.month()]; } else { return monthsShortWithDots[m.month()]; } }, monthsParseExact : true, weekdays : 'snein_moandei_tiisdei_woansdei_tongersdei_freed_sneon'.split('_'), weekdaysShort : 'si._mo._ti._wo._to._fr._so.'.split('_'), weekdaysMin : 'Si_Mo_Ti_Wo_To_Fr_So'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD-MM-YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[hjoed om] LT', nextDay: '[moarn om] LT', nextWeek: 'dddd [om] LT', lastDay: '[juster om] LT', lastWeek: '[ôfrûne] dddd [om] LT', sameElse: 'L' }, relativeTime : { future : 'oer %s', past : '%s lyn', s : 'in pear sekonden', m : 'ien minút', mm : '%d minuten', h : 'ien oere', hh : '%d oeren', d : 'ien dei', dd : '%d dagen', M : 'ien moanne', MM : '%d moannen', y : 'ien jier', yy : '%d jierren' }, ordinalParse: /\d{1,2}(ste|de)/, ordinal : function (number) { return number + ((number === 1 || number === 8 || number >= 20) ? 'ste' : 'de'); }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return fy; }))); /***/ }, /* 386 */ /***/ function(module, exports, __webpack_require__) { //! 
moment.js locale configuration //! locale : Scottish Gaelic [gd] //! author : Jon Ashdown : https://github.com/jonashdown ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var months = [ 'Am Faoilleach', 'An Gearran', 'Am Màrt', 'An Giblean', 'An Cèitean', 'An t-Ògmhios', 'An t-Iuchar', 'An Lùnastal', 'An t-Sultain', 'An Dàmhair', 'An t-Samhain', 'An Dùbhlachd' ]; var monthsShort = ['Faoi', 'Gear', 'Màrt', 'Gibl', 'Cèit', 'Ògmh', 'Iuch', 'Lùn', 'Sult', 'Dàmh', 'Samh', 'Dùbh']; var weekdays = ['Didòmhnaich', 'Diluain', 'Dimàirt', 'Diciadain', 'Diardaoin', 'Dihaoine', 'Disathairne']; var weekdaysShort = ['Did', 'Dil', 'Dim', 'Dic', 'Dia', 'Dih', 'Dis']; var weekdaysMin = ['Dò', 'Lu', 'Mà', 'Ci', 'Ar', 'Ha', 'Sa']; var gd = moment.defineLocale('gd', { months : months, monthsShort : monthsShort, monthsParseExact : true, weekdays : weekdays, weekdaysShort : weekdaysShort, weekdaysMin : weekdaysMin, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[An-diugh aig] LT', nextDay : '[A-màireach aig] LT', nextWeek : 'dddd [aig] LT', lastDay : '[An-dè aig] LT', lastWeek : 'dddd [seo chaidh] [aig] LT', sameElse : 'L' }, relativeTime : { future : 'ann an %s', past : 'bho chionn %s', s : 'beagan diogan', m : 'mionaid', mm : '%d mionaidean', h : 'uair', hh : '%d uairean', d : 'latha', dd : '%d latha', M : 'mìos', MM : '%d mìosan', y : 'bliadhna', yy : '%d bliadhna' }, ordinalParse : /\d{1,2}(d|na|mh)/, ordinal : function (number) { var output = number === 1 ? 'd' : number % 10 === 2 ? 'na' : 'mh'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return gd; }))); /***/ }, /* 387 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Galician [gl] //! author : Juan G. Hurtado : https://github.com/juanghurtado ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var gl = moment.defineLocale('gl', { months : 'xaneiro_febreiro_marzo_abril_maio_xuño_xullo_agosto_setembro_outubro_novembro_decembro'.split('_'), monthsShort : 'xan._feb._mar._abr._mai._xuñ._xul._ago._set._out._nov._dec.'.split('_'), monthsParseExact: true, weekdays : 'domingo_luns_martes_mércores_xoves_venres_sábado'.split('_'), weekdaysShort : 'dom._lun._mar._mér._xov._ven._sáb.'.split('_'), weekdaysMin : 'do_lu_ma_mé_xo_ve_sá'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD/MM/YYYY', LL : 'D [de] MMMM [de] YYYY', LLL : 'D [de] MMMM [de] YYYY H:mm', LLLL : 'dddd, D [de] MMMM [de] YYYY H:mm' }, calendar : { sameDay : function () { return '[hoxe ' + ((this.hours() !== 1) ? 'ás' : 'á') + '] LT'; }, nextDay : function () { return '[mañá ' + ((this.hours() !== 1) ? 'ás' : 'á') + '] LT'; }, nextWeek : function () { return 'dddd [' + ((this.hours() !== 1) ? 'ás' : 'a') + '] LT'; }, lastDay : function () { return '[onte ' + ((this.hours() !== 1) ? 'á' : 'a') + '] LT'; }, lastWeek : function () { return '[o] dddd [pasado ' + ((this.hours() !== 1) ? 
'ás' : 'a') + '] LT'; }, sameElse : 'L' }, relativeTime : { future : function (str) { if (str.indexOf('un') === 0) { return 'n' + str; } return 'en ' + str; }, past : 'hai %s', s : 'uns segundos', m : 'un minuto', mm : '%d minutos', h : 'unha hora', hh : '%d horas', d : 'un día', dd : '%d días', M : 'un mes', MM : '%d meses', y : 'un ano', yy : '%d anos' }, ordinalParse : /\d{1,2}º/, ordinal : '%dº', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return gl; }))); /***/ }, /* 388 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Hebrew [he] //! author : Tomer Cohen : https://github.com/tomer //! author : Moshe Simantov : https://github.com/DevelopmentIL //! author : Tal Ater : https://github.com/TalAter ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var he = moment.defineLocale('he', { months : 'ינואר_פברואר_מרץ_אפריל_מאי_יוני_יולי_אוגוסט_ספטמבר_אוקטובר_נובמבר_דצמבר'.split('_'), monthsShort : 'ינו׳_פבר׳_מרץ_אפר׳_מאי_יוני_יולי_אוג׳_ספט׳_אוק׳_נוב׳_דצמ׳'.split('_'), weekdays : 'ראשון_שני_שלישי_רביעי_חמישי_שישי_שבת'.split('_'), weekdaysShort : 'א׳_ב׳_ג׳_ד׳_ה׳_ו׳_ש׳'.split('_'), weekdaysMin : 'א_ב_ג_ד_ה_ו_ש'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D [ב]MMMM YYYY', LLL : 'D [ב]MMMM YYYY HH:mm', LLLL : 'dddd, D [ב]MMMM YYYY HH:mm', l : 'D/M/YYYY', ll : 'D MMM YYYY', lll : 'D MMM YYYY HH:mm', llll : 'ddd, D MMM YYYY HH:mm' }, calendar : { sameDay : '[היום ב־]LT', nextDay : '[מחר ב־]LT', nextWeek : 'dddd [בשעה] LT', lastDay : '[אתמול ב־]LT', lastWeek : '[ביום] dddd [האחרון בשעה] LT', sameElse : 'L' }, relativeTime : { future : 'בעוד %s', past : 'לפני %s', s : 'מספר שניות', m : 'דקה', mm : '%d דקות', h : 'שעה', hh : function (number) { if (number === 2) { return 'שעתיים'; } return number + ' שעות'; }, d : 'יום', dd : function (number) { if (number === 2) { return 'יומיים'; } return number + ' ימים'; }, M : 'חודש', MM : function (number) { if (number === 2) { return 'חודשיים'; } return number + ' חודשים'; }, y : 'שנה', yy : function (number) { if (number === 2) { return 'שנתיים'; } else if (number % 10 === 0 && number !== 10) { return number + ' שנה'; } return number + ' שנים'; } }, meridiemParse: /אחה"צ|לפנה"צ|אחרי הצהריים|לפני הצהריים|לפנות בוקר|בבוקר|בערב/i, isPM : function (input) { return /^(אחה"צ|אחרי הצהריים|בערב)$/.test(input); }, meridiem : function (hour, minute, isLower) { if (hour < 5) { return 'לפנות בוקר'; } else if (hour < 10) { return 'בבוקר'; } else if (hour < 12) { return isLower ? 'לפנה"צ' : 'לפני הצהריים'; } else if (hour < 18) { return isLower ? 'אחה"צ' : 'אחרי הצהריים'; } else { return 'בערב'; } } }); return he; }))); /***/ }, /* 389 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Hindi [hi] //! author : Mayank Singhal : https://github.com/mayanksinghal ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '१', '2': '२', '3': '३', '4': '४', '5': '५', '6': '६', '7': '७', '8': '८', '9': '९', '0': '०' }; var numberMap = { '१': '1', '२': '2', '३': '3', '४': '4', '५': '5', '६': '6', '७': '7', '८': '8', '९': '9', '०': '0' }; var hi = moment.defineLocale('hi', { months : 'जनवरी_फ़रवरी_मार्च_अप्रैल_मई_जून_जुलाई_अगस्त_सितम्बर_अक्टूबर_नवम्बर_दिसम्बर'.split('_'), monthsShort : 'जन._फ़र._मार्च_अप्रै._मई_जून_जुल._अग._सित._अक्टू._नव._दिस.'.split('_'), monthsParseExact: true, weekdays : 'रविवार_सोमवार_मंगलवार_बुधवार_गुरूवार_शुक्रवार_शनिवार'.split('_'), weekdaysShort : 'रवि_सोम_मंगल_बुध_गुरू_शुक्र_शनि'.split('_'), weekdaysMin : 'र_सो_मं_बु_गु_शु_श'.split('_'), longDateFormat : { LT : 'A h:mm बजे', LTS : 'A h:mm:ss बजे', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm बजे', LLLL : 'dddd, D MMMM YYYY, A h:mm बजे' }, calendar : { sameDay : '[आज] LT', nextDay : '[कल] LT', nextWeek : 'dddd, LT', lastDay : '[कल] LT', lastWeek : '[पिछले] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s में', past : '%s पहले', s : 'कुछ ही क्षण', m : 'एक मिनट', mm : '%d मिनट', h : 'एक घंटा', hh : '%d घंटे', d : 'एक दिन', dd : '%d दिन', M : 'एक महीने', MM : '%d महीने', y : 'एक वर्ष', yy : '%d वर्ष' }, preparse: function (string) { return string.replace(/[१२३४५६७८९०]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, // Hindi notation for meridiems are quite fuzzy in practice. While there exists // a rigid notion of a 'Pahar' it is not used as rigidly in modern Hindi. meridiemParse: /रात|सुबह|दोपहर|शाम/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'रात') { return hour < 4 ? hour : hour + 12; } else if (meridiem === 'सुबह') { return hour; } else if (meridiem === 'दोपहर') { return hour >= 10 ? hour : hour + 12; } else if (meridiem === 'शाम') { return hour + 12; } }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'रात'; } else if (hour < 10) { return 'सुबह'; } else if (hour < 17) { return 'दोपहर'; } else if (hour < 20) { return 'शाम'; } else { return 'रात'; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return hi; }))); /***/ }, /* 390 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Croatian [hr] //! author : Bojan Marković : https://github.com/bmarkovic ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function translate(number, withoutSuffix, key) { var result = number + ' '; switch (key) { case 'm': return withoutSuffix ? 'jedna minuta' : 'jedne minute'; case 'mm': if (number === 1) { result += 'minuta'; } else if (number === 2 || number === 3 || number === 4) { result += 'minute'; } else { result += 'minuta'; } return result; case 'h': return withoutSuffix ? 
'jedan sat' : 'jednog sata'; case 'hh': if (number === 1) { result += 'sat'; } else if (number === 2 || number === 3 || number === 4) { result += 'sata'; } else { result += 'sati'; } return result; case 'dd': if (number === 1) { result += 'dan'; } else { result += 'dana'; } return result; case 'MM': if (number === 1) { result += 'mjesec'; } else if (number === 2 || number === 3 || number === 4) { result += 'mjeseca'; } else { result += 'mjeseci'; } return result; case 'yy': if (number === 1) { result += 'godina'; } else if (number === 2 || number === 3 || number === 4) { result += 'godine'; } else { result += 'godina'; } return result; } } var hr = moment.defineLocale('hr', { months : { format: 'siječnja_veljače_ožujka_travnja_svibnja_lipnja_srpnja_kolovoza_rujna_listopada_studenoga_prosinca'.split('_'), standalone: 'siječanj_veljača_ožujak_travanj_svibanj_lipanj_srpanj_kolovoz_rujan_listopad_studeni_prosinac'.split('_') }, monthsShort : 'sij._velj._ožu._tra._svi._lip._srp._kol._ruj._lis._stu._pro.'.split('_'), monthsParseExact: true, weekdays : 'nedjelja_ponedjeljak_utorak_srijeda_četvrtak_petak_subota'.split('_'), weekdaysShort : 'ned._pon._uto._sri._čet._pet._sub.'.split('_'), weekdaysMin : 'ne_po_ut_sr_če_pe_su'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd, D. MMMM YYYY H:mm' }, calendar : { sameDay : '[danas u] LT', nextDay : '[sutra u] LT', nextWeek : function () { switch (this.day()) { case 0: return '[u] [nedjelju] [u] LT'; case 3: return '[u] [srijedu] [u] LT'; case 6: return '[u] [subotu] [u] LT'; case 1: case 2: case 4: case 5: return '[u] dddd [u] LT'; } }, lastDay : '[jučer u] LT', lastWeek : function () { switch (this.day()) { case 0: case 3: return '[prošlu] dddd [u] LT'; case 6: return '[prošle] [subote] [u] LT'; case 1: case 2: case 4: case 5: return '[prošli] dddd [u] LT'; } }, sameElse : 'L' }, relativeTime : { future : 'za %s', past : 'prije %s', s : 'par sekundi', m : translate, mm : translate, h : translate, hh : translate, d : 'dan', dd : translate, M : 'mjesec', MM : translate, y : 'godinu', yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return hr; }))); /***/ }, /* 391 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Hungarian [hu] //! author : Adam Brunner : https://github.com/adambrunner ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var weekEndings = 'vasárnap hétfőn kedden szerdán csütörtökön pénteken szombaton'.split(' '); function translate(number, withoutSuffix, key, isFuture) { var num = number, suffix; switch (key) { case 's': return (isFuture || withoutSuffix) ? 'néhány másodperc' : 'néhány másodperce'; case 'm': return 'egy' + (isFuture || withoutSuffix ? ' perc' : ' perce'); case 'mm': return num + (isFuture || withoutSuffix ? ' perc' : ' perce'); case 'h': return 'egy' + (isFuture || withoutSuffix ? ' óra' : ' órája'); case 'hh': return num + (isFuture || withoutSuffix ? ' óra' : ' órája'); case 'd': return 'egy' + (isFuture || withoutSuffix ? ' nap' : ' napja'); case 'dd': return num + (isFuture || withoutSuffix ? 
' nap' : ' napja'); case 'M': return 'egy' + (isFuture || withoutSuffix ? ' hónap' : ' hónapja'); case 'MM': return num + (isFuture || withoutSuffix ? ' hónap' : ' hónapja'); case 'y': return 'egy' + (isFuture || withoutSuffix ? ' év' : ' éve'); case 'yy': return num + (isFuture || withoutSuffix ? ' év' : ' éve'); } return ''; } function week(isFuture) { return (isFuture ? '' : '[múlt] ') + '[' + weekEndings[this.day()] + '] LT[-kor]'; } var hu = moment.defineLocale('hu', { months : 'január_február_március_április_május_június_július_augusztus_szeptember_október_november_december'.split('_'), monthsShort : 'jan_feb_márc_ápr_máj_jún_júl_aug_szept_okt_nov_dec'.split('_'), weekdays : 'vasárnap_hétfő_kedd_szerda_csütörtök_péntek_szombat'.split('_'), weekdaysShort : 'vas_hét_kedd_sze_csüt_pén_szo'.split('_'), weekdaysMin : 'v_h_k_sze_cs_p_szo'.split('_'), longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'YYYY.MM.DD.', LL : 'YYYY. MMMM D.', LLL : 'YYYY. MMMM D. H:mm', LLLL : 'YYYY. MMMM D., dddd H:mm' }, meridiemParse: /de|du/i, isPM: function (input) { return input.charAt(1).toLowerCase() === 'u'; }, meridiem : function (hours, minutes, isLower) { if (hours < 12) { return isLower === true ? 'de' : 'DE'; } else { return isLower === true ? 'du' : 'DU'; } }, calendar : { sameDay : '[ma] LT[-kor]', nextDay : '[holnap] LT[-kor]', nextWeek : function () { return week.call(this, true); }, lastDay : '[tegnap] LT[-kor]', lastWeek : function () { return week.call(this, false); }, sameElse : 'L' }, relativeTime : { future : '%s múlva', past : '%s', s : translate, m : translate, mm : translate, h : translate, hh : translate, d : translate, dd : translate, M : translate, MM : translate, y : translate, yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return hu; }))); /***/ }, /* 392 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Armenian [hy-am] //! author : Armendarabyan : https://github.com/armendarabyan ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var hyAm = moment.defineLocale('hy-am', { months : { format: 'հունվարի_փետրվարի_մարտի_ապրիլի_մայիսի_հունիսի_հուլիսի_օգոստոսի_սեպտեմբերի_հոկտեմբերի_նոյեմբերի_դեկտեմբերի'.split('_'), standalone: 'հունվար_փետրվար_մարտ_ապրիլ_մայիս_հունիս_հուլիս_օգոստոս_սեպտեմբեր_հոկտեմբեր_նոյեմբեր_դեկտեմբեր'.split('_') }, monthsShort : 'հնվ_փտր_մրտ_ապր_մյս_հնս_հլս_օգս_սպտ_հկտ_նմբ_դկտ'.split('_'), weekdays : 'կիրակի_երկուշաբթի_երեքշաբթի_չորեքշաբթի_հինգշաբթի_ուրբաթ_շաբաթ'.split('_'), weekdaysShort : 'կրկ_երկ_երք_չրք_հնգ_ուրբ_շբթ'.split('_'), weekdaysMin : 'կրկ_երկ_երք_չրք_հնգ_ուրբ_շբթ'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY թ.', LLL : 'D MMMM YYYY թ., HH:mm', LLLL : 'dddd, D MMMM YYYY թ., HH:mm' }, calendar : { sameDay: '[այսօր] LT', nextDay: '[վաղը] LT', lastDay: '[երեկ] LT', nextWeek: function () { return 'dddd [օրը ժամը] LT'; }, lastWeek: function () { return '[անցած] dddd [օրը ժամը] LT'; }, sameElse: 'L' }, relativeTime : { future : '%s հետո', past : '%s առաջ', s : 'մի քանի վայրկյան', m : 'րոպե', mm : '%d րոպե', h : 'ժամ', hh : '%d ժամ', d : 'օր', dd : '%d օր', M : 'ամիս', MM : '%d ամիս', y : 'տարի', yy : '%d տարի' }, meridiemParse: /գիշերվա|առավոտվա|ցերեկվա|երեկոյան/, isPM: function (input) { return /^(ցերեկվա|երեկոյան)$/.test(input); }, meridiem : function (hour) { if (hour < 4) { return 'գիշերվա'; } else if (hour < 12) { return 'առավոտվա'; } else if (hour < 17) { return 'ցերեկվա'; } else { return 'երեկոյան'; } }, ordinalParse: /\d{1,2}|\d{1,2}-(ին|րդ)/, ordinal: function (number, period) { switch (period) { case 'DDD': case 'w': case 'W': case 'DDDo': if (number === 1) { return number + '-ին'; } return number + '-րդ'; default: return number; } }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return hyAm; }))); /***/ }, /* 393 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Indonesian [id] //! author : Mohammad Satrio Utomo : https://github.com/tyok //! reference: http://id.wikisource.org/wiki/Pedoman_Umum_Ejaan_Bahasa_Indonesia_yang_Disempurnakan ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var id = moment.defineLocale('id', { months : 'Januari_Februari_Maret_April_Mei_Juni_Juli_Agustus_September_Oktober_November_Desember'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_Mei_Jun_Jul_Ags_Sep_Okt_Nov_Des'.split('_'), weekdays : 'Minggu_Senin_Selasa_Rabu_Kamis_Jumat_Sabtu'.split('_'), weekdaysShort : 'Min_Sen_Sel_Rab_Kam_Jum_Sab'.split('_'), weekdaysMin : 'Mg_Sn_Sl_Rb_Km_Jm_Sb'.split('_'), longDateFormat : { LT : 'HH.mm', LTS : 'HH.mm.ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY [pukul] HH.mm', LLLL : 'dddd, D MMMM YYYY [pukul] HH.mm' }, meridiemParse: /pagi|siang|sore|malam/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'pagi') { return hour; } else if (meridiem === 'siang') { return hour >= 11 ? 
hour : hour + 12; } else if (meridiem === 'sore' || meridiem === 'malam') { return hour + 12; } }, meridiem : function (hours, minutes, isLower) { if (hours < 11) { return 'pagi'; } else if (hours < 15) { return 'siang'; } else if (hours < 19) { return 'sore'; } else { return 'malam'; } }, calendar : { sameDay : '[Hari ini pukul] LT', nextDay : '[Besok pukul] LT', nextWeek : 'dddd [pukul] LT', lastDay : '[Kemarin pukul] LT', lastWeek : 'dddd [lalu pukul] LT', sameElse : 'L' }, relativeTime : { future : 'dalam %s', past : '%s yang lalu', s : 'beberapa detik', m : 'semenit', mm : '%d menit', h : 'sejam', hh : '%d jam', d : 'sehari', dd : '%d hari', M : 'sebulan', MM : '%d bulan', y : 'setahun', yy : '%d tahun' }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return id; }))); /***/ }, /* 394 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Icelandic [is] //! author : Hinrik Örn Sigurðsson : https://github.com/hinrik ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function plural(n) { if (n % 100 === 11) { return true; } else if (n % 10 === 1) { return false; } return true; } function translate(number, withoutSuffix, key, isFuture) { var result = number + ' '; switch (key) { case 's': return withoutSuffix || isFuture ? 'nokkrar sekúndur' : 'nokkrum sekúndum'; case 'm': return withoutSuffix ? 'mínúta' : 'mínútu'; case 'mm': if (plural(number)) { return result + (withoutSuffix || isFuture ? 'mínútur' : 'mínútum'); } else if (withoutSuffix) { return result + 'mínúta'; } return result + 'mínútu'; case 'hh': if (plural(number)) { return result + (withoutSuffix || isFuture ? 'klukkustundir' : 'klukkustundum'); } return result + 'klukkustund'; case 'd': if (withoutSuffix) { return 'dagur'; } return isFuture ? 'dag' : 'degi'; case 'dd': if (plural(number)) { if (withoutSuffix) { return result + 'dagar'; } return result + (isFuture ? 'daga' : 'dögum'); } else if (withoutSuffix) { return result + 'dagur'; } return result + (isFuture ? 'dag' : 'degi'); case 'M': if (withoutSuffix) { return 'mánuður'; } return isFuture ? 'mánuð' : 'mánuði'; case 'MM': if (plural(number)) { if (withoutSuffix) { return result + 'mánuðir'; } return result + (isFuture ? 'mánuði' : 'mánuðum'); } else if (withoutSuffix) { return result + 'mánuður'; } return result + (isFuture ? 'mánuð' : 'mánuði'); case 'y': return withoutSuffix || isFuture ? 'ár' : 'ári'; case 'yy': if (plural(number)) { return result + (withoutSuffix || isFuture ? 'ár' : 'árum'); } return result + (withoutSuffix || isFuture ? 'ár' : 'ári'); } } var is = moment.defineLocale('is', { months : 'janúar_febrúar_mars_apríl_maí_júní_júlí_ágúst_september_október_nóvember_desember'.split('_'), monthsShort : 'jan_feb_mar_apr_maí_jún_júl_ágú_sep_okt_nóv_des'.split('_'), weekdays : 'sunnudagur_mánudagur_þriðjudagur_miðvikudagur_fimmtudagur_föstudagur_laugardagur'.split('_'), weekdaysShort : 'sun_mán_þri_mið_fim_fös_lau'.split('_'), weekdaysMin : 'Su_Má_Þr_Mi_Fi_Fö_La'.split('_'), longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY [kl.] H:mm', LLLL : 'dddd, D. MMMM YYYY [kl.] H:mm' }, calendar : { sameDay : '[í dag kl.] LT', nextDay : '[á morgun kl.] LT', nextWeek : 'dddd [kl.] LT', lastDay : '[í gær kl.] 
LT', lastWeek : '[síðasta] dddd [kl.] LT', sameElse : 'L' }, relativeTime : { future : 'eftir %s', past : 'fyrir %s síðan', s : translate, m : translate, mm : translate, h : 'klukkustund', hh : translate, d : translate, dd : translate, M : translate, MM : translate, y : translate, yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return is; }))); /***/ }, /* 395 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Italian [it] //! author : Lorenzo : https://github.com/aliem //! author: Mattia Larentis: https://github.com/nostalgiaz ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var it = moment.defineLocale('it', { months : 'gennaio_febbraio_marzo_aprile_maggio_giugno_luglio_agosto_settembre_ottobre_novembre_dicembre'.split('_'), monthsShort : 'gen_feb_mar_apr_mag_giu_lug_ago_set_ott_nov_dic'.split('_'), weekdays : 'Domenica_Lunedì_Martedì_Mercoledì_Giovedì_Venerdì_Sabato'.split('_'), weekdaysShort : 'Dom_Lun_Mar_Mer_Gio_Ven_Sab'.split('_'), weekdaysMin : 'Do_Lu_Ma_Me_Gi_Ve_Sa'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay: '[Oggi alle] LT', nextDay: '[Domani alle] LT', nextWeek: 'dddd [alle] LT', lastDay: '[Ieri alle] LT', lastWeek: function () { switch (this.day()) { case 0: return '[la scorsa] dddd [alle] LT'; default: return '[lo scorso] dddd [alle] LT'; } }, sameElse: 'L' }, relativeTime : { future : function (s) { return ((/^[0-9].+$/).test(s) ? 'tra' : 'in') + ' ' + s; }, past : '%s fa', s : 'alcuni secondi', m : 'un minuto', mm : '%d minuti', h : 'un\'ora', hh : '%d ore', d : 'un giorno', dd : '%d giorni', M : 'un mese', MM : '%d mesi', y : 'un anno', yy : '%d anni' }, ordinalParse : /\d{1,2}º/, ordinal: '%dº', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return it; }))); /***/ }, /* 396 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Japanese [ja] //! author : LI Long : https://github.com/baryon ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ja = moment.defineLocale('ja', { months : '1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月'.split('_'), monthsShort : '1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月'.split('_'), weekdays : '日曜日_月曜日_火曜日_水曜日_木曜日_金曜日_土曜日'.split('_'), weekdaysShort : '日_月_火_水_木_金_土'.split('_'), weekdaysMin : '日_月_火_水_木_金_土'.split('_'), longDateFormat : { LT : 'Ah時m分', LTS : 'Ah時m分s秒', L : 'YYYY/MM/DD', LL : 'YYYY年M月D日', LLL : 'YYYY年M月D日Ah時m分', LLLL : 'YYYY年M月D日Ah時m分 dddd' }, meridiemParse: /午前|午後/i, isPM : function (input) { return input === '午後'; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return '午前'; } else { return '午後'; } }, calendar : { sameDay : '[今日] LT', nextDay : '[明日] LT', nextWeek : '[来週]dddd LT', lastDay : '[昨日] LT', lastWeek : '[前週]dddd LT', sameElse : 'L' }, ordinalParse : /\d{1,2}日/, ordinal : function (number, period) { switch (period) { case 'd': case 'D': case 'DDD': return number + '日'; default: return number; } }, relativeTime : { future : '%s後', past : '%s前', s : '数秒', m : '1分', mm : '%d分', h : '1時間', hh : '%d時間', d : '1日', dd : '%d日', M : '1ヶ月', MM : '%dヶ月', y : '1年', yy : '%d年' } }); return ja; }))); /***/ }, /* 397 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Javanese [jv] //! author : Rony Lantip : https://github.com/lantip //! reference: http://jv.wikipedia.org/wiki/Basa_Jawa ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var jv = moment.defineLocale('jv', { months : 'Januari_Februari_Maret_April_Mei_Juni_Juli_Agustus_September_Oktober_Nopember_Desember'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_Mei_Jun_Jul_Ags_Sep_Okt_Nop_Des'.split('_'), weekdays : 'Minggu_Senen_Seloso_Rebu_Kemis_Jemuwah_Septu'.split('_'), weekdaysShort : 'Min_Sen_Sel_Reb_Kem_Jem_Sep'.split('_'), weekdaysMin : 'Mg_Sn_Sl_Rb_Km_Jm_Sp'.split('_'), longDateFormat : { LT : 'HH.mm', LTS : 'HH.mm.ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY [pukul] HH.mm', LLLL : 'dddd, D MMMM YYYY [pukul] HH.mm' }, meridiemParse: /enjing|siyang|sonten|ndalu/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'enjing') { return hour; } else if (meridiem === 'siyang') { return hour >= 11 ? hour : hour + 12; } else if (meridiem === 'sonten' || meridiem === 'ndalu') { return hour + 12; } }, meridiem : function (hours, minutes, isLower) { if (hours < 11) { return 'enjing'; } else if (hours < 15) { return 'siyang'; } else if (hours < 19) { return 'sonten'; } else { return 'ndalu'; } }, calendar : { sameDay : '[Dinten puniko pukul] LT', nextDay : '[Mbenjang pukul] LT', nextWeek : 'dddd [pukul] LT', lastDay : '[Kala wingi pukul] LT', lastWeek : 'dddd [kepengker pukul] LT', sameElse : 'L' }, relativeTime : { future : 'wonten ing %s', past : '%s ingkang kepengker', s : 'sawetawis detik', m : 'setunggal menit', mm : '%d menit', h : 'setunggal jam', hh : '%d jam', d : 'sedinten', dd : '%d dinten', M : 'sewulan', MM : '%d wulan', y : 'setaun', yy : '%d taun' }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return jv; }))); /***/ }, /* 398 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Georgian [ka] //! 
author : Irakli Janiashvili : https://github.com/irakli-janiashvili ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ka = moment.defineLocale('ka', { months : { standalone: 'იანვარი_თებერვალი_მარტი_აპრილი_მაისი_ივნისი_ივლისი_აგვისტო_სექტემბერი_ოქტომბერი_ნოემბერი_დეკემბერი'.split('_'), format: 'იანვარს_თებერვალს_მარტს_აპრილის_მაისს_ივნისს_ივლისს_აგვისტს_სექტემბერს_ოქტომბერს_ნოემბერს_დეკემბერს'.split('_') }, monthsShort : 'იან_თებ_მარ_აპრ_მაი_ივნ_ივლ_აგვ_სექ_ოქტ_ნოე_დეკ'.split('_'), weekdays : { standalone: 'კვირა_ორშაბათი_სამშაბათი_ოთხშაბათი_ხუთშაბათი_პარასკევი_შაბათი'.split('_'), format: 'კვირას_ორშაბათს_სამშაბათს_ოთხშაბათს_ხუთშაბათს_პარასკევს_შაბათს'.split('_'), isFormat: /(წინა|შემდეგ)/ }, weekdaysShort : 'კვი_ორშ_სამ_ოთხ_ხუთ_პარ_შაბ'.split('_'), weekdaysMin : 'კვ_ორ_სა_ოთ_ხუ_პა_შა'.split('_'), longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY h:mm A', LLLL : 'dddd, D MMMM YYYY h:mm A' }, calendar : { sameDay : '[დღეს] LT[-ზე]', nextDay : '[ხვალ] LT[-ზე]', lastDay : '[გუშინ] LT[-ზე]', nextWeek : '[შემდეგ] dddd LT[-ზე]', lastWeek : '[წინა] dddd LT-ზე', sameElse : 'L' }, relativeTime : { future : function (s) { return (/(წამი|წუთი|საათი|წელი)/).test(s) ? s.replace(/ი$/, 'ში') : s + 'ში'; }, past : function (s) { if ((/(წამი|წუთი|საათი|დღე|თვე)/).test(s)) { return s.replace(/(ი|ე)$/, 'ის წინ'); } if ((/წელი/).test(s)) { return s.replace(/წელი$/, 'წლის წინ'); } }, s : 'რამდენიმე წამი', m : 'წუთი', mm : '%d წუთი', h : 'საათი', hh : '%d საათი', d : 'დღე', dd : '%d დღე', M : 'თვე', MM : '%d თვე', y : 'წელი', yy : '%d წელი' }, ordinalParse: /0|1-ლი|მე-\d{1,2}|\d{1,2}-ე/, ordinal : function (number) { if (number === 0) { return number; } if (number === 1) { return number + '-ლი'; } if ((number < 20) || (number <= 100 && (number % 20 === 0)) || (number % 100 === 0)) { return 'მე-' + number; } return number + '-ე'; }, week : { dow : 1, doy : 7 } }); return ka; }))); /***/ }, /* 399 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Kazakh [kk] //! authors : Nurlan Rakhimzhanov : https://github.com/nurlan ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var suffixes = { 0: '-ші', 1: '-ші', 2: '-ші', 3: '-ші', 4: '-ші', 5: '-ші', 6: '-шы', 7: '-ші', 8: '-ші', 9: '-шы', 10: '-шы', 20: '-шы', 30: '-шы', 40: '-шы', 50: '-ші', 60: '-шы', 70: '-ші', 80: '-ші', 90: '-шы', 100: '-ші' }; var kk = moment.defineLocale('kk', { months : 'қаңтар_ақпан_наурыз_сәуір_мамыр_маусым_шілде_тамыз_қыркүйек_қазан_қараша_желтоқсан'.split('_'), monthsShort : 'қаң_ақп_нау_сәу_мам_мау_шіл_там_қыр_қаз_қар_жел'.split('_'), weekdays : 'жексенбі_дүйсенбі_сейсенбі_сәрсенбі_бейсенбі_жұма_сенбі'.split('_'), weekdaysShort : 'жек_дүй_сей_сәр_бей_жұм_сен'.split('_'), weekdaysMin : 'жк_дй_сй_ср_бй_жм_сн'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Бүгін сағат] LT', nextDay : '[Ертең сағат] LT', nextWeek : 'dddd [сағат] LT', lastDay : '[Кеше сағат] LT', lastWeek : '[Өткен аптаның] dddd [сағат] LT', sameElse : 'L' }, relativeTime : { future : '%s ішінде', past : '%s бұрын', s : 'бірнеше секунд', m : 'бір минут', mm : '%d минут', h : 'бір сағат', hh : '%d сағат', d : 'бір күн', dd : '%d күн', M : 'бір ай', MM : '%d ай', y : 'бір жыл', yy : '%d жыл' }, ordinalParse: /\d{1,2}-(ші|шы)/, ordinal : function (number) { var a = number % 10, b = number >= 100 ? 100 : null; return number + (suffixes[number] || suffixes[a] || suffixes[b]); }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return kk; }))); /***/ }, /* 400 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Cambodian [km] //! author : Kruy Vanna : https://github.com/kruyvanna ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var km = moment.defineLocale('km', { months: 'មករា_កុម្ភៈ_មីនា_មេសា_ឧសភា_មិថុនា_កក្កដា_សីហា_កញ្ញា_តុលា_វិច្ឆិកា_ធ្នូ'.split('_'), monthsShort: 'មករា_កុម្ភៈ_មីនា_មេសា_ឧសភា_មិថុនា_កក្កដា_សីហា_កញ្ញា_តុលា_វិច្ឆិកា_ធ្នូ'.split('_'), weekdays: 'អាទិត្យ_ច័ន្ទ_អង្គារ_ពុធ_ព្រហស្បតិ៍_សុក្រ_សៅរ៍'.split('_'), weekdaysShort: 'អាទិត្យ_ច័ន្ទ_អង្គារ_ពុធ_ព្រហស្បតិ៍_សុក្រ_សៅរ៍'.split('_'), weekdaysMin: 'អាទិត្យ_ច័ន្ទ_អង្គារ_ពុធ_ព្រហស្បតិ៍_សុក្រ_សៅរ៍'.split('_'), longDateFormat: { LT: 'HH:mm', LTS : 'HH:mm:ss', L: 'DD/MM/YYYY', LL: 'D MMMM YYYY', LLL: 'D MMMM YYYY HH:mm', LLLL: 'dddd, D MMMM YYYY HH:mm' }, calendar: { sameDay: '[ថ្ងៃនេះ ម៉ោង] LT', nextDay: '[ស្អែក ម៉ោង] LT', nextWeek: 'dddd [ម៉ោង] LT', lastDay: '[ម្សិលមិញ ម៉ោង] LT', lastWeek: 'dddd [សប្តាហ៍មុន] [ម៉ោង] LT', sameElse: 'L' }, relativeTime: { future: '%sទៀត', past: '%sមុន', s: 'ប៉ុន្មានវិនាទី', m: 'មួយនាទី', mm: '%d នាទី', h: 'មួយម៉ោង', hh: '%d ម៉ោង', d: 'មួយថ្ងៃ', dd: '%d ថ្ងៃ', M: 'មួយខែ', MM: '%d ខែ', y: 'មួយឆ្នាំ', yy: '%d ឆ្នាំ' }, week: { dow: 1, // Monday is the first day of the week. doy: 4 // The week that contains Jan 4th is the first week of the year. } }); return km; }))); /***/ }, /* 401 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Korean [ko] //! author : Kyungwook, Park : https://github.com/kyungw00k //! author : Jeeeyul Lee <[email protected]> ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ko = moment.defineLocale('ko', { months : '1월_2월_3월_4월_5월_6월_7월_8월_9월_10월_11월_12월'.split('_'), monthsShort : '1월_2월_3월_4월_5월_6월_7월_8월_9월_10월_11월_12월'.split('_'), weekdays : '일요일_월요일_화요일_수요일_목요일_금요일_토요일'.split('_'), weekdaysShort : '일_월_화_수_목_금_토'.split('_'), weekdaysMin : '일_월_화_수_목_금_토'.split('_'), longDateFormat : { LT : 'A h시 m분', LTS : 'A h시 m분 s초', L : 'YYYY.MM.DD', LL : 'YYYY년 MMMM D일', LLL : 'YYYY년 MMMM D일 A h시 m분', LLLL : 'YYYY년 MMMM D일 dddd A h시 m분' }, calendar : { sameDay : '오늘 LT', nextDay : '내일 LT', nextWeek : 'dddd LT', lastDay : '어제 LT', lastWeek : '지난주 dddd LT', sameElse : 'L' }, relativeTime : { future : '%s 후', past : '%s 전', s : '몇 초', ss : '%d초', m : '일분', mm : '%d분', h : '한 시간', hh : '%d시간', d : '하루', dd : '%d일', M : '한 달', MM : '%d달', y : '일 년', yy : '%d년' }, ordinalParse : /\d{1,2}일/, ordinal : '%d일', meridiemParse : /오전|오후/, isPM : function (token) { return token === '오후'; }, meridiem : function (hour, minute, isUpper) { return hour < 12 ? '오전' : '오후'; } }); return ko; }))); /***/ }, /* 402 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Kyrgyz [ky] //! author : Chyngyz Arystan uulu : https://github.com/chyngyz ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var suffixes = { 0: '-чү', 1: '-чи', 2: '-чи', 3: '-чү', 4: '-чү', 5: '-чи', 6: '-чы', 7: '-чи', 8: '-чи', 9: '-чу', 10: '-чу', 20: '-чы', 30: '-чу', 40: '-чы', 50: '-чү', 60: '-чы', 70: '-чи', 80: '-чи', 90: '-чу', 100: '-чү' }; var ky = moment.defineLocale('ky', { months : 'январь_февраль_март_апрель_май_июнь_июль_август_сентябрь_октябрь_ноябрь_декабрь'.split('_'), monthsShort : 'янв_фев_март_апр_май_июнь_июль_авг_сен_окт_ноя_дек'.split('_'), weekdays : 'Жекшемби_Дүйшөмбү_Шейшемби_Шаршемби_Бейшемби_Жума_Ишемби'.split('_'), weekdaysShort : 'Жек_Дүй_Шей_Шар_Бей_Жум_Ише'.split('_'), weekdaysMin : 'Жк_Дй_Шй_Шр_Бй_Жм_Иш'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Бүгүн саат] LT', nextDay : '[Эртең саат] LT', nextWeek : 'dddd [саат] LT', lastDay : '[Кече саат] LT', lastWeek : '[Өткен аптанын] dddd [күнү] [саат] LT', sameElse : 'L' }, relativeTime : { future : '%s ичинде', past : '%s мурун', s : 'бирнече секунд', m : 'бир мүнөт', mm : '%d мүнөт', h : 'бир саат', hh : '%d саат', d : 'бир күн', dd : '%d күн', M : 'бир ай', MM : '%d ай', y : 'бир жыл', yy : '%d жыл' }, ordinalParse: /\d{1,2}-(чи|чы|чү|чу)/, ordinal : function (number) { var a = number % 10, b = number >= 100 ? 100 : null; return number + (suffixes[number] || suffixes[a] || suffixes[b]); }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return ky; }))); /***/ }, /* 403 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Luxembourgish [lb] //! author : mweimerskirch : https://github.com/mweimerskirch //! author : David Raison : https://github.com/kwisatz ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function processRelativeTime(number, withoutSuffix, key, isFuture) { var format = { 'm': ['eng Minutt', 'enger Minutt'], 'h': ['eng Stonn', 'enger Stonn'], 'd': ['een Dag', 'engem Dag'], 'M': ['ee Mount', 'engem Mount'], 'y': ['ee Joer', 'engem Joer'] }; return withoutSuffix ? format[key][0] : format[key][1]; } function processFutureTime(string) { var number = string.substr(0, string.indexOf(' ')); if (eifelerRegelAppliesToNumber(number)) { return 'a ' + string; } return 'an ' + string; } function processPastTime(string) { var number = string.substr(0, string.indexOf(' ')); if (eifelerRegelAppliesToNumber(number)) { return 'viru ' + string; } return 'virun ' + string; } /** * Returns true if the word before the given number loses the '-n' ending. * e.g. 'an 10 Deeg' but 'a 5 Deeg' * * @param number {integer} * @returns {boolean} */ function eifelerRegelAppliesToNumber(number) { number = parseInt(number, 10); if (isNaN(number)) { return false; } if (number < 0) { // Negative Number --> always true return true; } else if (number < 10) { // Only 1 digit if (4 <= number && number <= 7) { return true; } return false; } else if (number < 100) { // 2 digits var lastDigit = number % 10, firstDigit = number / 10; if (lastDigit === 0) { return eifelerRegelAppliesToNumber(firstDigit); } return eifelerRegelAppliesToNumber(lastDigit); } else if (number < 10000) { // 3 or 4 digits --> recursively check first digit while (number >= 10) { number = number / 10; } return eifelerRegelAppliesToNumber(number); } else { // Anything larger than 4 digits: recursively check first n-3 digits number = number / 1000; return eifelerRegelAppliesToNumber(number); } } var lb = moment.defineLocale('lb', { months: 'Januar_Februar_Mäerz_Abrëll_Mee_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'), monthsShort: 'Jan._Febr._Mrz._Abr._Mee_Jun._Jul._Aug._Sept._Okt._Nov._Dez.'.split('_'), monthsParseExact : true, weekdays: 'Sonndeg_Méindeg_Dënschdeg_Mëttwoch_Donneschdeg_Freideg_Samschdeg'.split('_'), weekdaysShort: 'So._Mé._Dë._Më._Do._Fr._Sa.'.split('_'), weekdaysMin: 'So_Mé_Dë_Më_Do_Fr_Sa'.split('_'), weekdaysParseExact : true, longDateFormat: { LT: 'H:mm [Auer]', LTS: 'H:mm:ss [Auer]', L: 'DD.MM.YYYY', LL: 'D. MMMM YYYY', LLL: 'D. MMMM YYYY H:mm [Auer]', LLLL: 'dddd, D. MMMM YYYY H:mm [Auer]' }, calendar: { sameDay: '[Haut um] LT', sameElse: 'L', nextDay: '[Muer um] LT', nextWeek: 'dddd [um] LT', lastDay: '[Gëschter um] LT', lastWeek: function () { // Different date string for 'Dënschdeg' (Tuesday) and 'Donneschdeg' (Thursday) due to phonological rule switch (this.day()) { case 2: case 4: return '[Leschten] dddd [um] LT'; default: return '[Leschte] dddd [um] LT'; } } }, relativeTime : { future : processFutureTime, past : processPastTime, s : 'e puer Sekonnen', m : processRelativeTime, mm : '%d Minutten', h : processRelativeTime, hh : '%d Stonnen', d : processRelativeTime, dd : '%d Deeg', M : processRelativeTime, MM : '%d Méint', y : processRelativeTime, yy : '%d Joer' }, ordinalParse: /\d{1,2}\./, ordinal: '%d.', week: { dow: 1, // Monday is the first day of the week. doy: 4 // The week that contains Jan 4th is the first week of the year. } }); return lb; }))); /***/ }, /* 404 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Lao [lo] //! 
author : Ryan Hart : https://github.com/ryanhart2 ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var lo = moment.defineLocale('lo', { months : 'ມັງກອນ_ກຸມພາ_ມີນາ_ເມສາ_ພຶດສະພາ_ມິຖຸນາ_ກໍລະກົດ_ສິງຫາ_ກັນຍາ_ຕຸລາ_ພະຈິກ_ທັນວາ'.split('_'), monthsShort : 'ມັງກອນ_ກຸມພາ_ມີນາ_ເມສາ_ພຶດສະພາ_ມິຖຸນາ_ກໍລະກົດ_ສິງຫາ_ກັນຍາ_ຕຸລາ_ພະຈິກ_ທັນວາ'.split('_'), weekdays : 'ອາທິດ_ຈັນ_ອັງຄານ_ພຸດ_ພະຫັດ_ສຸກ_ເສົາ'.split('_'), weekdaysShort : 'ທິດ_ຈັນ_ອັງຄານ_ພຸດ_ພະຫັດ_ສຸກ_ເສົາ'.split('_'), weekdaysMin : 'ທ_ຈ_ອຄ_ພ_ພຫ_ສກ_ສ'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'ວັນdddd D MMMM YYYY HH:mm' }, meridiemParse: /ຕອນເຊົ້າ|ຕອນແລງ/, isPM: function (input) { return input === 'ຕອນແລງ'; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'ຕອນເຊົ້າ'; } else { return 'ຕອນແລງ'; } }, calendar : { sameDay : '[ມື້ນີ້ເວລາ] LT', nextDay : '[ມື້ອື່ນເວລາ] LT', nextWeek : '[ວັນ]dddd[ໜ້າເວລາ] LT', lastDay : '[ມື້ວານນີ້ເວລາ] LT', lastWeek : '[ວັນ]dddd[ແລ້ວນີ້ເວລາ] LT', sameElse : 'L' }, relativeTime : { future : 'ອີກ %s', past : '%sຜ່ານມາ', s : 'ບໍ່ເທົ່າໃດວິນາທີ', m : '1 ນາທີ', mm : '%d ນາທີ', h : '1 ຊົ່ວໂມງ', hh : '%d ຊົ່ວໂມງ', d : '1 ມື້', dd : '%d ມື້', M : '1 ເດືອນ', MM : '%d ເດືອນ', y : '1 ປີ', yy : '%d ປີ' }, ordinalParse: /(ທີ່)\d{1,2}/, ordinal : function (number) { return 'ທີ່' + number; } }); return lo; }))); /***/ }, /* 405 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Lithuanian [lt] //! author : Mindaugas Mozūras : https://github.com/mmozuras ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var units = { 'm' : 'minutė_minutės_minutę', 'mm': 'minutės_minučių_minutes', 'h' : 'valanda_valandos_valandą', 'hh': 'valandos_valandų_valandas', 'd' : 'diena_dienos_dieną', 'dd': 'dienos_dienų_dienas', 'M' : 'mėnuo_mėnesio_mėnesį', 'MM': 'mėnesiai_mėnesių_mėnesius', 'y' : 'metai_metų_metus', 'yy': 'metai_metų_metus' }; function translateSeconds(number, withoutSuffix, key, isFuture) { if (withoutSuffix) { return 'kelios sekundės'; } else { return isFuture ? 'kelių sekundžių' : 'kelias sekundes'; } } function translateSingular(number, withoutSuffix, key, isFuture) { return withoutSuffix ? forms(key)[0] : (isFuture ? forms(key)[1] : forms(key)[2]); } function special(number) { return number % 10 === 0 || (number > 10 && number < 20); } function forms(key) { return units[key].split('_'); } function translate(number, withoutSuffix, key, isFuture) { var result = number + ' '; if (number === 1) { return result + translateSingular(number, withoutSuffix, key[0], isFuture); } else if (withoutSuffix) { return result + (special(number) ? forms(key)[1] : forms(key)[0]); } else { if (isFuture) { return result + forms(key)[1]; } else { return result + (special(number) ? 
forms(key)[1] : forms(key)[2]); } } } var lt = moment.defineLocale('lt', { months : { format: 'sausio_vasario_kovo_balandžio_gegužės_birželio_liepos_rugpjūčio_rugsėjo_spalio_lapkričio_gruodžio'.split('_'), standalone: 'sausis_vasaris_kovas_balandis_gegužė_birželis_liepa_rugpjūtis_rugsėjis_spalis_lapkritis_gruodis'.split('_'), isFormat: /D[oD]?(\[[^\[\]]*\]|\s)+MMMM?|MMMM?(\[[^\[\]]*\]|\s)+D[oD]?/ }, monthsShort : 'sau_vas_kov_bal_geg_bir_lie_rgp_rgs_spa_lap_grd'.split('_'), weekdays : { format: 'sekmadienį_pirmadienį_antradienį_trečiadienį_ketvirtadienį_penktadienį_šeštadienį'.split('_'), standalone: 'sekmadienis_pirmadienis_antradienis_trečiadienis_ketvirtadienis_penktadienis_šeštadienis'.split('_'), isFormat: /dddd HH:mm/ }, weekdaysShort : 'Sek_Pir_Ant_Tre_Ket_Pen_Šeš'.split('_'), weekdaysMin : 'S_P_A_T_K_Pn_Š'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'YYYY-MM-DD', LL : 'YYYY [m.] MMMM D [d.]', LLL : 'YYYY [m.] MMMM D [d.], HH:mm [val.]', LLLL : 'YYYY [m.] MMMM D [d.], dddd, HH:mm [val.]', l : 'YYYY-MM-DD', ll : 'YYYY [m.] MMMM D [d.]', lll : 'YYYY [m.] MMMM D [d.], HH:mm [val.]', llll : 'YYYY [m.] MMMM D [d.], ddd, HH:mm [val.]' }, calendar : { sameDay : '[Šiandien] LT', nextDay : '[Rytoj] LT', nextWeek : 'dddd LT', lastDay : '[Vakar] LT', lastWeek : '[Praėjusį] dddd LT', sameElse : 'L' }, relativeTime : { future : 'po %s', past : 'prieš %s', s : translateSeconds, m : translateSingular, mm : translate, h : translateSingular, hh : translate, d : translateSingular, dd : translate, M : translateSingular, MM : translate, y : translateSingular, yy : translate }, ordinalParse: /\d{1,2}-oji/, ordinal : function (number) { return number + '-oji'; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return lt; }))); /***/ }, /* 406 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Latvian [lv] //! author : Kristaps Karlsons : https://github.com/skakri //! author : Jānis Elmeris : https://github.com/JanisE ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var units = { 'm': 'minūtes_minūtēm_minūte_minūtes'.split('_'), 'mm': 'minūtes_minūtēm_minūte_minūtes'.split('_'), 'h': 'stundas_stundām_stunda_stundas'.split('_'), 'hh': 'stundas_stundām_stunda_stundas'.split('_'), 'd': 'dienas_dienām_diena_dienas'.split('_'), 'dd': 'dienas_dienām_diena_dienas'.split('_'), 'M': 'mēneša_mēnešiem_mēnesis_mēneši'.split('_'), 'MM': 'mēneša_mēnešiem_mēnesis_mēneši'.split('_'), 'y': 'gada_gadiem_gads_gadi'.split('_'), 'yy': 'gada_gadiem_gads_gadi'.split('_') }; /** * @param withoutSuffix boolean true = a length of time; false = before/after a period of time. */ function format(forms, number, withoutSuffix) { if (withoutSuffix) { // E.g. "21 minūte", "3 minūtes". return number % 10 === 1 && number % 100 !== 11 ? forms[2] : forms[3]; } else { // E.g. "21 minūtes" as in "pēc 21 minūtes". // E.g. "3 minūtēm" as in "pēc 3 minūtēm". return number % 10 === 1 && number % 100 !== 11 ? 
forms[0] : forms[1]; } } function relativeTimeWithPlural(number, withoutSuffix, key) { return number + ' ' + format(units[key], number, withoutSuffix); } function relativeTimeWithSingular(number, withoutSuffix, key) { return format(units[key], number, withoutSuffix); } function relativeSeconds(number, withoutSuffix) { return withoutSuffix ? 'dažas sekundes' : 'dažām sekundēm'; } var lv = moment.defineLocale('lv', { months : 'janvāris_februāris_marts_aprīlis_maijs_jūnijs_jūlijs_augusts_septembris_oktobris_novembris_decembris'.split('_'), monthsShort : 'jan_feb_mar_apr_mai_jūn_jūl_aug_sep_okt_nov_dec'.split('_'), weekdays : 'svētdiena_pirmdiena_otrdiena_trešdiena_ceturtdiena_piektdiena_sestdiena'.split('_'), weekdaysShort : 'Sv_P_O_T_C_Pk_S'.split('_'), weekdaysMin : 'Sv_P_O_T_C_Pk_S'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY.', LL : 'YYYY. [gada] D. MMMM', LLL : 'YYYY. [gada] D. MMMM, HH:mm', LLLL : 'YYYY. [gada] D. MMMM, dddd, HH:mm' }, calendar : { sameDay : '[Šodien pulksten] LT', nextDay : '[Rīt pulksten] LT', nextWeek : 'dddd [pulksten] LT', lastDay : '[Vakar pulksten] LT', lastWeek : '[Pagājušā] dddd [pulksten] LT', sameElse : 'L' }, relativeTime : { future : 'pēc %s', past : 'pirms %s', s : relativeSeconds, m : relativeTimeWithSingular, mm : relativeTimeWithPlural, h : relativeTimeWithSingular, hh : relativeTimeWithPlural, d : relativeTimeWithSingular, dd : relativeTimeWithPlural, M : relativeTimeWithSingular, MM : relativeTimeWithPlural, y : relativeTimeWithSingular, yy : relativeTimeWithPlural }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return lv; }))); /***/ }, /* 407 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Montenegrin [me] //! author : Miodrag Nikač <[email protected]> : https://github.com/miodragnikac ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var translator = { words: { //Different grammatical cases m: ['jedan minut', 'jednog minuta'], mm: ['minut', 'minuta', 'minuta'], h: ['jedan sat', 'jednog sata'], hh: ['sat', 'sata', 'sati'], dd: ['dan', 'dana', 'dana'], MM: ['mjesec', 'mjeseca', 'mjeseci'], yy: ['godina', 'godine', 'godina'] }, correctGrammaticalCase: function (number, wordKey) { return number === 1 ? wordKey[0] : (number >= 2 && number <= 4 ? wordKey[1] : wordKey[2]); }, translate: function (number, withoutSuffix, key) { var wordKey = translator.words[key]; if (key.length === 1) { return withoutSuffix ? wordKey[0] : wordKey[1]; } else { return number + ' ' + translator.correctGrammaticalCase(number, wordKey); } } }; var me = moment.defineLocale('me', { months: 'januar_februar_mart_april_maj_jun_jul_avgust_septembar_oktobar_novembar_decembar'.split('_'), monthsShort: 'jan._feb._mar._apr._maj_jun_jul_avg._sep._okt._nov._dec.'.split('_'), monthsParseExact : true, weekdays: 'nedjelja_ponedjeljak_utorak_srijeda_četvrtak_petak_subota'.split('_'), weekdaysShort: 'ned._pon._uto._sri._čet._pet._sub.'.split('_'), weekdaysMin: 'ne_po_ut_sr_če_pe_su'.split('_'), weekdaysParseExact : true, longDateFormat: { LT: 'H:mm', LTS : 'H:mm:ss', L: 'DD.MM.YYYY', LL: 'D. MMMM YYYY', LLL: 'D. MMMM YYYY H:mm', LLLL: 'dddd, D. 
MMMM YYYY H:mm' }, calendar: { sameDay: '[danas u] LT', nextDay: '[sjutra u] LT', nextWeek: function () { switch (this.day()) { case 0: return '[u] [nedjelju] [u] LT'; case 3: return '[u] [srijedu] [u] LT'; case 6: return '[u] [subotu] [u] LT'; case 1: case 2: case 4: case 5: return '[u] dddd [u] LT'; } }, lastDay : '[juče u] LT', lastWeek : function () { var lastWeekDays = [ '[prošle] [nedjelje] [u] LT', '[prošlog] [ponedjeljka] [u] LT', '[prošlog] [utorka] [u] LT', '[prošle] [srijede] [u] LT', '[prošlog] [četvrtka] [u] LT', '[prošlog] [petka] [u] LT', '[prošle] [subote] [u] LT' ]; return lastWeekDays[this.day()]; }, sameElse : 'L' }, relativeTime : { future : 'za %s', past : 'prije %s', s : 'nekoliko sekundi', m : translator.translate, mm : translator.translate, h : translator.translate, hh : translator.translate, d : 'dan', dd : translator.translate, M : 'mjesec', MM : translator.translate, y : 'godinu', yy : translator.translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return me; }))); /***/ }, /* 408 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Maori [mi] //! author : John Corrigan <[email protected]> : https://github.com/johnideal ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var mi = moment.defineLocale('mi', { months: 'Kohi-tāte_Hui-tanguru_Poutū-te-rangi_Paenga-whāwhā_Haratua_Pipiri_Hōngoingoi_Here-turi-kōkā_Mahuru_Whiringa-ā-nuku_Whiringa-ā-rangi_Hakihea'.split('_'), monthsShort: 'Kohi_Hui_Pou_Pae_Hara_Pipi_Hōngoi_Here_Mahu_Whi-nu_Whi-ra_Haki'.split('_'), monthsRegex: /(?:['a-z\u0101\u014D\u016B]+\-?){1,3}/i, monthsStrictRegex: /(?:['a-z\u0101\u014D\u016B]+\-?){1,3}/i, monthsShortRegex: /(?:['a-z\u0101\u014D\u016B]+\-?){1,3}/i, monthsShortStrictRegex: /(?:['a-z\u0101\u014D\u016B]+\-?){1,2}/i, weekdays: 'Rātapu_Mane_Tūrei_Wenerei_Tāite_Paraire_Hātarei'.split('_'), weekdaysShort: 'Ta_Ma_Tū_We_Tāi_Pa_Hā'.split('_'), weekdaysMin: 'Ta_Ma_Tū_We_Tāi_Pa_Hā'.split('_'), longDateFormat: { LT: 'HH:mm', LTS: 'HH:mm:ss', L: 'DD/MM/YYYY', LL: 'D MMMM YYYY', LLL: 'D MMMM YYYY [i] HH:mm', LLLL: 'dddd, D MMMM YYYY [i] HH:mm' }, calendar: { sameDay: '[i teie mahana, i] LT', nextDay: '[apopo i] LT', nextWeek: 'dddd [i] LT', lastDay: '[inanahi i] LT', lastWeek: 'dddd [whakamutunga i] LT', sameElse: 'L' }, relativeTime: { future: 'i roto i %s', past: '%s i mua', s: 'te hēkona ruarua', m: 'he meneti', mm: '%d meneti', h: 'te haora', hh: '%d haora', d: 'he ra', dd: '%d ra', M: 'he marama', MM: '%d marama', y: 'he tau', yy: '%d tau' }, ordinalParse: /\d{1,2}º/, ordinal: '%dº', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return mi; }))); /***/ }, /* 409 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Macedonian [mk] //! author : Borislav Mickov : https://github.com/B0k0 ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var mk = moment.defineLocale('mk', { months : 'јануари_февруари_март_април_мај_јуни_јули_август_септември_октомври_ноември_декември'.split('_'), monthsShort : 'јан_фев_мар_апр_мај_јун_јул_авг_сеп_окт_ное_дек'.split('_'), weekdays : 'недела_понеделник_вторник_среда_четврток_петок_сабота'.split('_'), weekdaysShort : 'нед_пон_вто_сре_чет_пет_саб'.split('_'), weekdaysMin : 'нe_пo_вт_ср_че_пе_сa'.split('_'), longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'D.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY H:mm', LLLL : 'dddd, D MMMM YYYY H:mm' }, calendar : { sameDay : '[Денес во] LT', nextDay : '[Утре во] LT', nextWeek : '[Во] dddd [во] LT', lastDay : '[Вчера во] LT', lastWeek : function () { switch (this.day()) { case 0: case 3: case 6: return '[Изминатата] dddd [во] LT'; case 1: case 2: case 4: case 5: return '[Изминатиот] dddd [во] LT'; } }, sameElse : 'L' }, relativeTime : { future : 'после %s', past : 'пред %s', s : 'неколку секунди', m : 'минута', mm : '%d минути', h : 'час', hh : '%d часа', d : 'ден', dd : '%d дена', M : 'месец', MM : '%d месеци', y : 'година', yy : '%d години' }, ordinalParse: /\d{1,2}-(ев|ен|ти|ви|ри|ми)/, ordinal : function (number) { var lastDigit = number % 10, last2Digits = number % 100; if (number === 0) { return number + '-ев'; } else if (last2Digits === 0) { return number + '-ен'; } else if (last2Digits > 10 && last2Digits < 20) { return number + '-ти'; } else if (lastDigit === 1) { return number + '-ви'; } else if (lastDigit === 2) { return number + '-ри'; } else if (lastDigit === 7 || lastDigit === 8) { return number + '-ми'; } else { return number + '-ти'; } }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return mk; }))); /***/ }, /* 410 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Malayalam [ml] //! author : Floyd Pink : https://github.com/floydpink ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ml = moment.defineLocale('ml', { months : 'ജനുവരി_ഫെബ്രുവരി_മാർച്ച്_ഏപ്രിൽ_മേയ്_ജൂൺ_ജൂലൈ_ഓഗസ്റ്റ്_സെപ്റ്റംബർ_ഒക്ടോബർ_നവംബർ_ഡിസംബർ'.split('_'), monthsShort : 'ജനു._ഫെബ്രു._മാർ._ഏപ്രി._മേയ്_ജൂൺ_ജൂലൈ._ഓഗ._സെപ്റ്റ._ഒക്ടോ._നവം._ഡിസം.'.split('_'), monthsParseExact : true, weekdays : 'ഞായറാഴ്ച_തിങ്കളാഴ്ച_ചൊവ്വാഴ്ച_ബുധനാഴ്ച_വ്യാഴാഴ്ച_വെള്ളിയാഴ്ച_ശനിയാഴ്ച'.split('_'), weekdaysShort : 'ഞായർ_തിങ്കൾ_ചൊവ്വ_ബുധൻ_വ്യാഴം_വെള്ളി_ശനി'.split('_'), weekdaysMin : 'ഞാ_തി_ചൊ_ബു_വ്യാ_വെ_ശ'.split('_'), longDateFormat : { LT : 'A h:mm -നു', LTS : 'A h:mm:ss -നു', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm -നു', LLLL : 'dddd, D MMMM YYYY, A h:mm -നു' }, calendar : { sameDay : '[ഇന്ന്] LT', nextDay : '[നാളെ] LT', nextWeek : 'dddd, LT', lastDay : '[ഇന്നലെ] LT', lastWeek : '[കഴിഞ്ഞ] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s കഴിഞ്ഞ്', past : '%s മുൻപ്', s : 'അൽപ നിമിഷങ്ങൾ', m : 'ഒരു മിനിറ്റ്', mm : '%d മിനിറ്റ്', h : 'ഒരു മണിക്കൂർ', hh : '%d മണിക്കൂർ', d : 'ഒരു ദിവസം', dd : '%d ദിവസം', M : 'ഒരു മാസം', MM : '%d മാസം', y : 'ഒരു വർഷം', yy : '%d വർഷം' }, meridiemParse: /രാത്രി|രാവിലെ|ഉച്ച കഴിഞ്ഞ്|വൈകുന്നേരം|രാത്രി/i, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if ((meridiem === 'രാത്രി' && hour >= 4) || meridiem === 'ഉച്ച കഴിഞ്ഞ്' || meridiem === 'വൈകുന്നേരം') { return hour + 12; } else { return hour; } }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'രാത്രി'; } else if (hour < 12) { return 'രാവിലെ'; } else if (hour < 17) { return 'ഉച്ച കഴിഞ്ഞ്'; } else if (hour < 20) { return 'വൈകുന്നേരം'; } else { return 'രാത്രി'; } } }); return ml; }))); /***/ }, /* 411 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Marathi [mr] //! author : Harshad Kale : https://github.com/kalehv //! author : Vivek Athalye : https://github.com/vnathalye ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '१', '2': '२', '3': '३', '4': '४', '5': '५', '6': '६', '7': '७', '8': '८', '9': '९', '0': '०' }; var numberMap = { '१': '1', '२': '2', '३': '3', '४': '4', '५': '5', '६': '6', '७': '7', '८': '8', '९': '9', '०': '0' }; function relativeTimeMr(number, withoutSuffix, string, isFuture) { var output = ''; if (withoutSuffix) { switch (string) { case 's': output = 'काही सेकंद'; break; case 'm': output = 'एक मिनिट'; break; case 'mm': output = '%d मिनिटे'; break; case 'h': output = 'एक तास'; break; case 'hh': output = '%d तास'; break; case 'd': output = 'एक दिवस'; break; case 'dd': output = '%d दिवस'; break; case 'M': output = 'एक महिना'; break; case 'MM': output = '%d महिने'; break; case 'y': output = 'एक वर्ष'; break; case 'yy': output = '%d वर्षे'; break; } } else { switch (string) { case 's': output = 'काही सेकंदां'; break; case 'm': output = 'एका मिनिटा'; break; case 'mm': output = '%d मिनिटां'; break; case 'h': output = 'एका तासा'; break; case 'hh': output = '%d तासां'; break; case 'd': output = 'एका दिवसा'; break; case 'dd': output = '%d दिवसां'; break; case 'M': output = 'एका महिन्या'; break; case 'MM': output = '%d महिन्यां'; break; case 'y': output = 'एका वर्षा'; break; case 'yy': output = '%d वर्षां'; break; } } return output.replace(/%d/i, number); } var mr = moment.defineLocale('mr', { months : 'जानेवारी_फेब्रुवारी_मार्च_एप्रिल_मे_जून_जुलै_ऑगस्ट_सप्टेंबर_ऑक्टोबर_नोव्हेंबर_डिसेंबर'.split('_'), monthsShort: 'जाने._फेब्रु._मार्च._एप्रि._मे._जून._जुलै._ऑग._सप्टें._ऑक्टो._नोव्हें._डिसें.'.split('_'), monthsParseExact : true, weekdays : 'रविवार_सोमवार_मंगळवार_बुधवार_गुरूवार_शुक्रवार_शनिवार'.split('_'), weekdaysShort : 'रवि_सोम_मंगळ_बुध_गुरू_शुक्र_शनि'.split('_'), weekdaysMin : 'र_सो_मं_बु_गु_शु_श'.split('_'), longDateFormat : { LT : 'A h:mm वाजता', LTS : 'A h:mm:ss वाजता', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm वाजता', LLLL : 'dddd, D MMMM YYYY, A h:mm वाजता' }, calendar : { sameDay : '[आज] LT', nextDay : '[उद्या] LT', nextWeek : 'dddd, LT', lastDay : '[काल] LT', lastWeek: '[मागील] dddd, LT', sameElse : 'L' }, relativeTime : { future: '%sमध्ये', past: '%sपूर्वी', s: relativeTimeMr, m: relativeTimeMr, mm: relativeTimeMr, h: relativeTimeMr, hh: relativeTimeMr, d: relativeTimeMr, dd: relativeTimeMr, M: relativeTimeMr, MM: relativeTimeMr, y: relativeTimeMr, yy: relativeTimeMr }, preparse: function (string) { return string.replace(/[१२३४५६७८९०]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, meridiemParse: /रात्री|सकाळी|दुपारी|सायंकाळी/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'रात्री') { return hour < 4 ? hour : hour + 12; } else if (meridiem === 'सकाळी') { return hour; } else if (meridiem === 'दुपारी') { return hour >= 10 ? hour : hour + 12; } else if (meridiem === 'सायंकाळी') { return hour + 12; } }, meridiem: function (hour, minute, isLower) { if (hour < 4) { return 'रात्री'; } else if (hour < 10) { return 'सकाळी'; } else if (hour < 17) { return 'दुपारी'; } else if (hour < 20) { return 'सायंकाळी'; } else { return 'रात्री'; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return mr; }))); /***/ }, /* 412 */ /***/ function(module, exports, __webpack_require__) { //! 
moment.js locale configuration //! locale : Malay [ms] //! author : Weldan Jamili : https://github.com/weldan ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ms = moment.defineLocale('ms', { months : 'Januari_Februari_Mac_April_Mei_Jun_Julai_Ogos_September_Oktober_November_Disember'.split('_'), monthsShort : 'Jan_Feb_Mac_Apr_Mei_Jun_Jul_Ogs_Sep_Okt_Nov_Dis'.split('_'), weekdays : 'Ahad_Isnin_Selasa_Rabu_Khamis_Jumaat_Sabtu'.split('_'), weekdaysShort : 'Ahd_Isn_Sel_Rab_Kha_Jum_Sab'.split('_'), weekdaysMin : 'Ah_Is_Sl_Rb_Km_Jm_Sb'.split('_'), longDateFormat : { LT : 'HH.mm', LTS : 'HH.mm.ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY [pukul] HH.mm', LLLL : 'dddd, D MMMM YYYY [pukul] HH.mm' }, meridiemParse: /pagi|tengahari|petang|malam/, meridiemHour: function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'pagi') { return hour; } else if (meridiem === 'tengahari') { return hour >= 11 ? hour : hour + 12; } else if (meridiem === 'petang' || meridiem === 'malam') { return hour + 12; } }, meridiem : function (hours, minutes, isLower) { if (hours < 11) { return 'pagi'; } else if (hours < 15) { return 'tengahari'; } else if (hours < 19) { return 'petang'; } else { return 'malam'; } }, calendar : { sameDay : '[Hari ini pukul] LT', nextDay : '[Esok pukul] LT', nextWeek : 'dddd [pukul] LT', lastDay : '[Kelmarin pukul] LT', lastWeek : 'dddd [lepas pukul] LT', sameElse : 'L' }, relativeTime : { future : 'dalam %s', past : '%s yang lepas', s : 'beberapa saat', m : 'seminit', mm : '%d minit', h : 'sejam', hh : '%d jam', d : 'sehari', dd : '%d hari', M : 'sebulan', MM : '%d bulan', y : 'setahun', yy : '%d tahun' }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return ms; }))); /***/ }, /* 413 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Malay [ms-my] //! note : DEPRECATED, the correct one is [ms] //! author : Weldan Jamili : https://github.com/weldan ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var msMy = moment.defineLocale('ms-my', { months : 'Januari_Februari_Mac_April_Mei_Jun_Julai_Ogos_September_Oktober_November_Disember'.split('_'), monthsShort : 'Jan_Feb_Mac_Apr_Mei_Jun_Jul_Ogs_Sep_Okt_Nov_Dis'.split('_'), weekdays : 'Ahad_Isnin_Selasa_Rabu_Khamis_Jumaat_Sabtu'.split('_'), weekdaysShort : 'Ahd_Isn_Sel_Rab_Kha_Jum_Sab'.split('_'), weekdaysMin : 'Ah_Is_Sl_Rb_Km_Jm_Sb'.split('_'), longDateFormat : { LT : 'HH.mm', LTS : 'HH.mm.ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY [pukul] HH.mm', LLLL : 'dddd, D MMMM YYYY [pukul] HH.mm' }, meridiemParse: /pagi|tengahari|petang|malam/, meridiemHour: function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'pagi') { return hour; } else if (meridiem === 'tengahari') { return hour >= 11 ? 
hour : hour + 12; } else if (meridiem === 'petang' || meridiem === 'malam') { return hour + 12; } }, meridiem : function (hours, minutes, isLower) { if (hours < 11) { return 'pagi'; } else if (hours < 15) { return 'tengahari'; } else if (hours < 19) { return 'petang'; } else { return 'malam'; } }, calendar : { sameDay : '[Hari ini pukul] LT', nextDay : '[Esok pukul] LT', nextWeek : 'dddd [pukul] LT', lastDay : '[Kelmarin pukul] LT', lastWeek : 'dddd [lepas pukul] LT', sameElse : 'L' }, relativeTime : { future : 'dalam %s', past : '%s yang lepas', s : 'beberapa saat', m : 'seminit', mm : '%d minit', h : 'sejam', hh : '%d jam', d : 'sehari', dd : '%d hari', M : 'sebulan', MM : '%d bulan', y : 'setahun', yy : '%d tahun' }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return msMy; }))); /***/ }, /* 414 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Burmese [my] //! author : Squar team, mysquar.com //! author : David Rossellat : https://github.com/gholadr //! author : Tin Aung Lin : https://github.com/thanyawzinmin ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '၁', '2': '၂', '3': '၃', '4': '၄', '5': '၅', '6': '၆', '7': '၇', '8': '၈', '9': '၉', '0': '၀' }; var numberMap = { '၁': '1', '၂': '2', '၃': '3', '၄': '4', '၅': '5', '၆': '6', '၇': '7', '၈': '8', '၉': '9', '၀': '0' }; var my = moment.defineLocale('my', { months: 'ဇန်နဝါရီ_ဖေဖော်ဝါရီ_မတ်_ဧပြီ_မေ_ဇွန်_ဇူလိုင်_သြဂုတ်_စက်တင်ဘာ_အောက်တိုဘာ_နိုဝင်ဘာ_ဒီဇင်ဘာ'.split('_'), monthsShort: 'ဇန်_ဖေ_မတ်_ပြီ_မေ_ဇွန်_လိုင်_သြ_စက်_အောက်_နို_ဒီ'.split('_'), weekdays: 'တနင်္ဂနွေ_တနင်္လာ_အင်္ဂါ_ဗုဒ္ဓဟူး_ကြာသပတေး_သောကြာ_စနေ'.split('_'), weekdaysShort: 'နွေ_လာ_ဂါ_ဟူး_ကြာ_သော_နေ'.split('_'), weekdaysMin: 'နွေ_လာ_ဂါ_ဟူး_ကြာ_သော_နေ'.split('_'), longDateFormat: { LT: 'HH:mm', LTS: 'HH:mm:ss', L: 'DD/MM/YYYY', LL: 'D MMMM YYYY', LLL: 'D MMMM YYYY HH:mm', LLLL: 'dddd D MMMM YYYY HH:mm' }, calendar: { sameDay: '[ယနေ.] LT [မှာ]', nextDay: '[မနက်ဖြန်] LT [မှာ]', nextWeek: 'dddd LT [မှာ]', lastDay: '[မနေ.က] LT [မှာ]', lastWeek: '[ပြီးခဲ့သော] dddd LT [မှာ]', sameElse: 'L' }, relativeTime: { future: 'လာမည့် %s မှာ', past: 'လွန်ခဲ့သော %s က', s: 'စက္ကန်.အနည်းငယ်', m: 'တစ်မိနစ်', mm: '%d မိနစ်', h: 'တစ်နာရီ', hh: '%d နာရီ', d: 'တစ်ရက်', dd: '%d ရက်', M: 'တစ်လ', MM: '%d လ', y: 'တစ်နှစ်', yy: '%d နှစ်' }, preparse: function (string) { return string.replace(/[၁၂၃၄၅၆၇၈၉၀]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, week: { dow: 1, // Monday is the first day of the week. doy: 4 // The week that contains Jan 1st is the first week of the year. } }); return my; }))); /***/ }, /* 415 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Norwegian Bokmål [nb] //! authors : Espen Hovlandsdal : https://github.com/rexxars //! Sigurd Gartmann : https://github.com/sigurdga ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var nb = moment.defineLocale('nb', { months : 'januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember'.split('_'), monthsShort : 'jan._feb._mars_april_mai_juni_juli_aug._sep._okt._nov._des.'.split('_'), monthsParseExact : true, weekdays : 'søndag_mandag_tirsdag_onsdag_torsdag_fredag_lørdag'.split('_'), weekdaysShort : 'sø._ma._ti._on._to._fr._lø.'.split('_'), weekdaysMin : 'sø_ma_ti_on_to_fr_lø'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY [kl.] HH:mm', LLLL : 'dddd D. MMMM YYYY [kl.] HH:mm' }, calendar : { sameDay: '[i dag kl.] LT', nextDay: '[i morgen kl.] LT', nextWeek: 'dddd [kl.] LT', lastDay: '[i går kl.] LT', lastWeek: '[forrige] dddd [kl.] LT', sameElse: 'L' }, relativeTime : { future : 'om %s', past : '%s siden', s : 'noen sekunder', m : 'ett minutt', mm : '%d minutter', h : 'en time', hh : '%d timer', d : 'en dag', dd : '%d dager', M : 'en måned', MM : '%d måneder', y : 'ett år', yy : '%d år' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return nb; }))); /***/ }, /* 416 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Nepalese [ne] //! author : suvash : https://github.com/suvash ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '१', '2': '२', '3': '३', '4': '४', '5': '५', '6': '६', '7': '७', '8': '८', '9': '९', '0': '०' }; var numberMap = { '१': '1', '२': '2', '३': '3', '४': '4', '५': '5', '६': '6', '७': '7', '८': '8', '९': '9', '०': '0' }; var ne = moment.defineLocale('ne', { months : 'जनवरी_फेब्रुवरी_मार्च_अप्रिल_मई_जुन_जुलाई_अगष्ट_सेप्टेम्बर_अक्टोबर_नोभेम्बर_डिसेम्बर'.split('_'), monthsShort : 'जन._फेब्रु._मार्च_अप्रि._मई_जुन_जुलाई._अग._सेप्ट._अक्टो._नोभे._डिसे.'.split('_'), monthsParseExact : true, weekdays : 'आइतबार_सोमबार_मङ्गलबार_बुधबार_बिहिबार_शुक्रबार_शनिबार'.split('_'), weekdaysShort : 'आइत._सोम._मङ्गल._बुध._बिहि._शुक्र._शनि.'.split('_'), weekdaysMin : 'आ._सो._मं._बु._बि._शु._श.'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'Aको h:mm बजे', LTS : 'Aको h:mm:ss बजे', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, Aको h:mm बजे', LLLL : 'dddd, D MMMM YYYY, Aको h:mm बजे' }, preparse: function (string) { return string.replace(/[१२३४५६७८९०]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, meridiemParse: /राति|बिहान|दिउँसो|साँझ/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'राति') { return hour < 4 ? hour : hour + 12; } else if (meridiem === 'बिहान') { return hour; } else if (meridiem === 'दिउँसो') { return hour >= 10 ? 
hour : hour + 12; } else if (meridiem === 'साँझ') { return hour + 12; } }, meridiem : function (hour, minute, isLower) { if (hour < 3) { return 'राति'; } else if (hour < 12) { return 'बिहान'; } else if (hour < 16) { return 'दिउँसो'; } else if (hour < 20) { return 'साँझ'; } else { return 'राति'; } }, calendar : { sameDay : '[आज] LT', nextDay : '[भोलि] LT', nextWeek : '[आउँदो] dddd[,] LT', lastDay : '[हिजो] LT', lastWeek : '[गएको] dddd[,] LT', sameElse : 'L' }, relativeTime : { future : '%sमा', past : '%s अगाडि', s : 'केही क्षण', m : 'एक मिनेट', mm : '%d मिनेट', h : 'एक घण्टा', hh : '%d घण्टा', d : 'एक दिन', dd : '%d दिन', M : 'एक महिना', MM : '%d महिना', y : 'एक बर्ष', yy : '%d बर्ष' }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return ne; }))); /***/ }, /* 417 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Dutch [nl] //! author : Joris Röling : https://github.com/jorisroling //! author : Jacob Middag : https://github.com/middagj ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var monthsShortWithDots = 'jan._feb._mrt._apr._mei_jun._jul._aug._sep._okt._nov._dec.'.split('_'); var monthsShortWithoutDots = 'jan_feb_mrt_apr_mei_jun_jul_aug_sep_okt_nov_dec'.split('_'); var monthsParse = [/^jan/i, /^feb/i, /^maart|mrt.?$/i, /^apr/i, /^mei$/i, /^jun[i.]?$/i, /^jul[i.]?$/i, /^aug/i, /^sep/i, /^okt/i, /^nov/i, /^dec/i]; var monthsRegex = /^(januari|februari|maart|april|mei|april|ju[nl]i|augustus|september|oktober|november|december|jan\.?|feb\.?|mrt\.?|apr\.?|ju[nl]\.?|aug\.?|sep\.?|okt\.?|nov\.?|dec\.?)/i; var nl = moment.defineLocale('nl', { months : 'januari_februari_maart_april_mei_juni_juli_augustus_september_oktober_november_december'.split('_'), monthsShort : function (m, format) { if (/-MMM-/.test(format)) { return monthsShortWithoutDots[m.month()]; } else { return monthsShortWithDots[m.month()]; } }, monthsRegex: monthsRegex, monthsShortRegex: monthsRegex, monthsStrictRegex: /^(januari|februari|maart|mei|ju[nl]i|april|augustus|september|oktober|november|december)/i, monthsShortStrictRegex: /^(jan\.?|feb\.?|mrt\.?|apr\.?|mei|ju[nl]\.?|aug\.?|sep\.?|okt\.?|nov\.?|dec\.?)/i, monthsParse : monthsParse, longMonthsParse : monthsParse, shortMonthsParse : monthsParse, weekdays : 'zondag_maandag_dinsdag_woensdag_donderdag_vrijdag_zaterdag'.split('_'), weekdaysShort : 'zo._ma._di._wo._do._vr._za.'.split('_'), weekdaysMin : 'Zo_Ma_Di_Wo_Do_Vr_Za'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD-MM-YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[vandaag om] LT', nextDay: '[morgen om] LT', nextWeek: 'dddd [om] LT', lastDay: '[gisteren om] LT', lastWeek: '[afgelopen] dddd [om] LT', sameElse: 'L' }, relativeTime : { future : 'over %s', past : '%s geleden', s : 'een paar seconden', m : 'één minuut', mm : '%d minuten', h : 'één uur', hh : '%d uur', d : 'één dag', dd : '%d dagen', M : 'één maand', MM : '%d maanden', y : 'één jaar', yy : '%d jaar' }, ordinalParse: /\d{1,2}(ste|de)/, ordinal : function (number) { return number + ((number === 1 || number === 8 || number >= 20) ? 'ste' : 'de'); }, week : { dow : 1, // Monday is the first day of the week. 
doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return nl; }))); /***/ }, /* 418 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Dutch (Belgium) [nl-be] //! author : Joris Röling : https://github.com/jorisroling //! author : Jacob Middag : https://github.com/middagj ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var monthsShortWithDots = 'jan._feb._mrt._apr._mei_jun._jul._aug._sep._okt._nov._dec.'.split('_'); var monthsShortWithoutDots = 'jan_feb_mrt_apr_mei_jun_jul_aug_sep_okt_nov_dec'.split('_'); var monthsParse = [/^jan/i, /^feb/i, /^maart|mrt.?$/i, /^apr/i, /^mei$/i, /^jun[i.]?$/i, /^jul[i.]?$/i, /^aug/i, /^sep/i, /^okt/i, /^nov/i, /^dec/i]; var monthsRegex = /^(januari|februari|maart|april|mei|april|ju[nl]i|augustus|september|oktober|november|december|jan\.?|feb\.?|mrt\.?|apr\.?|ju[nl]\.?|aug\.?|sep\.?|okt\.?|nov\.?|dec\.?)/i; var nlBe = moment.defineLocale('nl-be', { months : 'januari_februari_maart_april_mei_juni_juli_augustus_september_oktober_november_december'.split('_'), monthsShort : function (m, format) { if (/-MMM-/.test(format)) { return monthsShortWithoutDots[m.month()]; } else { return monthsShortWithDots[m.month()]; } }, monthsRegex: monthsRegex, monthsShortRegex: monthsRegex, monthsStrictRegex: /^(januari|februari|maart|mei|ju[nl]i|april|augustus|september|oktober|november|december)/i, monthsShortStrictRegex: /^(jan\.?|feb\.?|mrt\.?|apr\.?|mei|ju[nl]\.?|aug\.?|sep\.?|okt\.?|nov\.?|dec\.?)/i, monthsParse : monthsParse, longMonthsParse : monthsParse, shortMonthsParse : monthsParse, weekdays : 'zondag_maandag_dinsdag_woensdag_donderdag_vrijdag_zaterdag'.split('_'), weekdaysShort : 'zo._ma._di._wo._do._vr._za.'.split('_'), weekdaysMin : 'Zo_Ma_Di_Wo_Do_Vr_Za'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[vandaag om] LT', nextDay: '[morgen om] LT', nextWeek: 'dddd [om] LT', lastDay: '[gisteren om] LT', lastWeek: '[afgelopen] dddd [om] LT', sameElse: 'L' }, relativeTime : { future : 'over %s', past : '%s geleden', s : 'een paar seconden', m : 'één minuut', mm : '%d minuten', h : 'één uur', hh : '%d uur', d : 'één dag', dd : '%d dagen', M : 'één maand', MM : '%d maanden', y : 'één jaar', yy : '%d jaar' }, ordinalParse: /\d{1,2}(ste|de)/, ordinal : function (number) { return number + ((number === 1 || number === 8 || number >= 20) ? 'ste' : 'de'); }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return nlBe; }))); /***/ }, /* 419 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Nynorsk [nn] //! author : https://github.com/mechuwind ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var nn = moment.defineLocale('nn', { months : 'januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember'.split('_'), monthsShort : 'jan_feb_mar_apr_mai_jun_jul_aug_sep_okt_nov_des'.split('_'), weekdays : 'sundag_måndag_tysdag_onsdag_torsdag_fredag_laurdag'.split('_'), weekdaysShort : 'sun_mån_tys_ons_tor_fre_lau'.split('_'), weekdaysMin : 'su_må_ty_on_to_fr_lø'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY [kl.] H:mm', LLLL : 'dddd D. MMMM YYYY [kl.] HH:mm' }, calendar : { sameDay: '[I dag klokka] LT', nextDay: '[I morgon klokka] LT', nextWeek: 'dddd [klokka] LT', lastDay: '[I går klokka] LT', lastWeek: '[Føregåande] dddd [klokka] LT', sameElse: 'L' }, relativeTime : { future : 'om %s', past : '%s sidan', s : 'nokre sekund', m : 'eit minutt', mm : '%d minutt', h : 'ein time', hh : '%d timar', d : 'ein dag', dd : '%d dagar', M : 'ein månad', MM : '%d månader', y : 'eit år', yy : '%d år' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return nn; }))); /***/ }, /* 420 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Punjabi (India) [pa-in] //! author : Harpreet Singh : https://github.com/harpreetkhalsagtbit ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '੧', '2': '੨', '3': '੩', '4': '੪', '5': '੫', '6': '੬', '7': '੭', '8': '੮', '9': '੯', '0': '੦' }; var numberMap = { '੧': '1', '੨': '2', '੩': '3', '੪': '4', '੫': '5', '੬': '6', '੭': '7', '੮': '8', '੯': '9', '੦': '0' }; var paIn = moment.defineLocale('pa-in', { // There are months name as per Nanakshahi Calender but they are not used as rigidly in modern Punjabi. months : 'ਜਨਵਰੀ_ਫ਼ਰਵਰੀ_ਮਾਰਚ_ਅਪ੍ਰੈਲ_ਮਈ_ਜੂਨ_ਜੁਲਾਈ_ਅਗਸਤ_ਸਤੰਬਰ_ਅਕਤੂਬਰ_ਨਵੰਬਰ_ਦਸੰਬਰ'.split('_'), monthsShort : 'ਜਨਵਰੀ_ਫ਼ਰਵਰੀ_ਮਾਰਚ_ਅਪ੍ਰੈਲ_ਮਈ_ਜੂਨ_ਜੁਲਾਈ_ਅਗਸਤ_ਸਤੰਬਰ_ਅਕਤੂਬਰ_ਨਵੰਬਰ_ਦਸੰਬਰ'.split('_'), weekdays : 'ਐਤਵਾਰ_ਸੋਮਵਾਰ_ਮੰਗਲਵਾਰ_ਬੁਧਵਾਰ_ਵੀਰਵਾਰ_ਸ਼ੁੱਕਰਵਾਰ_ਸ਼ਨੀਚਰਵਾਰ'.split('_'), weekdaysShort : 'ਐਤ_ਸੋਮ_ਮੰਗਲ_ਬੁਧ_ਵੀਰ_ਸ਼ੁਕਰ_ਸ਼ਨੀ'.split('_'), weekdaysMin : 'ਐਤ_ਸੋਮ_ਮੰਗਲ_ਬੁਧ_ਵੀਰ_ਸ਼ੁਕਰ_ਸ਼ਨੀ'.split('_'), longDateFormat : { LT : 'A h:mm ਵਜੇ', LTS : 'A h:mm:ss ਵਜੇ', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm ਵਜੇ', LLLL : 'dddd, D MMMM YYYY, A h:mm ਵਜੇ' }, calendar : { sameDay : '[ਅਜ] LT', nextDay : '[ਕਲ] LT', nextWeek : 'dddd, LT', lastDay : '[ਕਲ] LT', lastWeek : '[ਪਿਛਲੇ] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s ਵਿੱਚ', past : '%s ਪਿਛਲੇ', s : 'ਕੁਝ ਸਕਿੰਟ', m : 'ਇਕ ਮਿੰਟ', mm : '%d ਮਿੰਟ', h : 'ਇੱਕ ਘੰਟਾ', hh : '%d ਘੰਟੇ', d : 'ਇੱਕ ਦਿਨ', dd : '%d ਦਿਨ', M : 'ਇੱਕ ਮਹੀਨਾ', MM : '%d ਮਹੀਨੇ', y : 'ਇੱਕ ਸਾਲ', yy : '%d ਸਾਲ' }, preparse: function (string) { return string.replace(/[੧੨੩੪੫੬੭੮੯੦]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, // Punjabi notation for meridiems are quite fuzzy in practice. While there exists // a rigid notion of a 'Pahar' it is not used as rigidly in modern Punjabi. 
meridiemParse: /ਰਾਤ|ਸਵੇਰ|ਦੁਪਹਿਰ|ਸ਼ਾਮ/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'ਰਾਤ') { return hour < 4 ? hour : hour + 12; } else if (meridiem === 'ਸਵੇਰ') { return hour; } else if (meridiem === 'ਦੁਪਹਿਰ') { return hour >= 10 ? hour : hour + 12; } else if (meridiem === 'ਸ਼ਾਮ') { return hour + 12; } }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'ਰਾਤ'; } else if (hour < 10) { return 'ਸਵੇਰ'; } else if (hour < 17) { return 'ਦੁਪਹਿਰ'; } else if (hour < 20) { return 'ਸ਼ਾਮ'; } else { return 'ਰਾਤ'; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return paIn; }))); /***/ }, /* 421 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Polish [pl] //! author : Rafal Hirsz : https://github.com/evoL ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var monthsNominative = 'styczeń_luty_marzec_kwiecień_maj_czerwiec_lipiec_sierpień_wrzesień_październik_listopad_grudzień'.split('_'); var monthsSubjective = 'stycznia_lutego_marca_kwietnia_maja_czerwca_lipca_sierpnia_września_października_listopada_grudnia'.split('_'); function plural(n) { return (n % 10 < 5) && (n % 10 > 1) && ((~~(n / 10) % 10) !== 1); } function translate(number, withoutSuffix, key) { var result = number + ' '; switch (key) { case 'm': return withoutSuffix ? 'minuta' : 'minutę'; case 'mm': return result + (plural(number) ? 'minuty' : 'minut'); case 'h': return withoutSuffix ? 'godzina' : 'godzinę'; case 'hh': return result + (plural(number) ? 'godziny' : 'godzin'); case 'MM': return result + (plural(number) ? 'miesiące' : 'miesięcy'); case 'yy': return result + (plural(number) ? 'lata' : 'lat'); } } var pl = moment.defineLocale('pl', { months : function (momentToFormat, format) { if (format === '') { // Hack: if format empty we know this is used to generate // RegExp by moment. Give then back both valid forms of months // in RegExp ready format. 
return '(' + monthsSubjective[momentToFormat.month()] + '|' + monthsNominative[momentToFormat.month()] + ')'; } else if (/D MMMM/.test(format)) { return monthsSubjective[momentToFormat.month()]; } else { return monthsNominative[momentToFormat.month()]; } }, monthsShort : 'sty_lut_mar_kwi_maj_cze_lip_sie_wrz_paź_lis_gru'.split('_'), weekdays : 'niedziela_poniedziałek_wtorek_środa_czwartek_piątek_sobota'.split('_'), weekdaysShort : 'ndz_pon_wt_śr_czw_pt_sob'.split('_'), weekdaysMin : 'Nd_Pn_Wt_Śr_Cz_Pt_So'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay: '[Dziś o] LT', nextDay: '[Jutro o] LT', nextWeek: '[W] dddd [o] LT', lastDay: '[Wczoraj o] LT', lastWeek: function () { switch (this.day()) { case 0: return '[W zeszłą niedzielę o] LT'; case 3: return '[W zeszłą środę o] LT'; case 6: return '[W zeszłą sobotę o] LT'; default: return '[W zeszły] dddd [o] LT'; } }, sameElse: 'L' }, relativeTime : { future : 'za %s', past : '%s temu', s : 'kilka sekund', m : translate, mm : translate, h : translate, hh : translate, d : '1 dzień', dd : '%d dni', M : 'miesiąc', MM : translate, y : 'rok', yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return pl; }))); /***/ }, /* 422 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Portuguese [pt] //! author : Jefferson : https://github.com/jalex79 ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var pt = moment.defineLocale('pt', { months : 'Janeiro_Fevereiro_Março_Abril_Maio_Junho_Julho_Agosto_Setembro_Outubro_Novembro_Dezembro'.split('_'), monthsShort : 'Jan_Fev_Mar_Abr_Mai_Jun_Jul_Ago_Set_Out_Nov_Dez'.split('_'), weekdays : 'Domingo_Segunda-Feira_Terça-Feira_Quarta-Feira_Quinta-Feira_Sexta-Feira_Sábado'.split('_'), weekdaysShort : 'Dom_Seg_Ter_Qua_Qui_Sex_Sáb'.split('_'), weekdaysMin : 'Dom_2ª_3ª_4ª_5ª_6ª_Sáb'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D [de] MMMM [de] YYYY', LLL : 'D [de] MMMM [de] YYYY HH:mm', LLLL : 'dddd, D [de] MMMM [de] YYYY HH:mm' }, calendar : { sameDay: '[Hoje às] LT', nextDay: '[Amanhã às] LT', nextWeek: 'dddd [às] LT', lastDay: '[Ontem às] LT', lastWeek: function () { return (this.day() === 0 || this.day() === 6) ? '[Último] dddd [às] LT' : // Saturday + Sunday '[Última] dddd [às] LT'; // Monday - Friday }, sameElse: 'L' }, relativeTime : { future : 'em %s', past : 'há %s', s : 'segundos', m : 'um minuto', mm : '%d minutos', h : 'uma hora', hh : '%d horas', d : 'um dia', dd : '%d dias', M : 'um mês', MM : '%d meses', y : 'um ano', yy : '%d anos' }, ordinalParse: /\d{1,2}º/, ordinal : '%dº', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return pt; }))); /***/ }, /* 423 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Portuguese (Brazil) [pt-br] //! author : Caio Ribeiro Pereira : https://github.com/caio-ribeiro-pereira ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ptBr = moment.defineLocale('pt-br', { months : 'Janeiro_Fevereiro_Março_Abril_Maio_Junho_Julho_Agosto_Setembro_Outubro_Novembro_Dezembro'.split('_'), monthsShort : 'Jan_Fev_Mar_Abr_Mai_Jun_Jul_Ago_Set_Out_Nov_Dez'.split('_'), weekdays : 'Domingo_Segunda-feira_Terça-feira_Quarta-feira_Quinta-feira_Sexta-feira_Sábado'.split('_'), weekdaysShort : 'Dom_Seg_Ter_Qua_Qui_Sex_Sáb'.split('_'), weekdaysMin : 'Dom_2ª_3ª_4ª_5ª_6ª_Sáb'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D [de] MMMM [de] YYYY', LLL : 'D [de] MMMM [de] YYYY [às] HH:mm', LLLL : 'dddd, D [de] MMMM [de] YYYY [às] HH:mm' }, calendar : { sameDay: '[Hoje às] LT', nextDay: '[Amanhã às] LT', nextWeek: 'dddd [às] LT', lastDay: '[Ontem às] LT', lastWeek: function () { return (this.day() === 0 || this.day() === 6) ? '[Último] dddd [às] LT' : // Saturday + Sunday '[Última] dddd [às] LT'; // Monday - Friday }, sameElse: 'L' }, relativeTime : { future : 'em %s', past : '%s atrás', s : 'poucos segundos', m : 'um minuto', mm : '%d minutos', h : 'uma hora', hh : '%d horas', d : 'um dia', dd : '%d dias', M : 'um mês', MM : '%d meses', y : 'um ano', yy : '%d anos' }, ordinalParse: /\d{1,2}º/, ordinal : '%dº' }); return ptBr; }))); /***/ }, /* 424 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Romanian [ro] //! author : Vlad Gurdiga : https://github.com/gurdiga //! author : Valentin Agachi : https://github.com/avaly ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function relativeTimeWithPlural(number, withoutSuffix, key) { var format = { 'mm': 'minute', 'hh': 'ore', 'dd': 'zile', 'MM': 'luni', 'yy': 'ani' }, separator = ' '; if (number % 100 >= 20 || (number >= 100 && number % 100 === 0)) { separator = ' de '; } return number + separator + format[key]; } var ro = moment.defineLocale('ro', { months : 'ianuarie_februarie_martie_aprilie_mai_iunie_iulie_august_septembrie_octombrie_noiembrie_decembrie'.split('_'), monthsShort : 'ian._febr._mart._apr._mai_iun._iul._aug._sept._oct._nov._dec.'.split('_'), monthsParseExact: true, weekdays : 'duminică_luni_marți_miercuri_joi_vineri_sâmbătă'.split('_'), weekdaysShort : 'Dum_Lun_Mar_Mie_Joi_Vin_Sâm'.split('_'), weekdaysMin : 'Du_Lu_Ma_Mi_Jo_Vi_Sâ'.split('_'), longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY H:mm', LLLL : 'dddd, D MMMM YYYY H:mm' }, calendar : { sameDay: '[azi la] LT', nextDay: '[mâine la] LT', nextWeek: 'dddd [la] LT', lastDay: '[ieri la] LT', lastWeek: '[fosta] dddd [la] LT', sameElse: 'L' }, relativeTime : { future : 'peste %s', past : '%s în urmă', s : 'câteva secunde', m : 'un minut', mm : relativeTimeWithPlural, h : 'o oră', hh : relativeTimeWithPlural, d : 'o zi', dd : relativeTimeWithPlural, M : 'o lună', MM : relativeTimeWithPlural, y : 'un an', yy : relativeTimeWithPlural }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return ro; }))); /***/ }, /* 425 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! 
locale : Russian [ru] //! author : Viktorminator : https://github.com/Viktorminator //! Author : Menelion Elensúle : https://github.com/Oire //! author : Коренберг Марк : https://github.com/socketpair ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function plural(word, num) { var forms = word.split('_'); return num % 10 === 1 && num % 100 !== 11 ? forms[0] : (num % 10 >= 2 && num % 10 <= 4 && (num % 100 < 10 || num % 100 >= 20) ? forms[1] : forms[2]); } function relativeTimeWithPlural(number, withoutSuffix, key) { var format = { 'mm': withoutSuffix ? 'минута_минуты_минут' : 'минуту_минуты_минут', 'hh': 'час_часа_часов', 'dd': 'день_дня_дней', 'MM': 'месяц_месяца_месяцев', 'yy': 'год_года_лет' }; if (key === 'm') { return withoutSuffix ? 'минута' : 'минуту'; } else { return number + ' ' + plural(format[key], +number); } } var monthsParse = [/^янв/i, /^фев/i, /^мар/i, /^апр/i, /^ма[йя]/i, /^июн/i, /^июл/i, /^авг/i, /^сен/i, /^окт/i, /^ноя/i, /^дек/i]; // http://new.gramota.ru/spravka/rules/139-prop : § 103 // Сокращения месяцев: http://new.gramota.ru/spravka/buro/search-answer?s=242637 // CLDR data: http://www.unicode.org/cldr/charts/28/summary/ru.html#1753 var ru = moment.defineLocale('ru', { months : { format: 'января_февраля_марта_апреля_мая_июня_июля_августа_сентября_октября_ноября_декабря'.split('_'), standalone: 'январь_февраль_март_апрель_май_июнь_июль_август_сентябрь_октябрь_ноябрь_декабрь'.split('_') }, monthsShort : { // по CLDR именно "июл." и "июн.", но какой смысл менять букву на точку ? format: 'янв._февр._мар._апр._мая_июня_июля_авг._сент._окт._нояб._дек.'.split('_'), standalone: 'янв._февр._март_апр._май_июнь_июль_авг._сент._окт._нояб._дек.'.split('_') }, weekdays : { standalone: 'воскресенье_понедельник_вторник_среда_четверг_пятница_суббота'.split('_'), format: 'воскресенье_понедельник_вторник_среду_четверг_пятницу_субботу'.split('_'), isFormat: /\[ ?[Вв] ?(?:прошлую|следующую|эту)? 
?\] ?dddd/ }, weekdaysShort : 'вс_пн_вт_ср_чт_пт_сб'.split('_'), weekdaysMin : 'вс_пн_вт_ср_чт_пт_сб'.split('_'), monthsParse : monthsParse, longMonthsParse : monthsParse, shortMonthsParse : monthsParse, // полные названия с падежами, по три буквы, для некоторых, по 4 буквы, сокращения с точкой и без точки monthsRegex: /^(январ[ья]|янв\.?|феврал[ья]|февр?\.?|марта?|мар\.?|апрел[ья]|апр\.?|ма[йя]|июн[ья]|июн\.?|июл[ья]|июл\.?|августа?|авг\.?|сентябр[ья]|сент?\.?|октябр[ья]|окт\.?|ноябр[ья]|нояб?\.?|декабр[ья]|дек\.?)/i, // копия предыдущего monthsShortRegex: /^(январ[ья]|янв\.?|феврал[ья]|февр?\.?|марта?|мар\.?|апрел[ья]|апр\.?|ма[йя]|июн[ья]|июн\.?|июл[ья]|июл\.?|августа?|авг\.?|сентябр[ья]|сент?\.?|октябр[ья]|окт\.?|ноябр[ья]|нояб?\.?|декабр[ья]|дек\.?)/i, // полные названия с падежами monthsStrictRegex: /^(январ[яь]|феврал[яь]|марта?|апрел[яь]|ма[яй]|июн[яь]|июл[яь]|августа?|сентябр[яь]|октябр[яь]|ноябр[яь]|декабр[яь])/i, // Выражение, которое соотвествует только сокращённым формам monthsShortStrictRegex: /^(янв\.|февр?\.|мар[т.]|апр\.|ма[яй]|июн[ья.]|июл[ья.]|авг\.|сент?\.|окт\.|нояб?\.|дек\.)/i, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY г.', LLL : 'D MMMM YYYY г., HH:mm', LLLL : 'dddd, D MMMM YYYY г., HH:mm' }, calendar : { sameDay: '[Сегодня в] LT', nextDay: '[Завтра в] LT', lastDay: '[Вчера в] LT', nextWeek: function (now) { if (now.week() !== this.week()) { switch (this.day()) { case 0: return '[В следующее] dddd [в] LT'; case 1: case 2: case 4: return '[В следующий] dddd [в] LT'; case 3: case 5: case 6: return '[В следующую] dddd [в] LT'; } } else { if (this.day() === 2) { return '[Во] dddd [в] LT'; } else { return '[В] dddd [в] LT'; } } }, lastWeek: function (now) { if (now.week() !== this.week()) { switch (this.day()) { case 0: return '[В прошлое] dddd [в] LT'; case 1: case 2: case 4: return '[В прошлый] dddd [в] LT'; case 3: case 5: case 6: return '[В прошлую] dddd [в] LT'; } } else { if (this.day() === 2) { return '[Во] dddd [в] LT'; } else { return '[В] dddd [в] LT'; } } }, sameElse: 'L' }, relativeTime : { future : 'через %s', past : '%s назад', s : 'несколько секунд', m : relativeTimeWithPlural, mm : relativeTimeWithPlural, h : 'час', hh : relativeTimeWithPlural, d : 'день', dd : relativeTimeWithPlural, M : 'месяц', MM : relativeTimeWithPlural, y : 'год', yy : relativeTimeWithPlural }, meridiemParse: /ночи|утра|дня|вечера/i, isPM : function (input) { return /^(дня|вечера)$/.test(input); }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'ночи'; } else if (hour < 12) { return 'утра'; } else if (hour < 17) { return 'дня'; } else { return 'вечера'; } }, ordinalParse: /\d{1,2}-(й|го|я)/, ordinal: function (number, period) { switch (period) { case 'M': case 'd': case 'DDD': return number + '-й'; case 'D': return number + '-го'; case 'w': case 'W': return number + '-я'; default: return number; } }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return ru; }))); /***/ }, /* 426 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Northern Sami [se] //! authors : Bård Rolstad Henriksen : https://github.com/karamell ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var se = moment.defineLocale('se', { months : 'ođđajagemánnu_guovvamánnu_njukčamánnu_cuoŋománnu_miessemánnu_geassemánnu_suoidnemánnu_borgemánnu_čakčamánnu_golggotmánnu_skábmamánnu_juovlamánnu'.split('_'), monthsShort : 'ođđj_guov_njuk_cuo_mies_geas_suoi_borg_čakč_golg_skáb_juov'.split('_'), weekdays : 'sotnabeaivi_vuossárga_maŋŋebárga_gaskavahkku_duorastat_bearjadat_lávvardat'.split('_'), weekdaysShort : 'sotn_vuos_maŋ_gask_duor_bear_láv'.split('_'), weekdaysMin : 's_v_m_g_d_b_L'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'MMMM D. [b.] YYYY', LLL : 'MMMM D. [b.] YYYY [ti.] HH:mm', LLLL : 'dddd, MMMM D. [b.] YYYY [ti.] HH:mm' }, calendar : { sameDay: '[otne ti] LT', nextDay: '[ihttin ti] LT', nextWeek: 'dddd [ti] LT', lastDay: '[ikte ti] LT', lastWeek: '[ovddit] dddd [ti] LT', sameElse: 'L' }, relativeTime : { future : '%s geažes', past : 'maŋit %s', s : 'moadde sekunddat', m : 'okta minuhta', mm : '%d minuhtat', h : 'okta diimmu', hh : '%d diimmut', d : 'okta beaivi', dd : '%d beaivvit', M : 'okta mánnu', MM : '%d mánut', y : 'okta jahki', yy : '%d jagit' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return se; }))); /***/ }, /* 427 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Sinhalese [si] //! author : Sampath Sitinamaluwa : https://github.com/sampathsris ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; /*jshint -W100*/ var si = moment.defineLocale('si', { months : 'ජනවාරි_පෙබරවාරි_මාර්තු_අප්‍රේල්_මැයි_ජූනි_ජූලි_අගෝස්තු_සැප්තැම්බර්_ඔක්තෝබර්_නොවැම්බර්_දෙසැම්බර්'.split('_'), monthsShort : 'ජන_පෙබ_මාර්_අප්_මැයි_ජූනි_ජූලි_අගෝ_සැප්_ඔක්_නොවැ_දෙසැ'.split('_'), weekdays : 'ඉරිදා_සඳුදා_අඟහරුවාදා_බදාදා_බ්‍රහස්පතින්දා_සිකුරාදා_සෙනසුරාදා'.split('_'), weekdaysShort : 'ඉරි_සඳු_අඟ_බදා_බ්‍රහ_සිකු_සෙන'.split('_'), weekdaysMin : 'ඉ_ස_අ_බ_බ්‍ර_සි_සෙ'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'a h:mm', LTS : 'a h:mm:ss', L : 'YYYY/MM/DD', LL : 'YYYY MMMM D', LLL : 'YYYY MMMM D, a h:mm', LLLL : 'YYYY MMMM D [වැනි] dddd, a h:mm:ss' }, calendar : { sameDay : '[අද] LT[ට]', nextDay : '[හෙට] LT[ට]', nextWeek : 'dddd LT[ට]', lastDay : '[ඊයේ] LT[ට]', lastWeek : '[පසුගිය] dddd LT[ට]', sameElse : 'L' }, relativeTime : { future : '%sකින්', past : '%sකට පෙර', s : 'තත්පර කිහිපය', m : 'මිනිත්තුව', mm : 'මිනිත්තු %d', h : 'පැය', hh : 'පැය %d', d : 'දිනය', dd : 'දින %d', M : 'මාසය', MM : 'මාස %d', y : 'වසර', yy : 'වසර %d' }, ordinalParse: /\d{1,2} වැනි/, ordinal : function (number) { return number + ' වැනි'; }, meridiemParse : /පෙර වරු|පස් වරු|පෙ.ව|ප.ව./, isPM : function (input) { return input === 'ප.ව.' || input === 'පස් වරු'; }, meridiem : function (hours, minutes, isLower) { if (hours > 11) { return isLower ? 'ප.ව.' : 'පස් වරු'; } else { return isLower ? 'පෙ.ව.' : 'පෙර වරු'; } } }); return si; }))); /***/ }, /* 428 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Slovak [sk] //! author : Martin Minka : https://github.com/k2s //! based on work of petrbela : https://github.com/petrbela ;(function (global, factory) { true ? 
factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var months = 'január_február_marec_apríl_máj_jún_júl_august_september_október_november_december'.split('_'); var monthsShort = 'jan_feb_mar_apr_máj_jún_júl_aug_sep_okt_nov_dec'.split('_'); function plural(n) { return (n > 1) && (n < 5); } function translate(number, withoutSuffix, key, isFuture) { var result = number + ' '; switch (key) { case 's': // a few seconds / in a few seconds / a few seconds ago return (withoutSuffix || isFuture) ? 'pár sekúnd' : 'pár sekundami'; case 'm': // a minute / in a minute / a minute ago return withoutSuffix ? 'minúta' : (isFuture ? 'minútu' : 'minútou'); case 'mm': // 9 minutes / in 9 minutes / 9 minutes ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'minúty' : 'minút'); } else { return result + 'minútami'; } break; case 'h': // an hour / in an hour / an hour ago return withoutSuffix ? 'hodina' : (isFuture ? 'hodinu' : 'hodinou'); case 'hh': // 9 hours / in 9 hours / 9 hours ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'hodiny' : 'hodín'); } else { return result + 'hodinami'; } break; case 'd': // a day / in a day / a day ago return (withoutSuffix || isFuture) ? 'deň' : 'dňom'; case 'dd': // 9 days / in 9 days / 9 days ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'dni' : 'dní'); } else { return result + 'dňami'; } break; case 'M': // a month / in a month / a month ago return (withoutSuffix || isFuture) ? 'mesiac' : 'mesiacom'; case 'MM': // 9 months / in 9 months / 9 months ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'mesiace' : 'mesiacov'); } else { return result + 'mesiacmi'; } break; case 'y': // a year / in a year / a year ago return (withoutSuffix || isFuture) ? 'rok' : 'rokom'; case 'yy': // 9 years / in 9 years / 9 years ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'roky' : 'rokov'); } else { return result + 'rokmi'; } break; } } var sk = moment.defineLocale('sk', { months : months, monthsShort : monthsShort, weekdays : 'nedeľa_pondelok_utorok_streda_štvrtok_piatok_sobota'.split('_'), weekdaysShort : 'ne_po_ut_st_št_pi_so'.split('_'), weekdaysMin : 'ne_po_ut_st_št_pi_so'.split('_'), longDateFormat : { LT: 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd D. MMMM YYYY H:mm' }, calendar : { sameDay: '[dnes o] LT', nextDay: '[zajtra o] LT', nextWeek: function () { switch (this.day()) { case 0: return '[v nedeľu o] LT'; case 1: case 2: return '[v] dddd [o] LT'; case 3: return '[v stredu o] LT'; case 4: return '[vo štvrtok o] LT'; case 5: return '[v piatok o] LT'; case 6: return '[v sobotu o] LT'; } }, lastDay: '[včera o] LT', lastWeek: function () { switch (this.day()) { case 0: return '[minulú nedeľu o] LT'; case 1: case 2: return '[minulý] dddd [o] LT'; case 3: return '[minulú stredu o] LT'; case 4: case 5: return '[minulý] dddd [o] LT'; case 6: return '[minulú sobotu o] LT'; } }, sameElse: 'L' }, relativeTime : { future : 'za %s', past : 'pred %s', s : translate, m : translate, mm : translate, h : translate, hh : translate, d : translate, dd : translate, M : translate, MM : translate, y : translate, yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. 
} }); return sk; }))); /***/ }, /* 429 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Slovenian [sl] //! author : Robert Sedovšek : https://github.com/sedovsek ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function processRelativeTime(number, withoutSuffix, key, isFuture) { var result = number + ' '; switch (key) { case 's': return withoutSuffix || isFuture ? 'nekaj sekund' : 'nekaj sekundami'; case 'm': return withoutSuffix ? 'ena minuta' : 'eno minuto'; case 'mm': if (number === 1) { result += withoutSuffix ? 'minuta' : 'minuto'; } else if (number === 2) { result += withoutSuffix || isFuture ? 'minuti' : 'minutama'; } else if (number < 5) { result += withoutSuffix || isFuture ? 'minute' : 'minutami'; } else { result += withoutSuffix || isFuture ? 'minut' : 'minutami'; } return result; case 'h': return withoutSuffix ? 'ena ura' : 'eno uro'; case 'hh': if (number === 1) { result += withoutSuffix ? 'ura' : 'uro'; } else if (number === 2) { result += withoutSuffix || isFuture ? 'uri' : 'urama'; } else if (number < 5) { result += withoutSuffix || isFuture ? 'ure' : 'urami'; } else { result += withoutSuffix || isFuture ? 'ur' : 'urami'; } return result; case 'd': return withoutSuffix || isFuture ? 'en dan' : 'enim dnem'; case 'dd': if (number === 1) { result += withoutSuffix || isFuture ? 'dan' : 'dnem'; } else if (number === 2) { result += withoutSuffix || isFuture ? 'dni' : 'dnevoma'; } else { result += withoutSuffix || isFuture ? 'dni' : 'dnevi'; } return result; case 'M': return withoutSuffix || isFuture ? 'en mesec' : 'enim mesecem'; case 'MM': if (number === 1) { result += withoutSuffix || isFuture ? 'mesec' : 'mesecem'; } else if (number === 2) { result += withoutSuffix || isFuture ? 'meseca' : 'mesecema'; } else if (number < 5) { result += withoutSuffix || isFuture ? 'mesece' : 'meseci'; } else { result += withoutSuffix || isFuture ? 'mesecev' : 'meseci'; } return result; case 'y': return withoutSuffix || isFuture ? 'eno leto' : 'enim letom'; case 'yy': if (number === 1) { result += withoutSuffix || isFuture ? 'leto' : 'letom'; } else if (number === 2) { result += withoutSuffix || isFuture ? 'leti' : 'letoma'; } else if (number < 5) { result += withoutSuffix || isFuture ? 'leta' : 'leti'; } else { result += withoutSuffix || isFuture ? 'let' : 'leti'; } return result; } } var sl = moment.defineLocale('sl', { months : 'januar_februar_marec_april_maj_junij_julij_avgust_september_oktober_november_december'.split('_'), monthsShort : 'jan._feb._mar._apr._maj._jun._jul._avg._sep._okt._nov._dec.'.split('_'), monthsParseExact: true, weekdays : 'nedelja_ponedeljek_torek_sreda_četrtek_petek_sobota'.split('_'), weekdaysShort : 'ned._pon._tor._sre._čet._pet._sob.'.split('_'), weekdaysMin : 'ne_po_to_sr_če_pe_so'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd, D. 
MMMM YYYY H:mm' }, calendar : { sameDay : '[danes ob] LT', nextDay : '[jutri ob] LT', nextWeek : function () { switch (this.day()) { case 0: return '[v] [nedeljo] [ob] LT'; case 3: return '[v] [sredo] [ob] LT'; case 6: return '[v] [soboto] [ob] LT'; case 1: case 2: case 4: case 5: return '[v] dddd [ob] LT'; } }, lastDay : '[včeraj ob] LT', lastWeek : function () { switch (this.day()) { case 0: return '[prejšnjo] [nedeljo] [ob] LT'; case 3: return '[prejšnjo] [sredo] [ob] LT'; case 6: return '[prejšnjo] [soboto] [ob] LT'; case 1: case 2: case 4: case 5: return '[prejšnji] dddd [ob] LT'; } }, sameElse : 'L' }, relativeTime : { future : 'čez %s', past : 'pred %s', s : processRelativeTime, m : processRelativeTime, mm : processRelativeTime, h : processRelativeTime, hh : processRelativeTime, d : processRelativeTime, dd : processRelativeTime, M : processRelativeTime, MM : processRelativeTime, y : processRelativeTime, yy : processRelativeTime }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return sl; }))); /***/ }, /* 430 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Albanian [sq] //! author : Flakërim Ismani : https://github.com/flakerimi //! author : Menelion Elensúle : https://github.com/Oire //! author : Oerd Cukalla : https://github.com/oerd ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var sq = moment.defineLocale('sq', { months : 'Janar_Shkurt_Mars_Prill_Maj_Qershor_Korrik_Gusht_Shtator_Tetor_Nëntor_Dhjetor'.split('_'), monthsShort : 'Jan_Shk_Mar_Pri_Maj_Qer_Kor_Gus_Sht_Tet_Nën_Dhj'.split('_'), weekdays : 'E Diel_E Hënë_E Martë_E Mërkurë_E Enjte_E Premte_E Shtunë'.split('_'), weekdaysShort : 'Die_Hën_Mar_Mër_Enj_Pre_Sht'.split('_'), weekdaysMin : 'D_H_Ma_Më_E_P_Sh'.split('_'), weekdaysParseExact : true, meridiemParse: /PD|MD/, isPM: function (input) { return input.charAt(0) === 'M'; }, meridiem : function (hours, minutes, isLower) { return hours < 12 ? 'PD' : 'MD'; }, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Sot në] LT', nextDay : '[Nesër në] LT', nextWeek : 'dddd [në] LT', lastDay : '[Dje në] LT', lastWeek : 'dddd [e kaluar në] LT', sameElse : 'L' }, relativeTime : { future : 'në %s', past : '%s më parë', s : 'disa sekonda', m : 'një minutë', mm : '%d minuta', h : 'një orë', hh : '%d orë', d : 'një ditë', dd : '%d ditë', M : 'një muaj', MM : '%d muaj', y : 'një vit', yy : '%d vite' }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return sq; }))); /***/ }, /* 431 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Serbian [sr] //! author : Milan Janačković<[email protected]> : https://github.com/milan-j ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var translator = { words: { //Different grammatical cases m: ['jedan minut', 'jedne minute'], mm: ['minut', 'minute', 'minuta'], h: ['jedan sat', 'jednog sata'], hh: ['sat', 'sata', 'sati'], dd: ['dan', 'dana', 'dana'], MM: ['mesec', 'meseca', 'meseci'], yy: ['godina', 'godine', 'godina'] }, correctGrammaticalCase: function (number, wordKey) { return number === 1 ? wordKey[0] : (number >= 2 && number <= 4 ? wordKey[1] : wordKey[2]); }, translate: function (number, withoutSuffix, key) { var wordKey = translator.words[key]; if (key.length === 1) { return withoutSuffix ? wordKey[0] : wordKey[1]; } else { return number + ' ' + translator.correctGrammaticalCase(number, wordKey); } } }; var sr = moment.defineLocale('sr', { months: 'januar_februar_mart_april_maj_jun_jul_avgust_septembar_oktobar_novembar_decembar'.split('_'), monthsShort: 'jan._feb._mar._apr._maj_jun_jul_avg._sep._okt._nov._dec.'.split('_'), monthsParseExact: true, weekdays: 'nedelja_ponedeljak_utorak_sreda_četvrtak_petak_subota'.split('_'), weekdaysShort: 'ned._pon._uto._sre._čet._pet._sub.'.split('_'), weekdaysMin: 'ne_po_ut_sr_če_pe_su'.split('_'), weekdaysParseExact : true, longDateFormat: { LT: 'H:mm', LTS : 'H:mm:ss', L: 'DD.MM.YYYY', LL: 'D. MMMM YYYY', LLL: 'D. MMMM YYYY H:mm', LLLL: 'dddd, D. MMMM YYYY H:mm' }, calendar: { sameDay: '[danas u] LT', nextDay: '[sutra u] LT', nextWeek: function () { switch (this.day()) { case 0: return '[u] [nedelju] [u] LT'; case 3: return '[u] [sredu] [u] LT'; case 6: return '[u] [subotu] [u] LT'; case 1: case 2: case 4: case 5: return '[u] dddd [u] LT'; } }, lastDay : '[juče u] LT', lastWeek : function () { var lastWeekDays = [ '[prošle] [nedelje] [u] LT', '[prošlog] [ponedeljka] [u] LT', '[prošlog] [utorka] [u] LT', '[prošle] [srede] [u] LT', '[prošlog] [četvrtka] [u] LT', '[prošlog] [petka] [u] LT', '[prošle] [subote] [u] LT' ]; return lastWeekDays[this.day()]; }, sameElse : 'L' }, relativeTime : { future : 'za %s', past : 'pre %s', s : 'nekoliko sekundi', m : translator.translate, mm : translator.translate, h : translator.translate, hh : translator.translate, d : 'dan', dd : translator.translate, M : 'mesec', MM : translator.translate, y : 'godinu', yy : translator.translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return sr; }))); /***/ }, /* 432 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Serbian Cyrillic [sr-cyrl] //! author : Milan Janačković<[email protected]> : https://github.com/milan-j ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var translator = { words: { //Different grammatical cases m: ['један минут', 'једне минуте'], mm: ['минут', 'минуте', 'минута'], h: ['један сат', 'једног сата'], hh: ['сат', 'сата', 'сати'], dd: ['дан', 'дана', 'дана'], MM: ['месец', 'месеца', 'месеци'], yy: ['година', 'године', 'година'] }, correctGrammaticalCase: function (number, wordKey) { return number === 1 ? wordKey[0] : (number >= 2 && number <= 4 ? wordKey[1] : wordKey[2]); }, translate: function (number, withoutSuffix, key) { var wordKey = translator.words[key]; if (key.length === 1) { return withoutSuffix ? 
wordKey[0] : wordKey[1]; } else { return number + ' ' + translator.correctGrammaticalCase(number, wordKey); } } }; var srCyrl = moment.defineLocale('sr-cyrl', { months: 'јануар_фебруар_март_април_мај_јун_јул_август_септембар_октобар_новембар_децембар'.split('_'), monthsShort: 'јан._феб._мар._апр._мај_јун_јул_авг._сеп._окт._нов._дец.'.split('_'), monthsParseExact: true, weekdays: 'недеља_понедељак_уторак_среда_четвртак_петак_субота'.split('_'), weekdaysShort: 'нед._пон._уто._сре._чет._пет._суб.'.split('_'), weekdaysMin: 'не_по_ут_ср_че_пе_су'.split('_'), weekdaysParseExact : true, longDateFormat: { LT: 'H:mm', LTS : 'H:mm:ss', L: 'DD.MM.YYYY', LL: 'D. MMMM YYYY', LLL: 'D. MMMM YYYY H:mm', LLLL: 'dddd, D. MMMM YYYY H:mm' }, calendar: { sameDay: '[данас у] LT', nextDay: '[сутра у] LT', nextWeek: function () { switch (this.day()) { case 0: return '[у] [недељу] [у] LT'; case 3: return '[у] [среду] [у] LT'; case 6: return '[у] [суботу] [у] LT'; case 1: case 2: case 4: case 5: return '[у] dddd [у] LT'; } }, lastDay : '[јуче у] LT', lastWeek : function () { var lastWeekDays = [ '[прошле] [недеље] [у] LT', '[прошлог] [понедељка] [у] LT', '[прошлог] [уторка] [у] LT', '[прошле] [среде] [у] LT', '[прошлог] [четвртка] [у] LT', '[прошлог] [петка] [у] LT', '[прошле] [суботе] [у] LT' ]; return lastWeekDays[this.day()]; }, sameElse : 'L' }, relativeTime : { future : 'за %s', past : 'пре %s', s : 'неколико секунди', m : translator.translate, mm : translator.translate, h : translator.translate, hh : translator.translate, d : 'дан', dd : translator.translate, M : 'месец', MM : translator.translate, y : 'годину', yy : translator.translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return srCyrl; }))); /***/ }, /* 433 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : siSwati [ss] //! author : Nicolai Davies<[email protected]> : https://github.com/nicolaidavies ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var ss = moment.defineLocale('ss', { months : "Bhimbidvwane_Indlovana_Indlov'lenkhulu_Mabasa_Inkhwekhweti_Inhlaba_Kholwane_Ingci_Inyoni_Imphala_Lweti_Ingongoni".split('_'), monthsShort : 'Bhi_Ina_Inu_Mab_Ink_Inh_Kho_Igc_Iny_Imp_Lwe_Igo'.split('_'), weekdays : 'Lisontfo_Umsombuluko_Lesibili_Lesitsatfu_Lesine_Lesihlanu_Umgcibelo'.split('_'), weekdaysShort : 'Lis_Umb_Lsb_Les_Lsi_Lsh_Umg'.split('_'), weekdaysMin : 'Li_Us_Lb_Lt_Ls_Lh_Ug'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY h:mm A', LLLL : 'dddd, D MMMM YYYY h:mm A' }, calendar : { sameDay : '[Namuhla nga] LT', nextDay : '[Kusasa nga] LT', nextWeek : 'dddd [nga] LT', lastDay : '[Itolo nga] LT', lastWeek : 'dddd [leliphelile] [nga] LT', sameElse : 'L' }, relativeTime : { future : 'nga %s', past : 'wenteka nga %s', s : 'emizuzwana lomcane', m : 'umzuzu', mm : '%d emizuzu', h : 'lihora', hh : '%d emahora', d : 'lilanga', dd : '%d emalanga', M : 'inyanga', MM : '%d tinyanga', y : 'umnyaka', yy : '%d iminyaka' }, meridiemParse: /ekuseni|emini|entsambama|ebusuku/, meridiem : function (hours, minutes, isLower) { if (hours < 11) { return 'ekuseni'; } else if (hours < 15) { return 'emini'; } else if (hours < 19) { return 'entsambama'; } else { return 'ebusuku'; } }, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'ekuseni') { return hour; } else if (meridiem === 'emini') { return hour >= 11 ? hour : hour + 12; } else if (meridiem === 'entsambama' || meridiem === 'ebusuku') { if (hour === 0) { return 0; } return hour + 12; } }, ordinalParse: /\d{1,2}/, ordinal : '%d', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return ss; }))); /***/ }, /* 434 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Swedish [sv] //! author : Jens Alm : https://github.com/ulmus ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var sv = moment.defineLocale('sv', { months : 'januari_februari_mars_april_maj_juni_juli_augusti_september_oktober_november_december'.split('_'), monthsShort : 'jan_feb_mar_apr_maj_jun_jul_aug_sep_okt_nov_dec'.split('_'), weekdays : 'söndag_måndag_tisdag_onsdag_torsdag_fredag_lördag'.split('_'), weekdaysShort : 'sön_mån_tis_ons_tor_fre_lör'.split('_'), weekdaysMin : 'sö_må_ti_on_to_fr_lö'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'YYYY-MM-DD', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY [kl.] HH:mm', LLLL : 'dddd D MMMM YYYY [kl.] HH:mm', lll : 'D MMM YYYY HH:mm', llll : 'ddd D MMM YYYY HH:mm' }, calendar : { sameDay: '[Idag] LT', nextDay: '[Imorgon] LT', lastDay: '[Igår] LT', nextWeek: '[På] dddd LT', lastWeek: '[I] dddd[s] LT', sameElse: 'L' }, relativeTime : { future : 'om %s', past : 'för %s sedan', s : 'några sekunder', m : 'en minut', mm : '%d minuter', h : 'en timme', hh : '%d timmar', d : 'en dag', dd : '%d dagar', M : 'en månad', MM : '%d månader', y : 'ett år', yy : '%d år' }, ordinalParse: /\d{1,2}(e|a)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'e' : (b === 1) ? 'a' : (b === 2) ? 'a' : (b === 3) ? 
'e' : 'e'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return sv; }))); /***/ }, /* 435 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Swahili [sw] //! author : Fahad Kassim : https://github.com/fadsel ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var sw = moment.defineLocale('sw', { months : 'Januari_Februari_Machi_Aprili_Mei_Juni_Julai_Agosti_Septemba_Oktoba_Novemba_Desemba'.split('_'), monthsShort : 'Jan_Feb_Mac_Apr_Mei_Jun_Jul_Ago_Sep_Okt_Nov_Des'.split('_'), weekdays : 'Jumapili_Jumatatu_Jumanne_Jumatano_Alhamisi_Ijumaa_Jumamosi'.split('_'), weekdaysShort : 'Jpl_Jtat_Jnne_Jtan_Alh_Ijm_Jmos'.split('_'), weekdaysMin : 'J2_J3_J4_J5_Al_Ij_J1'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[leo saa] LT', nextDay : '[kesho saa] LT', nextWeek : '[wiki ijayo] dddd [saat] LT', lastDay : '[jana] LT', lastWeek : '[wiki iliyopita] dddd [saat] LT', sameElse : 'L' }, relativeTime : { future : '%s baadaye', past : 'tokea %s', s : 'hivi punde', m : 'dakika moja', mm : 'dakika %d', h : 'saa limoja', hh : 'masaa %d', d : 'siku moja', dd : 'masiku %d', M : 'mwezi mmoja', MM : 'miezi %d', y : 'mwaka mmoja', yy : 'miaka %d' }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return sw; }))); /***/ }, /* 436 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Tamil [ta] //! author : Arjunkumar Krishnamoorthy : https://github.com/tk120404 ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var symbolMap = { '1': '௧', '2': '௨', '3': '௩', '4': '௪', '5': '௫', '6': '௬', '7': '௭', '8': '௮', '9': '௯', '0': '௦' }; var numberMap = { '௧': '1', '௨': '2', '௩': '3', '௪': '4', '௫': '5', '௬': '6', '௭': '7', '௮': '8', '௯': '9', '௦': '0' }; var ta = moment.defineLocale('ta', { months : 'ஜனவரி_பிப்ரவரி_மார்ச்_ஏப்ரல்_மே_ஜூன்_ஜூலை_ஆகஸ்ட்_செப்டெம்பர்_அக்டோபர்_நவம்பர்_டிசம்பர்'.split('_'), monthsShort : 'ஜனவரி_பிப்ரவரி_மார்ச்_ஏப்ரல்_மே_ஜூன்_ஜூலை_ஆகஸ்ட்_செப்டெம்பர்_அக்டோபர்_நவம்பர்_டிசம்பர்'.split('_'), weekdays : 'ஞாயிற்றுக்கிழமை_திங்கட்கிழமை_செவ்வாய்கிழமை_புதன்கிழமை_வியாழக்கிழமை_வெள்ளிக்கிழமை_சனிக்கிழமை'.split('_'), weekdaysShort : 'ஞாயிறு_திங்கள்_செவ்வாய்_புதன்_வியாழன்_வெள்ளி_சனி'.split('_'), weekdaysMin : 'ஞா_தி_செ_பு_வி_வெ_ச'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, HH:mm', LLLL : 'dddd, D MMMM YYYY, HH:mm' }, calendar : { sameDay : '[இன்று] LT', nextDay : '[நாளை] LT', nextWeek : 'dddd, LT', lastDay : '[நேற்று] LT', lastWeek : '[கடந்த வாரம்] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s இல்', past : '%s முன்', s : 'ஒரு சில விநாடிகள்', m : 'ஒரு நிமிடம்', mm : '%d நிமிடங்கள்', h : 'ஒரு மணி நேரம்', hh : '%d மணி நேரம்', d : 'ஒரு நாள்', dd : '%d நாட்கள்', M : 'ஒரு மாதம்', MM : '%d மாதங்கள்', y : 'ஒரு வருடம்', yy : '%d ஆண்டுகள்' }, ordinalParse: /\d{1,2}வது/, ordinal : function (number) { return number + 'வது'; }, preparse: function (string) { return string.replace(/[௧௨௩௪௫௬௭௮௯௦]/g, function (match) { return numberMap[match]; }); }, postformat: function (string) { return string.replace(/\d/g, function (match) { return symbolMap[match]; }); }, // refer http://ta.wikipedia.org/s/1er1 meridiemParse: /யாமம்|வைகறை|காலை|நண்பகல்|எற்பாடு|மாலை/, meridiem : function (hour, minute, isLower) { if (hour < 2) { return ' யாமம்'; } else if (hour < 6) { return ' வைகறை'; // வைகறை } else if (hour < 10) { return ' காலை'; // காலை } else if (hour < 14) { return ' நண்பகல்'; // நண்பகல் } else if (hour < 18) { return ' எற்பாடு'; // எற்பாடு } else if (hour < 22) { return ' மாலை'; // மாலை } else { return ' யாமம்'; } }, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'யாமம்') { return hour < 2 ? hour : hour + 12; } else if (meridiem === 'வைகறை' || meridiem === 'காலை') { return hour; } else if (meridiem === 'நண்பகல்') { return hour >= 10 ? hour : hour + 12; } else { return hour + 12; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return ta; }))); /***/ }, /* 437 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Telugu [te] //! author : Krishna Chaitanya Thota : https://github.com/kcthota ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var te = moment.defineLocale('te', { months : 'జనవరి_ఫిబ్రవరి_మార్చి_ఏప్రిల్_మే_జూన్_జూలై_ఆగస్టు_సెప్టెంబర్_అక్టోబర్_నవంబర్_డిసెంబర్'.split('_'), monthsShort : 'జన._ఫిబ్ర._మార్చి_ఏప్రి._మే_జూన్_జూలై_ఆగ._సెప్._అక్టో._నవ._డిసె.'.split('_'), monthsParseExact : true, weekdays : 'ఆదివారం_సోమవారం_మంగళవారం_బుధవారం_గురువారం_శుక్రవారం_శనివారం'.split('_'), weekdaysShort : 'ఆది_సోమ_మంగళ_బుధ_గురు_శుక్ర_శని'.split('_'), weekdaysMin : 'ఆ_సో_మం_బు_గు_శు_శ'.split('_'), longDateFormat : { LT : 'A h:mm', LTS : 'A h:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY, A h:mm', LLLL : 'dddd, D MMMM YYYY, A h:mm' }, calendar : { sameDay : '[నేడు] LT', nextDay : '[రేపు] LT', nextWeek : 'dddd, LT', lastDay : '[నిన్న] LT', lastWeek : '[గత] dddd, LT', sameElse : 'L' }, relativeTime : { future : '%s లో', past : '%s క్రితం', s : 'కొన్ని క్షణాలు', m : 'ఒక నిమిషం', mm : '%d నిమిషాలు', h : 'ఒక గంట', hh : '%d గంటలు', d : 'ఒక రోజు', dd : '%d రోజులు', M : 'ఒక నెల', MM : '%d నెలలు', y : 'ఒక సంవత్సరం', yy : '%d సంవత్సరాలు' }, ordinalParse : /\d{1,2}వ/, ordinal : '%dవ', meridiemParse: /రాత్రి|ఉదయం|మధ్యాహ్నం|సాయంత్రం/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === 'రాత్రి') { return hour < 4 ? hour : hour + 12; } else if (meridiem === 'ఉదయం') { return hour; } else if (meridiem === 'మధ్యాహ్నం') { return hour >= 10 ? hour : hour + 12; } else if (meridiem === 'సాయంత్రం') { return hour + 12; } }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'రాత్రి'; } else if (hour < 10) { return 'ఉదయం'; } else if (hour < 17) { return 'మధ్యాహ్నం'; } else if (hour < 20) { return 'సాయంత్రం'; } else { return 'రాత్రి'; } }, week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }); return te; }))); /***/ }, /* 438 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Tetun Dili (East Timor) [tet] //! author : Joshua Brooks : https://github.com/joshbrooks //! author : Onorio De J. Afonso : https://github.com/marobo ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var tet = moment.defineLocale('tet', { months : 'Janeiru_Fevereiru_Marsu_Abril_Maiu_Juniu_Juliu_Augustu_Setembru_Outubru_Novembru_Dezembru'.split('_'), monthsShort : 'Jan_Fev_Mar_Abr_Mai_Jun_Jul_Aug_Set_Out_Nov_Dez'.split('_'), weekdays : 'Domingu_Segunda_Tersa_Kuarta_Kinta_Sexta_Sabadu'.split('_'), weekdaysShort : 'Dom_Seg_Ters_Kua_Kint_Sext_Sab'.split('_'), weekdaysMin : 'Do_Seg_Te_Ku_Ki_Sex_Sa'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay: '[Ohin iha] LT', nextDay: '[Aban iha] LT', nextWeek: 'dddd [iha] LT', lastDay: '[Horiseik iha] LT', lastWeek: 'dddd [semana kotuk] [iha] LT', sameElse: 'L' }, relativeTime : { future : 'iha %s', past : '%s liuba', s : 'minutu balun', m : 'minutu ida', mm : 'minutus %d', h : 'horas ida', hh : 'horas %d', d : 'loron ida', dd : 'loron %d', M : 'fulan ida', MM : 'fulan %d', y : 'tinan ida', yy : 'tinan %d' }, ordinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 
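// The ternary chain continuing below picks an English-style ordinal suffix: any number whose tens
// digit is 1 (11, 12, 13, 111, ...) gets 'th'; otherwise the last digit selects 'st', 'nd' or 'rd',
// falling back to 'th'. For example 2 -> '2nd', 12 -> '12th', 22 -> '22nd'.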
'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return tet; }))); /***/ }, /* 439 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Thai [th] //! author : Kridsada Thanabulpong : https://github.com/sirn ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var th = moment.defineLocale('th', { months : 'มกราคม_กุมภาพันธ์_มีนาคม_เมษายน_พฤษภาคม_มิถุนายน_กรกฎาคม_สิงหาคม_กันยายน_ตุลาคม_พฤศจิกายน_ธันวาคม'.split('_'), monthsShort : 'ม.ค._ก.พ._มี.ค._เม.ย._พ.ค._มิ.ย._ก.ค._ส.ค._ก.ย._ต.ค._พ.ย._ธ.ค.'.split('_'), monthsParseExact: true, weekdays : 'อาทิตย์_จันทร์_อังคาร_พุธ_พฤหัสบดี_ศุกร์_เสาร์'.split('_'), weekdaysShort : 'อาทิตย์_จันทร์_อังคาร_พุธ_พฤหัส_ศุกร์_เสาร์'.split('_'), // yes, three characters difference weekdaysMin : 'อา._จ._อ._พ._พฤ._ศ._ส.'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'H:mm', LTS : 'H:mm:ss', L : 'YYYY/MM/DD', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY เวลา H:mm', LLLL : 'วันddddที่ D MMMM YYYY เวลา H:mm' }, meridiemParse: /ก่อนเที่ยง|หลังเที่ยง/, isPM: function (input) { return input === 'หลังเที่ยง'; }, meridiem : function (hour, minute, isLower) { if (hour < 12) { return 'ก่อนเที่ยง'; } else { return 'หลังเที่ยง'; } }, calendar : { sameDay : '[วันนี้ เวลา] LT', nextDay : '[พรุ่งนี้ เวลา] LT', nextWeek : 'dddd[หน้า เวลา] LT', lastDay : '[เมื่อวานนี้ เวลา] LT', lastWeek : '[วัน]dddd[ที่แล้ว เวลา] LT', sameElse : 'L' }, relativeTime : { future : 'อีก %s', past : '%sที่แล้ว', s : 'ไม่กี่วินาที', m : '1 นาที', mm : '%d นาที', h : '1 ชั่วโมง', hh : '%d ชั่วโมง', d : '1 วัน', dd : '%d วัน', M : '1 เดือน', MM : '%d เดือน', y : '1 ปี', yy : '%d ปี' } }); return th; }))); /***/ }, /* 440 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Tagalog (Philippines) [tl-ph] //! author : Dan Hagman : https://github.com/hagmandan ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var tlPh = moment.defineLocale('tl-ph', { months : 'Enero_Pebrero_Marso_Abril_Mayo_Hunyo_Hulyo_Agosto_Setyembre_Oktubre_Nobyembre_Disyembre'.split('_'), monthsShort : 'Ene_Peb_Mar_Abr_May_Hun_Hul_Ago_Set_Okt_Nob_Dis'.split('_'), weekdays : 'Linggo_Lunes_Martes_Miyerkules_Huwebes_Biyernes_Sabado'.split('_'), weekdaysShort : 'Lin_Lun_Mar_Miy_Huw_Biy_Sab'.split('_'), weekdaysMin : 'Li_Lu_Ma_Mi_Hu_Bi_Sab'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'MM/D/YYYY', LL : 'MMMM D, YYYY', LLL : 'MMMM D, YYYY HH:mm', LLLL : 'dddd, MMMM DD, YYYY HH:mm' }, calendar : { sameDay: 'LT [ngayong araw]', nextDay: '[Bukas ng] LT', nextWeek: 'LT [sa susunod na] dddd', lastDay: 'LT [kahapon]', lastWeek: 'LT [noong nakaraang] dddd', sameElse: 'L' }, relativeTime : { future : 'sa loob ng %s', past : '%s ang nakalipas', s : 'ilang segundo', m : 'isang minuto', mm : '%d minuto', h : 'isang oras', hh : '%d oras', d : 'isang araw', dd : '%d araw', M : 'isang buwan', MM : '%d buwan', y : 'isang taon', yy : '%d taon' }, ordinalParse: /\d{1,2}/, ordinal : function (number) { return number; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return tlPh; }))); /***/ }, /* 441 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Klingon [tlh] //! author : Dominika Kruk : https://github.com/amaranthrose ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var numbersNouns = 'pagh_wa’_cha’_wej_loS_vagh_jav_Soch_chorgh_Hut'.split('_'); function translateFuture(output) { var time = output; time = (output.indexOf('jaj') !== -1) ? time.slice(0, -3) + 'leS' : (output.indexOf('jar') !== -1) ? time.slice(0, -3) + 'waQ' : (output.indexOf('DIS') !== -1) ? time.slice(0, -3) + 'nem' : time + ' pIq'; return time; } function translatePast(output) { var time = output; time = (output.indexOf('jaj') !== -1) ? time.slice(0, -3) + 'Hu’' : (output.indexOf('jar') !== -1) ? time.slice(0, -3) + 'wen' : (output.indexOf('DIS') !== -1) ? time.slice(0, -3) + 'ben' : time + ' ret'; return time; } function translate(number, withoutSuffix, string, isFuture) { var numberNoun = numberAsNoun(number); switch (string) { case 'mm': return numberNoun + ' tup'; case 'hh': return numberNoun + ' rep'; case 'dd': return numberNoun + ' jaj'; case 'MM': return numberNoun + ' jar'; case 'yy': return numberNoun + ' DIS'; } } function numberAsNoun(number) { var hundred = Math.floor((number % 1000) / 100), ten = Math.floor((number % 100) / 10), one = number % 10, word = ''; if (hundred > 0) { word += numbersNouns[hundred] + 'vatlh'; } if (ten > 0) { word += ((word !== '') ? ' ' : '') + numbersNouns[ten] + 'maH'; } if (one > 0) { word += ((word !== '') ? ' ' : '') + numbersNouns[one]; } return (word === '') ? 
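// numberAsNoun composes a Klingon number word digit by digit from the `numbersNouns` table:
// hundreds take the suffix 'vatlh', tens take 'maH', and ones are appended as-is, so
// numberAsNoun(215) yields 'cha’vatlh wa’maH vagh'; an all-zero input falls through to 'pagh' below.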
'pagh' : word; } var tlh = moment.defineLocale('tlh', { months : 'tera’ jar wa’_tera’ jar cha’_tera’ jar wej_tera’ jar loS_tera’ jar vagh_tera’ jar jav_tera’ jar Soch_tera’ jar chorgh_tera’ jar Hut_tera’ jar wa’maH_tera’ jar wa’maH wa’_tera’ jar wa’maH cha’'.split('_'), monthsShort : 'jar wa’_jar cha’_jar wej_jar loS_jar vagh_jar jav_jar Soch_jar chorgh_jar Hut_jar wa’maH_jar wa’maH wa’_jar wa’maH cha’'.split('_'), monthsParseExact : true, weekdays : 'lojmItjaj_DaSjaj_povjaj_ghItlhjaj_loghjaj_buqjaj_ghInjaj'.split('_'), weekdaysShort : 'lojmItjaj_DaSjaj_povjaj_ghItlhjaj_loghjaj_buqjaj_ghInjaj'.split('_'), weekdaysMin : 'lojmItjaj_DaSjaj_povjaj_ghItlhjaj_loghjaj_buqjaj_ghInjaj'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay: '[DaHjaj] LT', nextDay: '[wa’leS] LT', nextWeek: 'LLL', lastDay: '[wa’Hu’] LT', lastWeek: 'LLL', sameElse: 'L' }, relativeTime : { future : translateFuture, past : translatePast, s : 'puS lup', m : 'wa’ tup', mm : translate, h : 'wa’ rep', hh : translate, d : 'wa’ jaj', dd : translate, M : 'wa’ jar', MM : translate, y : 'wa’ DIS', yy : translate }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return tlh; }))); /***/ }, /* 442 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Turkish [tr] //! authors : Erhan Gundogan : https://github.com/erhangundogan, //! Burak Yiğit Kaya: https://github.com/BYK ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var suffixes = { 1: '\'inci', 5: '\'inci', 8: '\'inci', 70: '\'inci', 80: '\'inci', 2: '\'nci', 7: '\'nci', 20: '\'nci', 50: '\'nci', 3: '\'üncü', 4: '\'üncü', 100: '\'üncü', 6: '\'ncı', 9: '\'uncu', 10: '\'uncu', 30: '\'uncu', 60: '\'ıncı', 90: '\'ıncı' }; var tr = moment.defineLocale('tr', { months : 'Ocak_Şubat_Mart_Nisan_Mayıs_Haziran_Temmuz_Ağustos_Eylül_Ekim_Kasım_Aralık'.split('_'), monthsShort : 'Oca_Şub_Mar_Nis_May_Haz_Tem_Ağu_Eyl_Eki_Kas_Ara'.split('_'), weekdays : 'Pazar_Pazartesi_Salı_Çarşamba_Perşembe_Cuma_Cumartesi'.split('_'), weekdaysShort : 'Paz_Pts_Sal_Çar_Per_Cum_Cts'.split('_'), weekdaysMin : 'Pz_Pt_Sa_Ça_Pe_Cu_Ct'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[bugün saat] LT', nextDay : '[yarın saat] LT', nextWeek : '[haftaya] dddd [saat] LT', lastDay : '[dün] LT', lastWeek : '[geçen hafta] dddd [saat] LT', sameElse : 'L' }, relativeTime : { future : '%s sonra', past : '%s önce', s : 'birkaç saniye', m : 'bir dakika', mm : '%d dakika', h : 'bir saat', hh : '%d saat', d : 'bir gün', dd : '%d gün', M : 'bir ay', MM : '%d ay', y : 'bir yıl', yy : '%d yıl' }, ordinalParse: /\d{1,2}'(inci|nci|üncü|ncı|uncu|ıncı)/, ordinal : function (number) { if (number === 0) { // special case for zero return number + '\'ıncı'; } var a = number % 10, b = number % 100 - a, c = number >= 100 ? 100 : null; return number + (suffixes[a] || suffixes[b] || suffixes[c]); }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. 
} }); return tr; }))); /***/ }, /* 443 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Talossan [tzl] //! author : Robin van der Vliet : https://github.com/robin0van0der0v //! author : Iustì Canun ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; // After the year there should be a slash and the amount of years since December 26, 1979 in Roman numerals. // This is currently too difficult (maybe even impossible) to add. var tzl = moment.defineLocale('tzl', { months : 'Januar_Fevraglh_Març_Avrïu_Mai_Gün_Julia_Guscht_Setemvar_Listopäts_Noemvar_Zecemvar'.split('_'), monthsShort : 'Jan_Fev_Mar_Avr_Mai_Gün_Jul_Gus_Set_Lis_Noe_Zec'.split('_'), weekdays : 'Súladi_Lúneçi_Maitzi_Márcuri_Xhúadi_Viénerçi_Sáturi'.split('_'), weekdaysShort : 'Súl_Lún_Mai_Már_Xhú_Vié_Sát'.split('_'), weekdaysMin : 'Sú_Lú_Ma_Má_Xh_Vi_Sá'.split('_'), longDateFormat : { LT : 'HH.mm', LTS : 'HH.mm.ss', L : 'DD.MM.YYYY', LL : 'D. MMMM [dallas] YYYY', LLL : 'D. MMMM [dallas] YYYY HH.mm', LLLL : 'dddd, [li] D. MMMM [dallas] YYYY HH.mm' }, meridiemParse: /d\'o|d\'a/i, isPM : function (input) { return 'd\'o' === input.toLowerCase(); }, meridiem : function (hours, minutes, isLower) { if (hours > 11) { return isLower ? 'd\'o' : 'D\'O'; } else { return isLower ? 'd\'a' : 'D\'A'; } }, calendar : { sameDay : '[oxhi à] LT', nextDay : '[demà à] LT', nextWeek : 'dddd [à] LT', lastDay : '[ieiri à] LT', lastWeek : '[sür el] dddd [lasteu à] LT', sameElse : 'L' }, relativeTime : { future : 'osprei %s', past : 'ja%s', s : processRelativeTime, m : processRelativeTime, mm : processRelativeTime, h : processRelativeTime, hh : processRelativeTime, d : processRelativeTime, dd : processRelativeTime, M : processRelativeTime, MM : processRelativeTime, y : processRelativeTime, yy : processRelativeTime }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); function processRelativeTime(number, withoutSuffix, key, isFuture) { var format = { 's': ['viensas secunds', '\'iensas secunds'], 'm': ['\'n míut', '\'iens míut'], 'mm': [number + ' míuts', '' + number + ' míuts'], 'h': ['\'n þora', '\'iensa þora'], 'hh': [number + ' þoras', '' + number + ' þoras'], 'd': ['\'n ziua', '\'iensa ziua'], 'dd': [number + ' ziuas', '' + number + ' ziuas'], 'M': ['\'n mes', '\'iens mes'], 'MM': [number + ' mesen', '' + number + ' mesen'], 'y': ['\'n ar', '\'iens ar'], 'yy': [number + ' ars', '' + number + ' ars'] }; return isFuture ? format[key][0] : (withoutSuffix ? format[key][0] : format[key][1]); } return tzl; }))); /***/ }, /* 444 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Central Atlas Tamazight [tzm] //! author : Abdel Said : https://github.com/abdelsaid ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var tzm = moment.defineLocale('tzm', { months : 'ⵉⵏⵏⴰⵢⵔ_ⴱⵕⴰⵢⵕ_ⵎⴰⵕⵚ_ⵉⴱⵔⵉⵔ_ⵎⴰⵢⵢⵓ_ⵢⵓⵏⵢⵓ_ⵢⵓⵍⵢⵓⵣ_ⵖⵓⵛⵜ_ⵛⵓⵜⴰⵏⴱⵉⵔ_ⴽⵟⵓⴱⵕ_ⵏⵓⵡⴰⵏⴱⵉⵔ_ⴷⵓⵊⵏⴱⵉⵔ'.split('_'), monthsShort : 'ⵉⵏⵏⴰⵢⵔ_ⴱⵕⴰⵢⵕ_ⵎⴰⵕⵚ_ⵉⴱⵔⵉⵔ_ⵎⴰⵢⵢⵓ_ⵢⵓⵏⵢⵓ_ⵢⵓⵍⵢⵓⵣ_ⵖⵓⵛⵜ_ⵛⵓⵜⴰⵏⴱⵉⵔ_ⴽⵟⵓⴱⵕ_ⵏⵓⵡⴰⵏⴱⵉⵔ_ⴷⵓⵊⵏⴱⵉⵔ'.split('_'), weekdays : 'ⴰⵙⴰⵎⴰⵙ_ⴰⵢⵏⴰⵙ_ⴰⵙⵉⵏⴰⵙ_ⴰⴽⵔⴰⵙ_ⴰⴽⵡⴰⵙ_ⴰⵙⵉⵎⵡⴰⵙ_ⴰⵙⵉⴹⵢⴰⵙ'.split('_'), weekdaysShort : 'ⴰⵙⴰⵎⴰⵙ_ⴰⵢⵏⴰⵙ_ⴰⵙⵉⵏⴰⵙ_ⴰⴽⵔⴰⵙ_ⴰⴽⵡⴰⵙ_ⴰⵙⵉⵎⵡⴰⵙ_ⴰⵙⵉⴹⵢⴰⵙ'.split('_'), weekdaysMin : 'ⴰⵙⴰⵎⴰⵙ_ⴰⵢⵏⴰⵙ_ⴰⵙⵉⵏⴰⵙ_ⴰⴽⵔⴰⵙ_ⴰⴽⵡⴰⵙ_ⴰⵙⵉⵎⵡⴰⵙ_ⴰⵙⵉⴹⵢⴰⵙ'.split('_'), longDateFormat : { LT : 'HH:mm', LTS: 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[ⴰⵙⴷⵅ ⴴ] LT', nextDay: '[ⴰⵙⴽⴰ ⴴ] LT', nextWeek: 'dddd [ⴴ] LT', lastDay: '[ⴰⵚⴰⵏⵜ ⴴ] LT', lastWeek: 'dddd [ⴴ] LT', sameElse: 'L' }, relativeTime : { future : 'ⴷⴰⴷⵅ ⵙ ⵢⴰⵏ %s', past : 'ⵢⴰⵏ %s', s : 'ⵉⵎⵉⴽ', m : 'ⵎⵉⵏⵓⴺ', mm : '%d ⵎⵉⵏⵓⴺ', h : 'ⵙⴰⵄⴰ', hh : '%d ⵜⴰⵙⵙⴰⵄⵉⵏ', d : 'ⴰⵙⵙ', dd : '%d oⵙⵙⴰⵏ', M : 'ⴰⵢoⵓⵔ', MM : '%d ⵉⵢⵢⵉⵔⵏ', y : 'ⴰⵙⴳⴰⵙ', yy : '%d ⵉⵙⴳⴰⵙⵏ' }, week : { dow : 6, // Saturday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return tzm; }))); /***/ }, /* 445 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Central Atlas Tamazight Latin [tzm-latn] //! author : Abdel Said : https://github.com/abdelsaid ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var tzmLatn = moment.defineLocale('tzm-latn', { months : 'innayr_brˤayrˤ_marˤsˤ_ibrir_mayyw_ywnyw_ywlywz_ɣwšt_šwtanbir_ktˤwbrˤ_nwwanbir_dwjnbir'.split('_'), monthsShort : 'innayr_brˤayrˤ_marˤsˤ_ibrir_mayyw_ywnyw_ywlywz_ɣwšt_šwtanbir_ktˤwbrˤ_nwwanbir_dwjnbir'.split('_'), weekdays : 'asamas_aynas_asinas_akras_akwas_asimwas_asiḍyas'.split('_'), weekdaysShort : 'asamas_aynas_asinas_akras_akwas_asimwas_asiḍyas'.split('_'), weekdaysMin : 'asamas_aynas_asinas_akras_akwas_asimwas_asiḍyas'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd D MMMM YYYY HH:mm' }, calendar : { sameDay: '[asdkh g] LT', nextDay: '[aska g] LT', nextWeek: 'dddd [g] LT', lastDay: '[assant g] LT', lastWeek: 'dddd [g] LT', sameElse: 'L' }, relativeTime : { future : 'dadkh s yan %s', past : 'yan %s', s : 'imik', m : 'minuḍ', mm : '%d minuḍ', h : 'saɛa', hh : '%d tassaɛin', d : 'ass', dd : '%d ossan', M : 'ayowr', MM : '%d iyyirn', y : 'asgas', yy : '%d isgasn' }, week : { dow : 6, // Saturday is the first day of the week. doy : 12 // The week that contains Jan 1st is the first week of the year. } }); return tzmLatn; }))); /***/ }, /* 446 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Ukrainian [uk] //! author : zemlanin : https://github.com/zemlanin //! Author : Menelion Elensúle : https://github.com/Oire ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; function plural(word, num) { var forms = word.split('_'); return num % 10 === 1 && num % 100 !== 11 ? forms[0] : (num % 10 >= 2 && num % 10 <= 4 && (num % 100 < 10 || num % 100 >= 20) ? 
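// `plural` applies the usual East Slavic three-form rule: forms[0] for numbers ending in 1 (except 11),
// forms[1] for numbers ending in 2-4 (except 12-14), forms[2] otherwise. For example
// plural('хвилина_хвилини_хвилин', 21) -> 'хвилина', 3 -> 'хвилини', 15 -> 'хвилин'.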
forms[1] : forms[2]); } function relativeTimeWithPlural(number, withoutSuffix, key) { var format = { 'mm': withoutSuffix ? 'хвилина_хвилини_хвилин' : 'хвилину_хвилини_хвилин', 'hh': withoutSuffix ? 'година_години_годин' : 'годину_години_годин', 'dd': 'день_дні_днів', 'MM': 'місяць_місяці_місяців', 'yy': 'рік_роки_років' }; if (key === 'm') { return withoutSuffix ? 'хвилина' : 'хвилину'; } else if (key === 'h') { return withoutSuffix ? 'година' : 'годину'; } else { return number + ' ' + plural(format[key], +number); } } function weekdaysCaseReplace(m, format) { var weekdays = { 'nominative': 'неділя_понеділок_вівторок_середа_четвер_п’ятниця_субота'.split('_'), 'accusative': 'неділю_понеділок_вівторок_середу_четвер_п’ятницю_суботу'.split('_'), 'genitive': 'неділі_понеділка_вівторка_середи_четверга_п’ятниці_суботи'.split('_') }, nounCase = (/(\[[ВвУу]\]) ?dddd/).test(format) ? 'accusative' : ((/\[?(?:минулої|наступної)? ?\] ?dddd/).test(format) ? 'genitive' : 'nominative'); return weekdays[nounCase][m.day()]; } function processHoursFunction(str) { return function () { return str + 'о' + (this.hours() === 11 ? 'б' : '') + '] LT'; }; } var uk = moment.defineLocale('uk', { months : { 'format': 'січня_лютого_березня_квітня_травня_червня_липня_серпня_вересня_жовтня_листопада_грудня'.split('_'), 'standalone': 'січень_лютий_березень_квітень_травень_червень_липень_серпень_вересень_жовтень_листопад_грудень'.split('_') }, monthsShort : 'січ_лют_бер_квіт_трав_черв_лип_серп_вер_жовт_лист_груд'.split('_'), weekdays : weekdaysCaseReplace, weekdaysShort : 'нд_пн_вт_ср_чт_пт_сб'.split('_'), weekdaysMin : 'нд_пн_вт_ср_чт_пт_сб'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD.MM.YYYY', LL : 'D MMMM YYYY р.', LLL : 'D MMMM YYYY р., HH:mm', LLLL : 'dddd, D MMMM YYYY р., HH:mm' }, calendar : { sameDay: processHoursFunction('[Сьогодні '), nextDay: processHoursFunction('[Завтра '), lastDay: processHoursFunction('[Вчора '), nextWeek: processHoursFunction('[У] dddd ['), lastWeek: function () { switch (this.day()) { case 0: case 3: case 5: case 6: return processHoursFunction('[Минулої] dddd [').call(this); case 1: case 2: case 4: return processHoursFunction('[Минулого] dddd [').call(this); } }, sameElse: 'L' }, relativeTime : { future : 'за %s', past : '%s тому', s : 'декілька секунд', m : relativeTimeWithPlural, mm : relativeTimeWithPlural, h : 'годину', hh : relativeTimeWithPlural, d : 'день', dd : relativeTimeWithPlural, M : 'місяць', MM : relativeTimeWithPlural, y : 'рік', yy : relativeTimeWithPlural }, // M. E.: those two are virtually unused but a user might want to implement them for his/her website for some reason meridiemParse: /ночі|ранку|дня|вечора/, isPM: function (input) { return /^(дня|вечора)$/.test(input); }, meridiem : function (hour, minute, isLower) { if (hour < 4) { return 'ночі'; } else if (hour < 12) { return 'ранку'; } else if (hour < 17) { return 'дня'; } else { return 'вечора'; } }, ordinalParse: /\d{1,2}-(й|го)/, ordinal: function (number, period) { switch (period) { case 'M': case 'd': case 'DDD': case 'w': case 'W': return number + '-й'; case 'D': return number + '-го'; default: return number; } }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 1st is the first week of the year. } }); return uk; }))); /***/ }, /* 447 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Uzbek [uz] //! 
author : Sardor Muminov : https://github.com/muminoff ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var uz = moment.defineLocale('uz', { months : 'январ_феврал_март_апрел_май_июн_июл_август_сентябр_октябр_ноябр_декабр'.split('_'), monthsShort : 'янв_фев_мар_апр_май_июн_июл_авг_сен_окт_ноя_дек'.split('_'), weekdays : 'Якшанба_Душанба_Сешанба_Чоршанба_Пайшанба_Жума_Шанба'.split('_'), weekdaysShort : 'Якш_Душ_Сеш_Чор_Пай_Жум_Шан'.split('_'), weekdaysMin : 'Як_Ду_Се_Чо_Па_Жу_Ша'.split('_'), longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'D MMMM YYYY, dddd HH:mm' }, calendar : { sameDay : '[Бугун соат] LT [да]', nextDay : '[Эртага] LT [да]', nextWeek : 'dddd [куни соат] LT [да]', lastDay : '[Кеча соат] LT [да]', lastWeek : '[Утган] dddd [куни соат] LT [да]', sameElse : 'L' }, relativeTime : { future : 'Якин %s ичида', past : 'Бир неча %s олдин', s : 'фурсат', m : 'бир дакика', mm : '%d дакика', h : 'бир соат', hh : '%d соат', d : 'бир кун', dd : '%d кун', M : 'бир ой', MM : '%d ой', y : 'бир йил', yy : '%d йил' }, week : { dow : 1, // Monday is the first day of the week. doy : 7 // The week that contains Jan 4th is the first week of the year. } }); return uz; }))); /***/ }, /* 448 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Vietnamese [vi] //! author : Bang Nguyen : https://github.com/bangnk ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var vi = moment.defineLocale('vi', { months : 'tháng 1_tháng 2_tháng 3_tháng 4_tháng 5_tháng 6_tháng 7_tháng 8_tháng 9_tháng 10_tháng 11_tháng 12'.split('_'), monthsShort : 'Th01_Th02_Th03_Th04_Th05_Th06_Th07_Th08_Th09_Th10_Th11_Th12'.split('_'), monthsParseExact : true, weekdays : 'chủ nhật_thứ hai_thứ ba_thứ tư_thứ năm_thứ sáu_thứ bảy'.split('_'), weekdaysShort : 'CN_T2_T3_T4_T5_T6_T7'.split('_'), weekdaysMin : 'CN_T2_T3_T4_T5_T6_T7'.split('_'), weekdaysParseExact : true, meridiemParse: /sa|ch/i, isPM : function (input) { return /^ch$/i.test(input); }, meridiem : function (hours, minutes, isLower) { if (hours < 12) { return isLower ? 'sa' : 'SA'; } else { return isLower ? 'ch' : 'CH'; } }, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM [năm] YYYY', LLL : 'D MMMM [năm] YYYY HH:mm', LLLL : 'dddd, D MMMM [năm] YYYY HH:mm', l : 'DD/M/YYYY', ll : 'D MMM YYYY', lll : 'D MMM YYYY HH:mm', llll : 'ddd, D MMM YYYY HH:mm' }, calendar : { sameDay: '[Hôm nay lúc] LT', nextDay: '[Ngày mai lúc] LT', nextWeek: 'dddd [tuần tới lúc] LT', lastDay: '[Hôm qua lúc] LT', lastWeek: 'dddd [tuần rồi lúc] LT', sameElse: 'L' }, relativeTime : { future : '%s tới', past : '%s trước', s : 'vài giây', m : 'một phút', mm : '%d phút', h : 'một giờ', hh : '%d giờ', d : 'một ngày', dd : '%d ngày', M : 'một tháng', MM : '%d tháng', y : 'một năm', yy : '%d năm' }, ordinalParse: /\d{1,2}/, ordinal : function (number) { return number; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return vi; }))); /***/ }, /* 449 */ /***/ function(module, exports, __webpack_require__) { //! 
moment.js locale configuration //! locale : Pseudo [x-pseudo] //! author : Andrew Hood : https://github.com/andrewhood125 ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var xPseudo = moment.defineLocale('x-pseudo', { months : 'J~áñúá~rý_F~ébrú~árý_~Márc~h_Áp~ríl_~Máý_~Júñé~_Júl~ý_Áú~gúst~_Sép~témb~ér_Ó~ctób~ér_Ñ~óvém~bér_~Décé~mbér'.split('_'), monthsShort : 'J~áñ_~Féb_~Már_~Ápr_~Máý_~Júñ_~Júl_~Áúg_~Sép_~Óct_~Ñóv_~Déc'.split('_'), monthsParseExact : true, weekdays : 'S~úñdá~ý_Mó~ñdáý~_Túé~sdáý~_Wéd~ñésd~áý_T~húrs~dáý_~Fríd~áý_S~átúr~dáý'.split('_'), weekdaysShort : 'S~úñ_~Móñ_~Túé_~Wéd_~Thú_~Frí_~Sát'.split('_'), weekdaysMin : 'S~ú_Mó~_Tú_~Wé_T~h_Fr~_Sá'.split('_'), weekdaysParseExact : true, longDateFormat : { LT : 'HH:mm', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[T~ódá~ý át] LT', nextDay : '[T~ómó~rró~w át] LT', nextWeek : 'dddd [át] LT', lastDay : '[Ý~ést~érdá~ý át] LT', lastWeek : '[L~ást] dddd [át] LT', sameElse : 'L' }, relativeTime : { future : 'í~ñ %s', past : '%s á~gó', s : 'á ~féw ~sécó~ñds', m : 'á ~míñ~úté', mm : '%d m~íñú~tés', h : 'á~ñ hó~úr', hh : '%d h~óúrs', d : 'á ~dáý', dd : '%d d~áýs', M : 'á ~móñ~th', MM : '%d m~óñt~hs', y : 'á ~ýéár', yy : '%d ý~éárs' }, ordinalParse: /\d{1,2}(th|st|nd|rd)/, ordinal : function (number) { var b = number % 10, output = (~~(number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; return number + output; }, week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return xPseudo; }))); /***/ }, /* 450 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Yoruba Nigeria [yo] //! author : Atolagbe Abisoye : https://github.com/andela-batolagbe ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var yo = moment.defineLocale('yo', { months : 'Sẹ́rẹ́_Èrèlè_Ẹrẹ̀nà_Ìgbé_Èbibi_Òkùdu_Agẹmo_Ògún_Owewe_Ọ̀wàrà_Bélú_Ọ̀pẹ̀̀'.split('_'), monthsShort : 'Sẹ́r_Èrl_Ẹrn_Ìgb_Èbi_Òkù_Agẹ_Ògú_Owe_Ọ̀wà_Bél_Ọ̀pẹ̀̀'.split('_'), weekdays : 'Àìkú_Ajé_Ìsẹ́gun_Ọjọ́rú_Ọjọ́bọ_Ẹtì_Àbámẹ́ta'.split('_'), weekdaysShort : 'Àìk_Ajé_Ìsẹ́_Ọjr_Ọjb_Ẹtì_Àbá'.split('_'), weekdaysMin : 'Àì_Aj_Ìs_Ọr_Ọb_Ẹt_Àb'.split('_'), longDateFormat : { LT : 'h:mm A', LTS : 'h:mm:ss A', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY h:mm A', LLLL : 'dddd, D MMMM YYYY h:mm A' }, calendar : { sameDay : '[Ònì ni] LT', nextDay : '[Ọ̀la ni] LT', nextWeek : 'dddd [Ọsẹ̀ tón\'bọ] [ni] LT', lastDay : '[Àna ni] LT', lastWeek : 'dddd [Ọsẹ̀ tólọ́] [ni] LT', sameElse : 'L' }, relativeTime : { future : 'ní %s', past : '%s kọjá', s : 'ìsẹjú aayá die', m : 'ìsẹjú kan', mm : 'ìsẹjú %d', h : 'wákati kan', hh : 'wákati %d', d : 'ọjọ́ kan', dd : 'ọjọ́ %d', M : 'osù kan', MM : 'osù %d', y : 'ọdún kan', yy : 'ọdún %d' }, ordinalParse : /ọjọ́\s\d{1,2}/, ordinal : 'ọjọ́ %d', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return yo; }))); /***/ }, /* 451 */ /***/ function(module, exports, __webpack_require__) { //! 
moment.js locale configuration //! locale : Chinese (China) [zh-cn] //! author : suupic : https://github.com/suupic //! author : Zeno Zeng : https://github.com/zenozeng ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var zhCn = moment.defineLocale('zh-cn', { months : '一月_二月_三月_四月_五月_六月_七月_八月_九月_十月_十一月_十二月'.split('_'), monthsShort : '1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月'.split('_'), weekdays : '星期日_星期一_星期二_星期三_星期四_星期五_星期六'.split('_'), weekdaysShort : '周日_周一_周二_周三_周四_周五_周六'.split('_'), weekdaysMin : '日_一_二_三_四_五_六'.split('_'), longDateFormat : { LT : 'Ah点mm分', LTS : 'Ah点m分s秒', L : 'YYYY-MM-DD', LL : 'YYYY年MMMD日', LLL : 'YYYY年MMMD日Ah点mm分', LLLL : 'YYYY年MMMD日ddddAh点mm分', l : 'YYYY-MM-DD', ll : 'YYYY年MMMD日', lll : 'YYYY年MMMD日Ah点mm分', llll : 'YYYY年MMMD日ddddAh点mm分' }, meridiemParse: /凌晨|早上|上午|中午|下午|晚上/, meridiemHour: function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === '凌晨' || meridiem === '早上' || meridiem === '上午') { return hour; } else if (meridiem === '下午' || meridiem === '晚上') { return hour + 12; } else { // '中午' return hour >= 11 ? hour : hour + 12; } }, meridiem : function (hour, minute, isLower) { var hm = hour * 100 + minute; if (hm < 600) { return '凌晨'; } else if (hm < 900) { return '早上'; } else if (hm < 1130) { return '上午'; } else if (hm < 1230) { return '中午'; } else if (hm < 1800) { return '下午'; } else { return '晚上'; } }, calendar : { sameDay : function () { return this.minutes() === 0 ? '[今天]Ah[点整]' : '[今天]LT'; }, nextDay : function () { return this.minutes() === 0 ? '[明天]Ah[点整]' : '[明天]LT'; }, lastDay : function () { return this.minutes() === 0 ? '[昨天]Ah[点整]' : '[昨天]LT'; }, nextWeek : function () { var startOfWeek, prefix; startOfWeek = moment().startOf('week'); prefix = this.diff(startOfWeek, 'days') >= 7 ? '[下]' : '[本]'; return this.minutes() === 0 ? prefix + 'dddAh点整' : prefix + 'dddAh点mm'; }, lastWeek : function () { var startOfWeek, prefix; startOfWeek = moment().startOf('week'); prefix = this.unix() < startOfWeek.unix() ? '[上]' : '[本]'; return this.minutes() === 0 ? prefix + 'dddAh点整' : prefix + 'dddAh点mm'; }, sameElse : 'LL' }, ordinalParse: /\d{1,2}(日|月|周)/, ordinal : function (number, period) { switch (period) { case 'd': case 'D': case 'DDD': return number + '日'; case 'M': return number + '月'; case 'w': case 'W': return number + '周'; default: return number; } }, relativeTime : { future : '%s内', past : '%s前', s : '几秒', m : '1 分钟', mm : '%d 分钟', h : '1 小时', hh : '%d 小时', d : '1 天', dd : '%d 天', M : '1 个月', MM : '%d 个月', y : '1 年', yy : '%d 年' }, week : { // GB/T 7408-1994《数据元和交换格式·信息交换·日期和时间表示法》与ISO 8601:1988等效 dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return zhCn; }))); /***/ }, /* 452 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Chinese (Hong Kong) [zh-hk] //! author : Ben : https://github.com/ben-lin //! author : Chris Lam : https://github.com/hehachris //! author : Konstantin : https://github.com/skfd ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? 
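// The Chinese locales pick a day-period word from hm = hour * 100 + minute: < 600 凌晨 (before dawn),
// < 900 早上 (morning), < 1130 上午 (forenoon), < 1230 中午 (noon), < 1800 下午 (afternoon), else 晚上
// (evening). For example 11:15 gives hm = 1115, so it formats with 上午.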
define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var zhHk = moment.defineLocale('zh-hk', { months : '一月_二月_三月_四月_五月_六月_七月_八月_九月_十月_十一月_十二月'.split('_'), monthsShort : '1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月'.split('_'), weekdays : '星期日_星期一_星期二_星期三_星期四_星期五_星期六'.split('_'), weekdaysShort : '週日_週一_週二_週三_週四_週五_週六'.split('_'), weekdaysMin : '日_一_二_三_四_五_六'.split('_'), longDateFormat : { LT : 'Ah點mm分', LTS : 'Ah點m分s秒', L : 'YYYY年MMMD日', LL : 'YYYY年MMMD日', LLL : 'YYYY年MMMD日Ah點mm分', LLLL : 'YYYY年MMMD日ddddAh點mm分', l : 'YYYY年MMMD日', ll : 'YYYY年MMMD日', lll : 'YYYY年MMMD日Ah點mm分', llll : 'YYYY年MMMD日ddddAh點mm分' }, meridiemParse: /凌晨|早上|上午|中午|下午|晚上/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === '凌晨' || meridiem === '早上' || meridiem === '上午') { return hour; } else if (meridiem === '中午') { return hour >= 11 ? hour : hour + 12; } else if (meridiem === '下午' || meridiem === '晚上') { return hour + 12; } }, meridiem : function (hour, minute, isLower) { var hm = hour * 100 + minute; if (hm < 600) { return '凌晨'; } else if (hm < 900) { return '早上'; } else if (hm < 1130) { return '上午'; } else if (hm < 1230) { return '中午'; } else if (hm < 1800) { return '下午'; } else { return '晚上'; } }, calendar : { sameDay : '[今天]LT', nextDay : '[明天]LT', nextWeek : '[下]ddddLT', lastDay : '[昨天]LT', lastWeek : '[上]ddddLT', sameElse : 'L' }, ordinalParse: /\d{1,2}(日|月|週)/, ordinal : function (number, period) { switch (period) { case 'd' : case 'D' : case 'DDD' : return number + '日'; case 'M' : return number + '月'; case 'w' : case 'W' : return number + '週'; default : return number; } }, relativeTime : { future : '%s內', past : '%s前', s : '幾秒', m : '1 分鐘', mm : '%d 分鐘', h : '1 小時', hh : '%d 小時', d : '1 天', dd : '%d 天', M : '1 個月', MM : '%d 個月', y : '1 年', yy : '%d 年' } }); return zhHk; }))); /***/ }, /* 453 */ /***/ function(module, exports, __webpack_require__) { //! moment.js locale configuration //! locale : Chinese (Taiwan) [zh-tw] //! author : Ben : https://github.com/ben-lin //! author : Chris Lam : https://github.com/hehachris ;(function (global, factory) { true ? factory(__webpack_require__(343)) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; var zhTw = moment.defineLocale('zh-tw', { months : '一月_二月_三月_四月_五月_六月_七月_八月_九月_十月_十一月_十二月'.split('_'), monthsShort : '1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月'.split('_'), weekdays : '星期日_星期一_星期二_星期三_星期四_星期五_星期六'.split('_'), weekdaysShort : '週日_週一_週二_週三_週四_週五_週六'.split('_'), weekdaysMin : '日_一_二_三_四_五_六'.split('_'), longDateFormat : { LT : 'Ah點mm分', LTS : 'Ah點m分s秒', L : 'YYYY年MMMD日', LL : 'YYYY年MMMD日', LLL : 'YYYY年MMMD日Ah點mm分', LLLL : 'YYYY年MMMD日ddddAh點mm分', l : 'YYYY年MMMD日', ll : 'YYYY年MMMD日', lll : 'YYYY年MMMD日Ah點mm分', llll : 'YYYY年MMMD日ddddAh點mm分' }, meridiemParse: /凌晨|早上|上午|中午|下午|晚上/, meridiemHour : function (hour, meridiem) { if (hour === 12) { hour = 0; } if (meridiem === '凌晨' || meridiem === '早上' || meridiem === '上午') { return hour; } else if (meridiem === '中午') { return hour >= 11 ? 
hour : hour + 12; } else if (meridiem === '下午' || meridiem === '晚上') { return hour + 12; } }, meridiem : function (hour, minute, isLower) { var hm = hour * 100 + minute; if (hm < 600) { return '凌晨'; } else if (hm < 900) { return '早上'; } else if (hm < 1130) { return '上午'; } else if (hm < 1230) { return '中午'; } else if (hm < 1800) { return '下午'; } else { return '晚上'; } }, calendar : { sameDay : '[今天]LT', nextDay : '[明天]LT', nextWeek : '[下]ddddLT', lastDay : '[昨天]LT', lastWeek : '[上]ddddLT', sameElse : 'L' }, ordinalParse: /\d{1,2}(日|月|週)/, ordinal : function (number, period) { switch (period) { case 'd' : case 'D' : case 'DDD' : return number + '日'; case 'M' : return number + '月'; case 'w' : case 'W' : return number + '週'; default : return number; } }, relativeTime : { future : '%s內', past : '%s前', s : '幾秒', m : '1 分鐘', mm : '%d 分鐘', h : '1 小時', hh : '%d 小時', d : '1 天', dd : '%d 天', M : '1 個月', MM : '%d 個月', y : '1 年', yy : '%d 年' } }); return zhTw; }))); /***/ }, /* 454 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.isSameDay = isSameDay; exports.isSameUtcOffset = isSameUtcOffset; exports.isDayInRange = isDayInRange; exports.isDayDisabled = isDayDisabled; exports.allDaysDisabledBefore = allDaysDisabledBefore; exports.allDaysDisabledAfter = allDaysDisabledAfter; exports.getEffectiveMinDate = getEffectiveMinDate; exports.getEffectiveMaxDate = getEffectiveMaxDate; var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function isSameDay(moment1, moment2) { if (moment1 && moment2) { return moment1.isSame(moment2, 'day'); } else { return !moment1 && !moment2; } } function isSameUtcOffset(moment1, moment2) { if (moment1 && moment2) { return moment1.utcOffset() === moment2.utcOffset(); } else { return !moment1 && !moment2; } } function isDayInRange(day, startDate, endDate) { var before = startDate.clone().startOf('day').subtract(1, 'seconds'); var after = endDate.clone().startOf('day').add(1, 'seconds'); return day.clone().startOf('day').isBetween(before, after); } function isDayDisabled(day) { var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, minDate = _ref.minDate, maxDate = _ref.maxDate, excludeDates = _ref.excludeDates, includeDates = _ref.includeDates, filterDate = _ref.filterDate; return minDate && day.isBefore(minDate, 'day') || maxDate && day.isAfter(maxDate, 'day') || excludeDates && excludeDates.some(function (excludeDate) { return isSameDay(day, excludeDate); }) || includeDates && !includeDates.some(function (includeDate) { return isSameDay(day, includeDate); }) || filterDate && !filterDate(day.clone()) || false; } function allDaysDisabledBefore(day, unit) { var _ref2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, minDate = _ref2.minDate, includeDates = _ref2.includeDates; var dateBefore = day.clone().subtract(1, unit); return minDate && dateBefore.isBefore(minDate, unit) || includeDates && includeDates.every(function (includeDate) { return dateBefore.isBefore(includeDate, unit); }) || false; } function allDaysDisabledAfter(day, unit) { var _ref3 = arguments.length > 2 && arguments[2] !== undefined ? 
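// The `arguments.length > N && arguments[N] !== undefined ? arguments[N] : {}` shape in these helpers
// looks like Babel's expansion of a destructured default parameter such as
// `function isDayDisabled(day, { minDate, maxDate, excludeDates, includeDates, filterDate } = {})`.
// A rough usage sketch of the helper defined above (dates are only illustrative):
//   isDayDisabled(moment('2017-01-05'), { minDate: moment('2017-01-10') }); // true: day is before minDate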
arguments[2] : {}, maxDate = _ref3.maxDate, includeDates = _ref3.includeDates; var dateAfter = day.clone().add(1, unit); return maxDate && dateAfter.isAfter(maxDate, unit) || includeDates && includeDates.every(function (includeDate) { return dateAfter.isAfter(includeDate, unit); }) || false; } function getEffectiveMinDate(_ref4) { var minDate = _ref4.minDate, includeDates = _ref4.includeDates; if (includeDates && minDate) { return _moment2.default.min(includeDates.filter(function (includeDate) { return minDate.isSameOrBefore(includeDate, 'day'); })); } else if (includeDates) { return _moment2.default.min(includeDates); } else { return minDate; } } function getEffectiveMaxDate(_ref5) { var maxDate = _ref5.maxDate, includeDates = _ref5.includeDates; if (includeDates && maxDate) { return _moment2.default.max(includeDates.filter(function (includeDate) { return maxDate.isSameOrAfter(includeDate, 'day'); })); } else if (includeDates) { return _moment2.default.max(includeDates); } else { return maxDate; } } /***/ }, /* 455 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); var _find = __webpack_require__(456); var _find2 = _interopRequireDefault(_find); var _year_dropdown = __webpack_require__(577); var _year_dropdown2 = _interopRequireDefault(_year_dropdown); var _month_dropdown = __webpack_require__(581); var _month_dropdown2 = _interopRequireDefault(_month_dropdown); var _month = __webpack_require__(587); var _month2 = _interopRequireDefault(_month); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _classnames = __webpack_require__(579); var _classnames2 = _interopRequireDefault(_classnames); var _date_utils = __webpack_require__(454); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var DROPDOWN_FOCUS_CLASSNAMES = ['react-datepicker__year-select', 'react-datepicker__month-select']; var isDropdownSelect = function isDropdownSelect() { var element = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; var classNames = (element.className || '').split(/\s+/); return !!(0, _find2.default)(DROPDOWN_FOCUS_CLASSNAMES, function (testClassname) { return classNames.indexOf(testClassname) >= 0; }); }; var Calendar = _react2.default.createClass({ displayName: 'Calendar', propTypes: { className: _react2.default.PropTypes.string, children: _react2.default.PropTypes.node, dateFormat: _react2.default.PropTypes.oneOfType([_react2.default.PropTypes.string, _react2.default.PropTypes.array]).isRequired, dropdownMode: _react2.default.PropTypes.oneOf(['scroll', 'select']).isRequired, endDate: _react2.default.PropTypes.object, excludeDates: _react2.default.PropTypes.array, filterDate: _react2.default.PropTypes.func, fixedHeight: _react2.default.PropTypes.bool, highlightDates: _react2.default.PropTypes.array, includeDates: _react2.default.PropTypes.array, locale: _react2.default.PropTypes.string, maxDate: _react2.default.PropTypes.object, minDate: _react2.default.PropTypes.object, monthsShown: _react2.default.PropTypes.number, onClickOutside: _react2.default.PropTypes.func.isRequired, onMonthChange: _react2.default.PropTypes.func, forceShowMonthNavigation: _react2.default.PropTypes.bool, onDropdownFocus: _react2.default.PropTypes.func, onSelect: _react2.default.PropTypes.func.isRequired, openToDate: _react2.default.PropTypes.object, peekNextMonth: _react2.default.PropTypes.bool, scrollableYearDropdown: _react2.default.PropTypes.bool, preSelection: _react2.default.PropTypes.object, selected: _react2.default.PropTypes.object, selectsEnd: _react2.default.PropTypes.bool, selectsStart: _react2.default.PropTypes.bool, showMonthDropdown: _react2.default.PropTypes.bool, showWeekNumbers: _react2.default.PropTypes.bool, showYearDropdown: _react2.default.PropTypes.bool, startDate: _react2.default.PropTypes.object, todayButton: _react2.default.PropTypes.string, utcOffset: _react2.default.PropTypes.number }, defaultProps: { onDropdownFocus: function onDropdownFocus() {} }, getDefaultProps: function getDefaultProps() { return { utcOffset: _moment2.default.utc().utcOffset(), monthsShown: 1, forceShowMonthNavigation: false }; }, getInitialState: function getInitialState() { return { date: this.localizeMoment(this.getDateInView()), selectingDate: null }; }, componentWillReceiveProps: function componentWillReceiveProps(nextProps) { if (nextProps.preSelection && !(0, _date_utils.isSameDay)(nextProps.preSelection, this.props.preSelection)) { this.setState({ date: this.localizeMoment(nextProps.preSelection) }); } else if (nextProps.openToDate && !(0, _date_utils.isSameDay)(nextProps.openToDate, this.props.openToDate)) { this.setState({ date: this.localizeMoment(nextProps.openToDate) }); } }, handleClickOutside: function handleClickOutside(event) { this.props.onClickOutside(event); }, handleDropdownFocus: function handleDropdownFocus(event) { if (isDropdownSelect(event.target)) { this.props.onDropdownFocus(); } }, getDateInView: function getDateInView() { var _props = this.props, preSelection = _props.preSelection, selected = _props.selected, openToDate = _props.openToDate, utcOffset = _props.utcOffset; var minDate = (0, _date_utils.getEffectiveMinDate)(this.props); var maxDate = (0, _date_utils.getEffectiveMaxDate)(this.props); var current = _moment2.default.utc().utcOffset(utcOffset); var initialDate = preSelection || selected; if (initialDate) { return initialDate; } else if (minDate && maxDate && openToDate && openToDate.isBetween(minDate, maxDate)) { return openToDate; } else if (minDate && openToDate && 
openToDate.isAfter(minDate)) { return openToDate; } else if (minDate && minDate.isAfter(current)) { return minDate; } else if (maxDate && openToDate && openToDate.isBefore(maxDate)) { return openToDate; } else if (maxDate && maxDate.isBefore(current)) { return maxDate; } else if (openToDate) { return openToDate; } else { return current; } }, localizeMoment: function localizeMoment(date) { return date.clone().locale(this.props.locale || _moment2.default.locale()); }, increaseMonth: function increaseMonth() { var _this = this; this.setState({ date: this.state.date.clone().add(1, 'month') }, function () { return _this.handleMonthChange(_this.state.date); }); }, decreaseMonth: function decreaseMonth() { var _this2 = this; this.setState({ date: this.state.date.clone().subtract(1, 'month') }, function () { return _this2.handleMonthChange(_this2.state.date); }); }, handleDayClick: function handleDayClick(day, event) { this.props.onSelect(day, event); }, handleDayMouseEnter: function handleDayMouseEnter(day) { this.setState({ selectingDate: day }); }, handleMonthMouseLeave: function handleMonthMouseLeave() { this.setState({ selectingDate: null }); }, handleMonthChange: function handleMonthChange(date) { if (this.props.onMonthChange) { this.props.onMonthChange(date); } }, changeYear: function changeYear(year) { this.setState({ date: this.state.date.clone().set('year', year) }); }, changeMonth: function changeMonth(month) { var _this3 = this; this.setState({ date: this.state.date.clone().set('month', month) }, function () { return _this3.handleMonthChange(_this3.state.date); }); }, header: function header() { var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.state.date; var startOfWeek = date.clone().startOf('week'); var dayNames = []; if (this.props.showWeekNumbers) { dayNames.push(_react2.default.createElement( 'div', { key: 'W', className: 'react-datepicker__day-name' }, '#' )); } return dayNames.concat([0, 1, 2, 3, 4, 5, 6].map(function (offset) { var day = startOfWeek.clone().add(offset, 'days'); return _react2.default.createElement( 'div', { key: offset, className: 'react-datepicker__day-name' }, day.localeData().weekdaysMin(day) ); })); }, renderPreviousMonthButton: function renderPreviousMonthButton() { if (!this.props.forceShowMonthNavigation && (0, _date_utils.allDaysDisabledBefore)(this.state.date, 'month', this.props)) { return; } return _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--previous', onClick: this.decreaseMonth }); }, renderNextMonthButton: function renderNextMonthButton() { if (!this.props.forceShowMonthNavigation && (0, _date_utils.allDaysDisabledAfter)(this.state.date, 'month', this.props)) { return; } return _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--next', onClick: this.increaseMonth }); }, renderCurrentMonth: function renderCurrentMonth() { var date = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : this.state.date; var classes = ['react-datepicker__current-month']; if (this.props.showYearDropdown) { classes.push('react-datepicker__current-month--hasYearDropdown'); } if (this.props.showMonthDropdown) { classes.push('react-datepicker__current-month--hasMonthDropdown'); } return _react2.default.createElement( 'div', { className: classes.join(' ') }, date.format(this.props.dateFormat) ); }, renderYearDropdown: function renderYearDropdown() { var overrideHide = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; if (!this.props.showYearDropdown || overrideHide) { return; } return _react2.default.createElement(_year_dropdown2.default, { dropdownMode: this.props.dropdownMode, onChange: this.changeYear, minDate: this.props.minDate, maxDate: this.props.maxDate, year: this.state.date.year(), scrollableYearDropdown: this.props.scrollableYearDropdown }); }, renderMonthDropdown: function renderMonthDropdown() { var overrideHide = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; if (!this.props.showMonthDropdown) { return; } return _react2.default.createElement(_month_dropdown2.default, { dropdownMode: this.props.dropdownMode, locale: this.props.locale, onChange: this.changeMonth, month: this.state.date.month() }); }, renderTodayButton: function renderTodayButton() { var _this4 = this; if (!this.props.todayButton) { return; } return _react2.default.createElement( 'div', { className: 'react-datepicker__today-button', onClick: function onClick(event) { return _this4.props.onSelect(_moment2.default.utc().utcOffset(_this4.props.utcOffset).startOf('date'), event); } }, this.props.todayButton ); }, renderMonths: function renderMonths() { var monthList = []; for (var i = 0; i < this.props.monthsShown; ++i) { var monthDate = this.state.date.clone().add(i, 'M'); var monthKey = 'month-' + i; monthList.push(_react2.default.createElement( 'div', { key: monthKey, className: 'react-datepicker__month-container' }, _react2.default.createElement( 'div', { className: 'react-datepicker__header' }, this.renderCurrentMonth(monthDate), _react2.default.createElement( 'div', { className: 'react-datepicker__header__dropdown react-datepicker__header__dropdown--' + this.props.dropdownMode, onFocus: this.handleDropdownFocus }, this.renderMonthDropdown(i !== 0), this.renderYearDropdown(i !== 0) ), _react2.default.createElement( 'div', { className: 'react-datepicker__day-names' }, this.header(monthDate) ) ), _react2.default.createElement(_month2.default, { day: monthDate, onDayClick: this.handleDayClick, onDayMouseEnter: this.handleDayMouseEnter, onMouseLeave: this.handleMonthMouseLeave, minDate: this.props.minDate, maxDate: this.props.maxDate, excludeDates: this.props.excludeDates, highlightDates: this.props.highlightDates, selectingDate: this.state.selectingDate, includeDates: this.props.includeDates, fixedHeight: this.props.fixedHeight, filterDate: this.props.filterDate, preSelection: this.props.preSelection, selected: this.props.selected, selectsStart: this.props.selectsStart, selectsEnd: this.props.selectsEnd, showWeekNumbers: this.props.showWeekNumbers, startDate: this.props.startDate, endDate: this.props.endDate, peekNextMonth: this.props.peekNextMonth, utcOffset: this.props.utcOffset }) )); } return monthList; }, render: function render() { return _react2.default.createElement( 'div', { className: (0, _classnames2.default)('react-datepicker', this.props.className) }, _react2.default.createElement('div', { className: 'react-datepicker__triangle' }), 
this.renderPreviousMonthButton(), this.renderNextMonthButton(), this.renderMonths(), this.renderTodayButton(), this.props.children ); } }); module.exports = Calendar; /***/ }, /* 456 */ /***/ function(module, exports, __webpack_require__) { var createFind = __webpack_require__(457), findIndex = __webpack_require__(572); /** * Iterates over elements of `collection`, returning the first element * `predicate` returns truthy for. The predicate is invoked with three * arguments: (value, index|key, collection). * * @static * @memberOf _ * @since 0.1.0 * @category Collection * @param {Array|Object} collection The collection to inspect. * @param {Function} [predicate=_.identity] The function invoked per iteration. * @param {number} [fromIndex=0] The index to search from. * @returns {*} Returns the matched element, else `undefined`. * @example * * var users = [ * { 'user': 'barney', 'age': 36, 'active': true }, * { 'user': 'fred', 'age': 40, 'active': false }, * { 'user': 'pebbles', 'age': 1, 'active': true } * ]; * * _.find(users, function(o) { return o.age < 40; }); * // => object for 'barney' * * // The `_.matches` iteratee shorthand. * _.find(users, { 'age': 1, 'active': true }); * // => object for 'pebbles' * * // The `_.matchesProperty` iteratee shorthand. * _.find(users, ['active', false]); * // => object for 'fred' * * // The `_.property` iteratee shorthand. * _.find(users, 'active'); * // => object for 'barney' */ var find = createFind(findIndex); module.exports = find; /***/ }, /* 457 */ /***/ function(module, exports, __webpack_require__) { var baseIteratee = __webpack_require__(458), isArrayLike = __webpack_require__(543), keys = __webpack_require__(525); /** * Creates a `_.find` or `_.findLast` function. * * @private * @param {Function} findIndexFunc The function to find the collection index. * @returns {Function} Returns the new find function. */ function createFind(findIndexFunc) { return function(collection, predicate, fromIndex) { var iterable = Object(collection); if (!isArrayLike(collection)) { var iteratee = baseIteratee(predicate, 3); collection = keys(collection); predicate = function(key) { return iteratee(iterable[key], key, iterable); }; } var index = findIndexFunc(collection, predicate, fromIndex); return index > -1 ? iterable[iteratee ? collection[index] : index] : undefined; }; } module.exports = createFind; /***/ }, /* 458 */ /***/ function(module, exports, __webpack_require__) { var baseMatches = __webpack_require__(459), baseMatchesProperty = __webpack_require__(552), identity = __webpack_require__(568), isArray = __webpack_require__(521), property = __webpack_require__(569); /** * The base implementation of `_.iteratee`. * * @private * @param {*} [value=_.identity] The value to convert to an iteratee. * @returns {Function} Returns the iteratee. */ function baseIteratee(value) { // Don't store the `typeof` result in a variable to avoid a JIT bug in Safari 9. // See https://bugs.webkit.org/show_bug.cgi?id=156034 for more details. if (typeof value == 'function') { return value; } if (value == null) { return identity; } if (typeof value == 'object') { return isArray(value) ? 
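// baseIteratee normalizes the shapes lodash's `_.find` accepts into a predicate: a function is used
// as-is, null becomes `identity`, a string like 'active' becomes a property getter, and (in the branch
// completed below) an array like ['active', false] becomes a matchesProperty check while a plain
// object like { 'age': 1 } becomes a matches check.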
baseMatchesProperty(value[0], value[1]) : baseMatches(value); } return property(value); } module.exports = baseIteratee; /***/ }, /* 459 */ /***/ function(module, exports, __webpack_require__) { var baseIsMatch = __webpack_require__(460), getMatchData = __webpack_require__(549), matchesStrictComparable = __webpack_require__(551); /** * The base implementation of `_.matches` which doesn't clone `source`. * * @private * @param {Object} source The object of property values to match. * @returns {Function} Returns the new spec function. */ function baseMatches(source) { var matchData = getMatchData(source); if (matchData.length == 1 && matchData[0][2]) { return matchesStrictComparable(matchData[0][0], matchData[0][1]); } return function(object) { return object === source || baseIsMatch(object, source, matchData); }; } module.exports = baseMatches; /***/ }, /* 460 */ /***/ function(module, exports, __webpack_require__) { var Stack = __webpack_require__(461), baseIsEqual = __webpack_require__(505); /** Used to compose bitmasks for value comparisons. */ var COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2; /** * The base implementation of `_.isMatch` without support for iteratee shorthands. * * @private * @param {Object} object The object to inspect. * @param {Object} source The object of property values to match. * @param {Array} matchData The property names, values, and compare flags to match. * @param {Function} [customizer] The function to customize comparisons. * @returns {boolean} Returns `true` if `object` is a match, else `false`. */ function baseIsMatch(object, source, matchData, customizer) { var index = matchData.length, length = index, noCustomizer = !customizer; if (object == null) { return !length; } object = Object(object); while (index--) { var data = matchData[index]; if ((noCustomizer && data[2]) ? data[1] !== object[data[0]] : !(data[0] in object) ) { return false; } } while (++index < length) { data = matchData[index]; var key = data[0], objValue = object[key], srcValue = data[1]; if (noCustomizer && data[2]) { if (objValue === undefined && !(key in object)) { return false; } } else { var stack = new Stack; if (customizer) { var result = customizer(objValue, srcValue, key, object, source, stack); } if (!(result === undefined ? baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG, customizer, stack) : result )) { return false; } } } return true; } module.exports = baseIsMatch; /***/ }, /* 461 */ /***/ function(module, exports, __webpack_require__) { var ListCache = __webpack_require__(462), stackClear = __webpack_require__(470), stackDelete = __webpack_require__(471), stackGet = __webpack_require__(472), stackHas = __webpack_require__(473), stackSet = __webpack_require__(474); /** * Creates a stack cache object to store key-value pairs. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function Stack(entries) { var data = this.__data__ = new ListCache(entries); this.size = data.size; } // Add methods to `Stack`. 
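// Stack is a small cache used during deep comparisons; it delegates to ListCache, which keeps
// [key, value] pairs in a plain array and finds keys via assocIndexOf using SameValueZero equality
// (`eq` below), so NaN keys work and -0 equals +0. Rough behavioural sketch (illustrative only):
//   var cache = new ListCache([['a', 1]]);
//   cache.get('a');             // 1
//   cache.has('b');             // false
//   cache.set(NaN, 2).get(NaN); // 2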
Stack.prototype.clear = stackClear; Stack.prototype['delete'] = stackDelete; Stack.prototype.get = stackGet; Stack.prototype.has = stackHas; Stack.prototype.set = stackSet; module.exports = Stack; /***/ }, /* 462 */ /***/ function(module, exports, __webpack_require__) { var listCacheClear = __webpack_require__(463), listCacheDelete = __webpack_require__(464), listCacheGet = __webpack_require__(467), listCacheHas = __webpack_require__(468), listCacheSet = __webpack_require__(469); /** * Creates an list cache object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function ListCache(entries) { var index = -1, length = entries == null ? 0 : entries.length; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } // Add methods to `ListCache`. ListCache.prototype.clear = listCacheClear; ListCache.prototype['delete'] = listCacheDelete; ListCache.prototype.get = listCacheGet; ListCache.prototype.has = listCacheHas; ListCache.prototype.set = listCacheSet; module.exports = ListCache; /***/ }, /* 463 */ /***/ function(module, exports) { /** * Removes all key-value entries from the list cache. * * @private * @name clear * @memberOf ListCache */ function listCacheClear() { this.__data__ = []; this.size = 0; } module.exports = listCacheClear; /***/ }, /* 464 */ /***/ function(module, exports, __webpack_require__) { var assocIndexOf = __webpack_require__(465); /** Used for built-in method references. */ var arrayProto = Array.prototype; /** Built-in value references. */ var splice = arrayProto.splice; /** * Removes `key` and its value from the list cache. * * @private * @name delete * @memberOf ListCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function listCacheDelete(key) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { return false; } var lastIndex = data.length - 1; if (index == lastIndex) { data.pop(); } else { splice.call(data, index, 1); } --this.size; return true; } module.exports = listCacheDelete; /***/ }, /* 465 */ /***/ function(module, exports, __webpack_require__) { var eq = __webpack_require__(466); /** * Gets the index at which the `key` is found in `array` of key-value pairs. * * @private * @param {Array} array The array to inspect. * @param {*} key The key to search for. * @returns {number} Returns the index of the matched value, else `-1`. */ function assocIndexOf(array, key) { var length = array.length; while (length--) { if (eq(array[length][0], key)) { return length; } } return -1; } module.exports = assocIndexOf; /***/ }, /* 466 */ /***/ function(module, exports) { /** * Performs a * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) * comparison between two values to determine if they are equivalent. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to compare. * @param {*} other The other value to compare. * @returns {boolean} Returns `true` if the values are equivalent, else `false`. 
* @example * * var object = { 'a': 1 }; * var other = { 'a': 1 }; * * _.eq(object, object); * // => true * * _.eq(object, other); * // => false * * _.eq('a', 'a'); * // => true * * _.eq('a', Object('a')); * // => false * * _.eq(NaN, NaN); * // => true */ function eq(value, other) { return value === other || (value !== value && other !== other); } module.exports = eq; /***/ }, /* 467 */ /***/ function(module, exports, __webpack_require__) { var assocIndexOf = __webpack_require__(465); /** * Gets the list cache value for `key`. * * @private * @name get * @memberOf ListCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function listCacheGet(key) { var data = this.__data__, index = assocIndexOf(data, key); return index < 0 ? undefined : data[index][1]; } module.exports = listCacheGet; /***/ }, /* 468 */ /***/ function(module, exports, __webpack_require__) { var assocIndexOf = __webpack_require__(465); /** * Checks if a list cache value for `key` exists. * * @private * @name has * @memberOf ListCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function listCacheHas(key) { return assocIndexOf(this.__data__, key) > -1; } module.exports = listCacheHas; /***/ }, /* 469 */ /***/ function(module, exports, __webpack_require__) { var assocIndexOf = __webpack_require__(465); /** * Sets the list cache `key` to `value`. * * @private * @name set * @memberOf ListCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the list cache instance. */ function listCacheSet(key, value) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { ++this.size; data.push([key, value]); } else { data[index][1] = value; } return this; } module.exports = listCacheSet; /***/ }, /* 470 */ /***/ function(module, exports, __webpack_require__) { var ListCache = __webpack_require__(462); /** * Removes all key-value entries from the stack. * * @private * @name clear * @memberOf Stack */ function stackClear() { this.__data__ = new ListCache; this.size = 0; } module.exports = stackClear; /***/ }, /* 471 */ /***/ function(module, exports) { /** * Removes `key` and its value from the stack. * * @private * @name delete * @memberOf Stack * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function stackDelete(key) { var data = this.__data__, result = data['delete'](key); this.size = data.size; return result; } module.exports = stackDelete; /***/ }, /* 472 */ /***/ function(module, exports) { /** * Gets the stack value for `key`. * * @private * @name get * @memberOf Stack * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function stackGet(key) { return this.__data__.get(key); } module.exports = stackGet; /***/ }, /* 473 */ /***/ function(module, exports) { /** * Checks if a stack value for `key` exists. * * @private * @name has * @memberOf Stack * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
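 *
 * (Editorial example, not in the original source; it assumes the `Stack`
 * constructor from module 461:)
 *
 *     var stack = new Stack([['x', 1]]);
 *     stack.has('x'); // => true
 *     stack.has('y'); // => false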
*/ function stackHas(key) { return this.__data__.has(key); } module.exports = stackHas; /***/ }, /* 474 */ /***/ function(module, exports, __webpack_require__) { var ListCache = __webpack_require__(462), Map = __webpack_require__(475), MapCache = __webpack_require__(490); /** Used as the size to enable large array optimizations. */ var LARGE_ARRAY_SIZE = 200; /** * Sets the stack `key` to `value`. * * @private * @name set * @memberOf Stack * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the stack cache instance. */ function stackSet(key, value) { var data = this.__data__; if (data instanceof ListCache) { var pairs = data.__data__; if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) { pairs.push([key, value]); this.size = ++data.size; return this; } data = this.__data__ = new MapCache(pairs); } data.set(key, value); this.size = data.size; return this; } module.exports = stackSet; /***/ }, /* 475 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476), root = __webpack_require__(481); /* Built-in method references that are verified to be native. */ var Map = getNative(root, 'Map'); module.exports = Map; /***/ }, /* 476 */ /***/ function(module, exports, __webpack_require__) { var baseIsNative = __webpack_require__(477), getValue = __webpack_require__(489); /** * Gets the native function at `key` of `object`. * * @private * @param {Object} object The object to query. * @param {string} key The key of the method to get. * @returns {*} Returns the function if it's native, else `undefined`. */ function getNative(object, key) { var value = getValue(object, key); return baseIsNative(value) ? value : undefined; } module.exports = getNative; /***/ }, /* 477 */ /***/ function(module, exports, __webpack_require__) { var isFunction = __webpack_require__(478), isMasked = __webpack_require__(486), isObject = __webpack_require__(485), toSource = __webpack_require__(488); /** * Used to match `RegExp` * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). */ var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; /** Used to detect host constructors (Safari). */ var reIsHostCtor = /^\[object .+?Constructor\]$/; /** Used for built-in method references. */ var funcProto = Function.prototype, objectProto = Object.prototype; /** Used to resolve the decompiled source of functions. */ var funcToString = funcProto.toString; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** Used to detect if a method is native. */ var reIsNative = RegExp('^' + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' ); /** * The base implementation of `_.isNative` without bad shim checks. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a native function, * else `false`. */ function baseIsNative(value) { if (!isObject(value) || isMasked(value)) { return false; } var pattern = isFunction(value) ? reIsNative : reIsHostCtor; return pattern.test(toSource(value)); } module.exports = baseIsNative; /***/ }, /* 478 */ /***/ function(module, exports, __webpack_require__) { var baseGetTag = __webpack_require__(479), isObject = __webpack_require__(485); /** `Object#toString` result references. 
*/ var asyncTag = '[object AsyncFunction]', funcTag = '[object Function]', genTag = '[object GeneratorFunction]', proxyTag = '[object Proxy]'; /** * Checks if `value` is classified as a `Function` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true * * _.isFunction(/abc/); * // => false */ function isFunction(value) { if (!isObject(value)) { return false; } // The use of `Object#toString` avoids issues with the `typeof` operator // in Safari 9 which returns 'object' for typed arrays and other constructors. var tag = baseGetTag(value); return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; } module.exports = isFunction; /***/ }, /* 479 */ /***/ function(module, exports, __webpack_require__) { var Symbol = __webpack_require__(480), getRawTag = __webpack_require__(483), objectToString = __webpack_require__(484); /** `Object#toString` result references. */ var nullTag = '[object Null]', undefinedTag = '[object Undefined]'; /** Built-in value references. */ var symToStringTag = Symbol ? Symbol.toStringTag : undefined; /** * The base implementation of `getTag` without fallbacks for buggy environments. * * @private * @param {*} value The value to query. * @returns {string} Returns the `toStringTag`. */ function baseGetTag(value) { if (value == null) { return value === undefined ? undefinedTag : nullTag; } return (symToStringTag && symToStringTag in Object(value)) ? getRawTag(value) : objectToString(value); } module.exports = baseGetTag; /***/ }, /* 480 */ /***/ function(module, exports, __webpack_require__) { var root = __webpack_require__(481); /** Built-in value references. */ var Symbol = root.Symbol; module.exports = Symbol; /***/ }, /* 481 */ /***/ function(module, exports, __webpack_require__) { var freeGlobal = __webpack_require__(482); /** Detect free variable `self`. */ var freeSelf = typeof self == 'object' && self && self.Object === Object && self; /** Used as a reference to the global object. */ var root = freeGlobal || freeSelf || Function('return this')(); module.exports = root; /***/ }, /* 482 */ /***/ function(module, exports) { /* WEBPACK VAR INJECTION */(function(global) {/** Detect free variable `global` from Node.js. */ var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; module.exports = freeGlobal; /* WEBPACK VAR INJECTION */}.call(exports, (function() { return this; }()))) /***/ }, /* 483 */ /***/ function(module, exports, __webpack_require__) { var Symbol = __webpack_require__(480); /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. */ var nativeObjectToString = objectProto.toString; /** Built-in value references. */ var symToStringTag = Symbol ? Symbol.toStringTag : undefined; /** * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. * * @private * @param {*} value The value to query. * @returns {string} Returns the raw `toStringTag`. 
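 *
 * (Editorial note, not in the original source:) `Symbol.toStringTag` is
 * temporarily set to `undefined` so `Object.prototype.toString` reports the
 * built-in tag, then the property is restored or deleted. Roughly, assuming a
 * `Symbol`-capable engine:
 *
 *     getRawTag([]); // => '[object Array]'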
*/ function getRawTag(value) { var isOwn = hasOwnProperty.call(value, symToStringTag), tag = value[symToStringTag]; try { value[symToStringTag] = undefined; var unmasked = true; } catch (e) {} var result = nativeObjectToString.call(value); if (unmasked) { if (isOwn) { value[symToStringTag] = tag; } else { delete value[symToStringTag]; } } return result; } module.exports = getRawTag; /***/ }, /* 484 */ /***/ function(module, exports) { /** Used for built-in method references. */ var objectProto = Object.prototype; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. */ var nativeObjectToString = objectProto.toString; /** * Converts `value` to a string using `Object.prototype.toString`. * * @private * @param {*} value The value to convert. * @returns {string} Returns the converted string. */ function objectToString(value) { return nativeObjectToString.call(value); } module.exports = objectToString; /***/ }, /* 485 */ /***/ function(module, exports) { /** * Checks if `value` is the * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(_.noop); * // => true * * _.isObject(null); * // => false */ function isObject(value) { var type = typeof value; return value != null && (type == 'object' || type == 'function'); } module.exports = isObject; /***/ }, /* 486 */ /***/ function(module, exports, __webpack_require__) { var coreJsData = __webpack_require__(487); /** Used to detect methods masquerading as native. */ var maskSrcKey = (function() { var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); return uid ? ('Symbol(src)_1.' + uid) : ''; }()); /** * Checks if `func` has its source masked. * * @private * @param {Function} func The function to check. * @returns {boolean} Returns `true` if `func` is masked, else `false`. */ function isMasked(func) { return !!maskSrcKey && (maskSrcKey in func); } module.exports = isMasked; /***/ }, /* 487 */ /***/ function(module, exports, __webpack_require__) { var root = __webpack_require__(481); /** Used to detect overreaching core-js shims. */ var coreJsData = root['__core-js_shared__']; module.exports = coreJsData; /***/ }, /* 488 */ /***/ function(module, exports) { /** Used for built-in method references. */ var funcProto = Function.prototype; /** Used to resolve the decompiled source of functions. */ var funcToString = funcProto.toString; /** * Converts `func` to its source code. * * @private * @param {Function} func The function to convert. * @returns {string} Returns the source code. */ function toSource(func) { if (func != null) { try { return funcToString.call(func); } catch (e) {} try { return (func + ''); } catch (e) {} } return ''; } module.exports = toSource; /***/ }, /* 489 */ /***/ function(module, exports) { /** * Gets the value at `key` of `object`. * * @private * @param {Object} [object] The object to query. * @param {string} key The key of the property to get. * @returns {*} Returns the property value. */ function getValue(object, key) { return object == null ? 
undefined : object[key]; } module.exports = getValue; /***/ }, /* 490 */ /***/ function(module, exports, __webpack_require__) { var mapCacheClear = __webpack_require__(491), mapCacheDelete = __webpack_require__(499), mapCacheGet = __webpack_require__(502), mapCacheHas = __webpack_require__(503), mapCacheSet = __webpack_require__(504); /** * Creates a map cache object to store key-value pairs. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function MapCache(entries) { var index = -1, length = entries == null ? 0 : entries.length; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } // Add methods to `MapCache`. MapCache.prototype.clear = mapCacheClear; MapCache.prototype['delete'] = mapCacheDelete; MapCache.prototype.get = mapCacheGet; MapCache.prototype.has = mapCacheHas; MapCache.prototype.set = mapCacheSet; module.exports = MapCache; /***/ }, /* 491 */ /***/ function(module, exports, __webpack_require__) { var Hash = __webpack_require__(492), ListCache = __webpack_require__(462), Map = __webpack_require__(475); /** * Removes all key-value entries from the map. * * @private * @name clear * @memberOf MapCache */ function mapCacheClear() { this.size = 0; this.__data__ = { 'hash': new Hash, 'map': new (Map || ListCache), 'string': new Hash }; } module.exports = mapCacheClear; /***/ }, /* 492 */ /***/ function(module, exports, __webpack_require__) { var hashClear = __webpack_require__(493), hashDelete = __webpack_require__(495), hashGet = __webpack_require__(496), hashHas = __webpack_require__(497), hashSet = __webpack_require__(498); /** * Creates a hash object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function Hash(entries) { var index = -1, length = entries == null ? 0 : entries.length; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } // Add methods to `Hash`. Hash.prototype.clear = hashClear; Hash.prototype['delete'] = hashDelete; Hash.prototype.get = hashGet; Hash.prototype.has = hashHas; Hash.prototype.set = hashSet; module.exports = Hash; /***/ }, /* 493 */ /***/ function(module, exports, __webpack_require__) { var nativeCreate = __webpack_require__(494); /** * Removes all key-value entries from the hash. * * @private * @name clear * @memberOf Hash */ function hashClear() { this.__data__ = nativeCreate ? nativeCreate(null) : {}; this.size = 0; } module.exports = hashClear; /***/ }, /* 494 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476); /* Built-in method references that are verified to be native. */ var nativeCreate = getNative(Object, 'create'); module.exports = nativeCreate; /***/ }, /* 495 */ /***/ function(module, exports) { /** * Removes `key` and its value from the hash. * * @private * @name delete * @memberOf Hash * @param {Object} hash The hash to modify. * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function hashDelete(key) { var result = this.has(key) && delete this.__data__[key]; this.size -= result ? 1 : 0; return result; } module.exports = hashDelete; /***/ }, /* 496 */ /***/ function(module, exports, __webpack_require__) { var nativeCreate = __webpack_require__(494); /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** Used for built-in method references. 
*/ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Gets the hash value for `key`. * * @private * @name get * @memberOf Hash * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function hashGet(key) { var data = this.__data__; if (nativeCreate) { var result = data[key]; return result === HASH_UNDEFINED ? undefined : result; } return hasOwnProperty.call(data, key) ? data[key] : undefined; } module.exports = hashGet; /***/ }, /* 497 */ /***/ function(module, exports, __webpack_require__) { var nativeCreate = __webpack_require__(494); /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Checks if a hash value for `key` exists. * * @private * @name has * @memberOf Hash * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function hashHas(key) { var data = this.__data__; return nativeCreate ? (data[key] !== undefined) : hasOwnProperty.call(data, key); } module.exports = hashHas; /***/ }, /* 498 */ /***/ function(module, exports, __webpack_require__) { var nativeCreate = __webpack_require__(494); /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** * Sets the hash `key` to `value`. * * @private * @name set * @memberOf Hash * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the hash instance. */ function hashSet(key, value) { var data = this.__data__; this.size += this.has(key) ? 0 : 1; data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; return this; } module.exports = hashSet; /***/ }, /* 499 */ /***/ function(module, exports, __webpack_require__) { var getMapData = __webpack_require__(500); /** * Removes `key` and its value from the map. * * @private * @name delete * @memberOf MapCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function mapCacheDelete(key) { var result = getMapData(this, key)['delete'](key); this.size -= result ? 1 : 0; return result; } module.exports = mapCacheDelete; /***/ }, /* 500 */ /***/ function(module, exports, __webpack_require__) { var isKeyable = __webpack_require__(501); /** * Gets the data for `map`. * * @private * @param {Object} map The map to query. * @param {string} key The reference key. * @returns {*} Returns the map data. */ function getMapData(map, key) { var data = map.__data__; return isKeyable(key) ? data[typeof key == 'string' ? 'string' : 'hash'] : data.map; } module.exports = getMapData; /***/ }, /* 501 */ /***/ function(module, exports) { /** * Checks if `value` is suitable for use as unique object key. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is suitable, else `false`. */ function isKeyable(value) { var type = typeof value; return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') ? (value !== '__proto__') : (value === null); } module.exports = isKeyable; /***/ }, /* 502 */ /***/ function(module, exports, __webpack_require__) { var getMapData = __webpack_require__(500); /** * Gets the map value for `key`. 
* * @private * @name get * @memberOf MapCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function mapCacheGet(key) { return getMapData(this, key).get(key); } module.exports = mapCacheGet; /***/ }, /* 503 */ /***/ function(module, exports, __webpack_require__) { var getMapData = __webpack_require__(500); /** * Checks if a map value for `key` exists. * * @private * @name has * @memberOf MapCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function mapCacheHas(key) { return getMapData(this, key).has(key); } module.exports = mapCacheHas; /***/ }, /* 504 */ /***/ function(module, exports, __webpack_require__) { var getMapData = __webpack_require__(500); /** * Sets the map `key` to `value`. * * @private * @name set * @memberOf MapCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the map cache instance. */ function mapCacheSet(key, value) { var data = getMapData(this, key), size = data.size; data.set(key, value); this.size += data.size == size ? 0 : 1; return this; } module.exports = mapCacheSet; /***/ }, /* 505 */ /***/ function(module, exports, __webpack_require__) { var baseIsEqualDeep = __webpack_require__(506), isObjectLike = __webpack_require__(530); /** * The base implementation of `_.isEqual` which supports partial comparisons * and tracks traversed objects. * * @private * @param {*} value The value to compare. * @param {*} other The other value to compare. * @param {boolean} bitmask The bitmask flags. * 1 - Unordered comparison * 2 - Partial comparison * @param {Function} [customizer] The function to customize comparisons. * @param {Object} [stack] Tracks traversed `value` and `other` objects. * @returns {boolean} Returns `true` if the values are equivalent, else `false`. */ function baseIsEqual(value, other, bitmask, customizer, stack) { if (value === other) { return true; } if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) { return value !== value && other !== other; } return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack); } module.exports = baseIsEqual; /***/ }, /* 506 */ /***/ function(module, exports, __webpack_require__) { var Stack = __webpack_require__(461), equalArrays = __webpack_require__(507), equalByTag = __webpack_require__(513), equalObjects = __webpack_require__(517), getTag = __webpack_require__(544), isArray = __webpack_require__(521), isBuffer = __webpack_require__(531), isTypedArray = __webpack_require__(534); /** Used to compose bitmasks for value comparisons. */ var COMPARE_PARTIAL_FLAG = 1; /** `Object#toString` result references. */ var argsTag = '[object Arguments]', arrayTag = '[object Array]', objectTag = '[object Object]'; /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * A specialized version of `baseIsEqual` for arrays and objects which performs * deep comparisons and tracks traversed objects enabling objects with circular * references to be compared. * * @private * @param {Object} object The object to compare. * @param {Object} other The other object to compare. * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. * @param {Function} customizer The function to customize comparisons. 
* @param {Function} equalFunc The function to determine equivalents of values. * @param {Object} [stack] Tracks traversed `object` and `other` objects. * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. */ function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) { var objIsArr = isArray(object), othIsArr = isArray(other), objTag = objIsArr ? arrayTag : getTag(object), othTag = othIsArr ? arrayTag : getTag(other); objTag = objTag == argsTag ? objectTag : objTag; othTag = othTag == argsTag ? objectTag : othTag; var objIsObj = objTag == objectTag, othIsObj = othTag == objectTag, isSameTag = objTag == othTag; if (isSameTag && isBuffer(object)) { if (!isBuffer(other)) { return false; } objIsArr = true; objIsObj = false; } if (isSameTag && !objIsObj) { stack || (stack = new Stack); return (objIsArr || isTypedArray(object)) ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); } if (!(bitmask & COMPARE_PARTIAL_FLAG)) { var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); if (objIsWrapped || othIsWrapped) { var objUnwrapped = objIsWrapped ? object.value() : object, othUnwrapped = othIsWrapped ? other.value() : other; stack || (stack = new Stack); return equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); } } if (!isSameTag) { return false; } stack || (stack = new Stack); return equalObjects(object, other, bitmask, customizer, equalFunc, stack); } module.exports = baseIsEqualDeep; /***/ }, /* 507 */ /***/ function(module, exports, __webpack_require__) { var SetCache = __webpack_require__(508), arraySome = __webpack_require__(511), cacheHas = __webpack_require__(512); /** Used to compose bitmasks for value comparisons. */ var COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2; /** * A specialized version of `baseIsEqualDeep` for arrays with support for * partial deep comparisons. * * @private * @param {Array} array The array to compare. * @param {Array} other The other array to compare. * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. * @param {Function} customizer The function to customize comparisons. * @param {Function} equalFunc The function to determine equivalents of values. * @param {Object} stack Tracks traversed `array` and `other` objects. * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. */ function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { var isPartial = bitmask & COMPARE_PARTIAL_FLAG, arrLength = array.length, othLength = other.length; if (arrLength != othLength && !(isPartial && othLength > arrLength)) { return false; } // Assume cyclic values are equal. var stacked = stack.get(array); if (stacked && stack.get(other)) { return stacked == other; } var index = -1, result = true, seen = (bitmask & COMPARE_UNORDERED_FLAG) ? new SetCache : undefined; stack.set(array, other); stack.set(other, array); // Ignore non-index properties. while (++index < arrLength) { var arrValue = array[index], othValue = other[index]; if (customizer) { var compared = isPartial ? customizer(othValue, arrValue, index, other, array, stack) : customizer(arrValue, othValue, index, array, other, stack); } if (compared !== undefined) { if (compared) { continue; } result = false; break; } // Recursively compare arrays (susceptible to call stack limits). 
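    // Editorial note (not part of the original bundle): when the
    // COMPARE_UNORDERED_FLAG bit is set, `seen` is a SetCache and each
    // `arrValue` may match any not-yet-matched index of `other`; otherwise
    // elements are compared position by position. Illustrative calls to
    // `baseIsEqual` (module 505), which delegates here for arrays:
    //
    //   baseIsEqual([1, 2], [2, 1], COMPARE_UNORDERED_FLAG); // => true
    //   baseIsEqual([1, 2], [2, 1], 0);                      // => false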
if (seen) { if (!arraySome(other, function(othValue, othIndex) { if (!cacheHas(seen, othIndex) && (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { return seen.push(othIndex); } })) { result = false; break; } } else if (!( arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack) )) { result = false; break; } } stack['delete'](array); stack['delete'](other); return result; } module.exports = equalArrays; /***/ }, /* 508 */ /***/ function(module, exports, __webpack_require__) { var MapCache = __webpack_require__(490), setCacheAdd = __webpack_require__(509), setCacheHas = __webpack_require__(510); /** * * Creates an array cache object to store unique values. * * @private * @constructor * @param {Array} [values] The values to cache. */ function SetCache(values) { var index = -1, length = values == null ? 0 : values.length; this.__data__ = new MapCache; while (++index < length) { this.add(values[index]); } } // Add methods to `SetCache`. SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; SetCache.prototype.has = setCacheHas; module.exports = SetCache; /***/ }, /* 509 */ /***/ function(module, exports) { /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** * Adds `value` to the array cache. * * @private * @name add * @memberOf SetCache * @alias push * @param {*} value The value to cache. * @returns {Object} Returns the cache instance. */ function setCacheAdd(value) { this.__data__.set(value, HASH_UNDEFINED); return this; } module.exports = setCacheAdd; /***/ }, /* 510 */ /***/ function(module, exports) { /** * Checks if `value` is in the array cache. * * @private * @name has * @memberOf SetCache * @param {*} value The value to search for. * @returns {number} Returns `true` if `value` is found, else `false`. */ function setCacheHas(value) { return this.__data__.has(value); } module.exports = setCacheHas; /***/ }, /* 511 */ /***/ function(module, exports) { /** * A specialized version of `_.some` for arrays without support for iteratee * shorthands. * * @private * @param {Array} [array] The array to iterate over. * @param {Function} predicate The function invoked per iteration. * @returns {boolean} Returns `true` if any element passes the predicate check, * else `false`. */ function arraySome(array, predicate) { var index = -1, length = array == null ? 0 : array.length; while (++index < length) { if (predicate(array[index], index, array)) { return true; } } return false; } module.exports = arraySome; /***/ }, /* 512 */ /***/ function(module, exports) { /** * Checks if a `cache` value for `key` exists. * * @private * @param {Object} cache The cache to query. * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function cacheHas(cache, key) { return cache.has(key); } module.exports = cacheHas; /***/ }, /* 513 */ /***/ function(module, exports, __webpack_require__) { var Symbol = __webpack_require__(480), Uint8Array = __webpack_require__(514), eq = __webpack_require__(466), equalArrays = __webpack_require__(507), mapToArray = __webpack_require__(515), setToArray = __webpack_require__(516); /** Used to compose bitmasks for value comparisons. */ var COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2; /** `Object#toString` result references. 
*/ var boolTag = '[object Boolean]', dateTag = '[object Date]', errorTag = '[object Error]', mapTag = '[object Map]', numberTag = '[object Number]', regexpTag = '[object RegExp]', setTag = '[object Set]', stringTag = '[object String]', symbolTag = '[object Symbol]'; var arrayBufferTag = '[object ArrayBuffer]', dataViewTag = '[object DataView]'; /** Used to convert symbols to primitives and strings. */ var symbolProto = Symbol ? Symbol.prototype : undefined, symbolValueOf = symbolProto ? symbolProto.valueOf : undefined; /** * A specialized version of `baseIsEqualDeep` for comparing objects of * the same `toStringTag`. * * **Note:** This function only supports comparing values with tags of * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. * * @private * @param {Object} object The object to compare. * @param {Object} other The other object to compare. * @param {string} tag The `toStringTag` of the objects to compare. * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. * @param {Function} customizer The function to customize comparisons. * @param {Function} equalFunc The function to determine equivalents of values. * @param {Object} stack Tracks traversed `object` and `other` objects. * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. */ function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { switch (tag) { case dataViewTag: if ((object.byteLength != other.byteLength) || (object.byteOffset != other.byteOffset)) { return false; } object = object.buffer; other = other.buffer; case arrayBufferTag: if ((object.byteLength != other.byteLength) || !equalFunc(new Uint8Array(object), new Uint8Array(other))) { return false; } return true; case boolTag: case dateTag: case numberTag: // Coerce booleans to `1` or `0` and dates to milliseconds. // Invalid dates are coerced to `NaN`. return eq(+object, +other); case errorTag: return object.name == other.name && object.message == other.message; case regexpTag: case stringTag: // Coerce regexes to strings and treat strings, primitives and objects, // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring // for more details. return object == (other + ''); case mapTag: var convert = mapToArray; case setTag: var isPartial = bitmask & COMPARE_PARTIAL_FLAG; convert || (convert = setToArray); if (object.size != other.size && !isPartial) { return false; } // Assume cyclic values are equal. var stacked = stack.get(object); if (stacked) { return stacked == other; } bitmask |= COMPARE_UNORDERED_FLAG; // Recursively compare objects (susceptible to call stack limits). stack.set(object, other); var result = equalArrays(convert(object), convert(other), bitmask, customizer, equalFunc, stack); stack['delete'](object); return result; case symbolTag: if (symbolValueOf) { return symbolValueOf.call(object) == symbolValueOf.call(other); } } return false; } module.exports = equalByTag; /***/ }, /* 514 */ /***/ function(module, exports, __webpack_require__) { var root = __webpack_require__(481); /** Built-in value references. */ var Uint8Array = root.Uint8Array; module.exports = Uint8Array; /***/ }, /* 515 */ /***/ function(module, exports) { /** * Converts `map` to its key-value pairs. * * @private * @param {Object} map The map to convert. * @returns {Array} Returns the key-value pairs. 
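 *
 * (Editorial example, not in the original source; it requires a native `Map`:)
 *
 *     mapToArray(new Map([['a', 1], ['b', 2]]));
 *     // => [['a', 1], ['b', 2]]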
*/ function mapToArray(map) { var index = -1, result = Array(map.size); map.forEach(function(value, key) { result[++index] = [key, value]; }); return result; } module.exports = mapToArray; /***/ }, /* 516 */ /***/ function(module, exports) { /** * Converts `set` to an array of its values. * * @private * @param {Object} set The set to convert. * @returns {Array} Returns the values. */ function setToArray(set) { var index = -1, result = Array(set.size); set.forEach(function(value) { result[++index] = value; }); return result; } module.exports = setToArray; /***/ }, /* 517 */ /***/ function(module, exports, __webpack_require__) { var getAllKeys = __webpack_require__(518); /** Used to compose bitmasks for value comparisons. */ var COMPARE_PARTIAL_FLAG = 1; /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * A specialized version of `baseIsEqualDeep` for objects with support for * partial deep comparisons. * * @private * @param {Object} object The object to compare. * @param {Object} other The other object to compare. * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. * @param {Function} customizer The function to customize comparisons. * @param {Function} equalFunc The function to determine equivalents of values. * @param {Object} stack Tracks traversed `object` and `other` objects. * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. */ function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { var isPartial = bitmask & COMPARE_PARTIAL_FLAG, objProps = getAllKeys(object), objLength = objProps.length, othProps = getAllKeys(other), othLength = othProps.length; if (objLength != othLength && !isPartial) { return false; } var index = objLength; while (index--) { var key = objProps[index]; if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { return false; } } // Assume cyclic values are equal. var stacked = stack.get(object); if (stacked && stack.get(other)) { return stacked == other; } var result = true; stack.set(object, other); stack.set(other, object); var skipCtor = isPartial; while (++index < objLength) { key = objProps[index]; var objValue = object[key], othValue = other[key]; if (customizer) { var compared = isPartial ? customizer(othValue, objValue, key, other, object, stack) : customizer(objValue, othValue, key, object, other, stack); } // Recursively compare objects (susceptible to call stack limits). if (!(compared === undefined ? (objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack)) : compared )) { result = false; break; } skipCtor || (skipCtor = key == 'constructor'); } if (result && !skipCtor) { var objCtor = object.constructor, othCtor = other.constructor; // Non `Object` object instances with different constructors are not equal. if (objCtor != othCtor && ('constructor' in object && 'constructor' in other) && !(typeof objCtor == 'function' && objCtor instanceof objCtor && typeof othCtor == 'function' && othCtor instanceof othCtor)) { result = false; } } stack['delete'](object); stack['delete'](other); return result; } module.exports = equalObjects; /***/ }, /* 518 */ /***/ function(module, exports, __webpack_require__) { var baseGetAllKeys = __webpack_require__(519), getSymbols = __webpack_require__(522), keys = __webpack_require__(525); /** * Creates an array of own enumerable property names and symbols of `object`. 
* * @private * @param {Object} object The object to query. * @returns {Array} Returns the array of property names and symbols. */ function getAllKeys(object) { return baseGetAllKeys(object, keys, getSymbols); } module.exports = getAllKeys; /***/ }, /* 519 */ /***/ function(module, exports, __webpack_require__) { var arrayPush = __webpack_require__(520), isArray = __webpack_require__(521); /** * The base implementation of `getAllKeys` and `getAllKeysIn` which uses * `keysFunc` and `symbolsFunc` to get the enumerable property names and * symbols of `object`. * * @private * @param {Object} object The object to query. * @param {Function} keysFunc The function to get the keys of `object`. * @param {Function} symbolsFunc The function to get the symbols of `object`. * @returns {Array} Returns the array of property names and symbols. */ function baseGetAllKeys(object, keysFunc, symbolsFunc) { var result = keysFunc(object); return isArray(object) ? result : arrayPush(result, symbolsFunc(object)); } module.exports = baseGetAllKeys; /***/ }, /* 520 */ /***/ function(module, exports) { /** * Appends the elements of `values` to `array`. * * @private * @param {Array} array The array to modify. * @param {Array} values The values to append. * @returns {Array} Returns `array`. */ function arrayPush(array, values) { var index = -1, length = values.length, offset = array.length; while (++index < length) { array[offset + index] = values[index]; } return array; } module.exports = arrayPush; /***/ }, /* 521 */ /***/ function(module, exports) { /** * Checks if `value` is classified as an `Array` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an array, else `false`. * @example * * _.isArray([1, 2, 3]); * // => true * * _.isArray(document.body.children); * // => false * * _.isArray('abc'); * // => false * * _.isArray(_.noop); * // => false */ var isArray = Array.isArray; module.exports = isArray; /***/ }, /* 522 */ /***/ function(module, exports, __webpack_require__) { var arrayFilter = __webpack_require__(523), stubArray = __webpack_require__(524); /** Used for built-in method references. */ var objectProto = Object.prototype; /** Built-in value references. */ var propertyIsEnumerable = objectProto.propertyIsEnumerable; /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeGetSymbols = Object.getOwnPropertySymbols; /** * Creates an array of the own enumerable symbols of `object`. * * @private * @param {Object} object The object to query. * @returns {Array} Returns the array of symbols. */ var getSymbols = !nativeGetSymbols ? stubArray : function(object) { if (object == null) { return []; } object = Object(object); return arrayFilter(nativeGetSymbols(object), function(symbol) { return propertyIsEnumerable.call(object, symbol); }); }; module.exports = getSymbols; /***/ }, /* 523 */ /***/ function(module, exports) { /** * A specialized version of `_.filter` for arrays without support for * iteratee shorthands. * * @private * @param {Array} [array] The array to iterate over. * @param {Function} predicate The function invoked per iteration. * @returns {Array} Returns the new filtered array. */ function arrayFilter(array, predicate) { var index = -1, length = array == null ? 
0 : array.length, resIndex = 0, result = []; while (++index < length) { var value = array[index]; if (predicate(value, index, array)) { result[resIndex++] = value; } } return result; } module.exports = arrayFilter; /***/ }, /* 524 */ /***/ function(module, exports) { /** * This method returns a new empty array. * * @static * @memberOf _ * @since 4.13.0 * @category Util * @returns {Array} Returns the new empty array. * @example * * var arrays = _.times(2, _.stubArray); * * console.log(arrays); * // => [[], []] * * console.log(arrays[0] === arrays[1]); * // => false */ function stubArray() { return []; } module.exports = stubArray; /***/ }, /* 525 */ /***/ function(module, exports, __webpack_require__) { var arrayLikeKeys = __webpack_require__(526), baseKeys = __webpack_require__(539), isArrayLike = __webpack_require__(543); /** * Creates an array of the own enumerable property names of `object`. * * **Note:** Non-object values are coerced to objects. See the * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) * for more details. * * @static * @since 0.1.0 * @memberOf _ * @category Object * @param {Object} object The object to query. * @returns {Array} Returns the array of property names. * @example * * function Foo() { * this.a = 1; * this.b = 2; * } * * Foo.prototype.c = 3; * * _.keys(new Foo); * // => ['a', 'b'] (iteration order is not guaranteed) * * _.keys('hi'); * // => ['0', '1'] */ function keys(object) { return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object); } module.exports = keys; /***/ }, /* 526 */ /***/ function(module, exports, __webpack_require__) { var baseTimes = __webpack_require__(527), isArguments = __webpack_require__(528), isArray = __webpack_require__(521), isBuffer = __webpack_require__(531), isIndex = __webpack_require__(533), isTypedArray = __webpack_require__(534); /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Creates an array of the enumerable property names of the array-like `value`. * * @private * @param {*} value The value to query. * @param {boolean} inherited Specify returning inherited property names. * @returns {Array} Returns the array of property names. */ function arrayLikeKeys(value, inherited) { var isArr = isArray(value), isArg = !isArr && isArguments(value), isBuff = !isArr && !isArg && isBuffer(value), isType = !isArr && !isArg && !isBuff && isTypedArray(value), skipIndexes = isArr || isArg || isBuff || isType, result = skipIndexes ? baseTimes(value.length, String) : [], length = result.length; for (var key in value) { if ((inherited || hasOwnProperty.call(value, key)) && !(skipIndexes && ( // Safari 9 has enumerable `arguments.length` in strict mode. key == 'length' || // Node.js 0.10 has enumerable non-index properties on buffers. (isBuff && (key == 'offset' || key == 'parent')) || // PhantomJS 2 has enumerable non-index properties on typed arrays. (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || // Skip index properties. isIndex(key, length) ))) { result.push(key); } } return result; } module.exports = arrayLikeKeys; /***/ }, /* 527 */ /***/ function(module, exports) { /** * The base implementation of `_.times` without support for iteratee shorthands * or max array length checks. * * @private * @param {number} n The number of times to invoke `iteratee`. * @param {Function} iteratee The function invoked per iteration. 
* @returns {Array} Returns the array of results. */ function baseTimes(n, iteratee) { var index = -1, result = Array(n); while (++index < n) { result[index] = iteratee(index); } return result; } module.exports = baseTimes; /***/ }, /* 528 */ /***/ function(module, exports, __webpack_require__) { var baseIsArguments = __webpack_require__(529), isObjectLike = __webpack_require__(530); /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** Built-in value references. */ var propertyIsEnumerable = objectProto.propertyIsEnumerable; /** * Checks if `value` is likely an `arguments` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an `arguments` object, * else `false`. * @example * * _.isArguments(function() { return arguments; }()); * // => true * * _.isArguments([1, 2, 3]); * // => false */ var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && !propertyIsEnumerable.call(value, 'callee'); }; module.exports = isArguments; /***/ }, /* 529 */ /***/ function(module, exports, __webpack_require__) { var baseGetTag = __webpack_require__(479), isObjectLike = __webpack_require__(530); /** `Object#toString` result references. */ var argsTag = '[object Arguments]'; /** * The base implementation of `_.isArguments`. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an `arguments` object, */ function baseIsArguments(value) { return isObjectLike(value) && baseGetTag(value) == argsTag; } module.exports = baseIsArguments; /***/ }, /* 530 */ /***/ function(module, exports) { /** * Checks if `value` is object-like. A value is object-like if it's not `null` * and has a `typeof` result of "object". * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is object-like, else `false`. * @example * * _.isObjectLike({}); * // => true * * _.isObjectLike([1, 2, 3]); * // => true * * _.isObjectLike(_.noop); * // => false * * _.isObjectLike(null); * // => false */ function isObjectLike(value) { return value != null && typeof value == 'object'; } module.exports = isObjectLike; /***/ }, /* 531 */ /***/ function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(module) {var root = __webpack_require__(481), stubFalse = __webpack_require__(532); /** Detect free variable `exports`. */ var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; /** Detect free variable `module`. */ var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; /** Detect the popular CommonJS extension `module.exports`. */ var moduleExports = freeModule && freeModule.exports === freeExports; /** Built-in value references. */ var Buffer = moduleExports ? root.Buffer : undefined; /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined; /** * Checks if `value` is a buffer. * * @static * @memberOf _ * @since 4.3.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. 
* @example * * _.isBuffer(new Buffer(2)); * // => true * * _.isBuffer(new Uint8Array(2)); * // => false */ var isBuffer = nativeIsBuffer || stubFalse; module.exports = isBuffer; /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(344)(module))) /***/ }, /* 532 */ /***/ function(module, exports) { /** * This method returns `false`. * * @static * @memberOf _ * @since 4.13.0 * @category Util * @returns {boolean} Returns `false`. * @example * * _.times(2, _.stubFalse); * // => [false, false] */ function stubFalse() { return false; } module.exports = stubFalse; /***/ }, /* 533 */ /***/ function(module, exports) { /** Used as references for various `Number` constants. */ var MAX_SAFE_INTEGER = 9007199254740991; /** Used to detect unsigned integer values. */ var reIsUint = /^(?:0|[1-9]\d*)$/; /** * Checks if `value` is a valid array-like index. * * @private * @param {*} value The value to check. * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. */ function isIndex(value, length) { length = length == null ? MAX_SAFE_INTEGER : length; return !!length && (typeof value == 'number' || reIsUint.test(value)) && (value > -1 && value % 1 == 0 && value < length); } module.exports = isIndex; /***/ }, /* 534 */ /***/ function(module, exports, __webpack_require__) { var baseIsTypedArray = __webpack_require__(535), baseUnary = __webpack_require__(537), nodeUtil = __webpack_require__(538); /* Node.js helper references. */ var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; /** * Checks if `value` is classified as a typed array. * * @static * @memberOf _ * @since 3.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. * @example * * _.isTypedArray(new Uint8Array); * // => true * * _.isTypedArray([]); * // => false */ var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; module.exports = isTypedArray; /***/ }, /* 535 */ /***/ function(module, exports, __webpack_require__) { var baseGetTag = __webpack_require__(479), isLength = __webpack_require__(536), isObjectLike = __webpack_require__(530); /** `Object#toString` result references. */ var argsTag = '[object Arguments]', arrayTag = '[object Array]', boolTag = '[object Boolean]', dateTag = '[object Date]', errorTag = '[object Error]', funcTag = '[object Function]', mapTag = '[object Map]', numberTag = '[object Number]', objectTag = '[object Object]', regexpTag = '[object RegExp]', setTag = '[object Set]', stringTag = '[object String]', weakMapTag = '[object WeakMap]'; var arrayBufferTag = '[object ArrayBuffer]', dataViewTag = '[object DataView]', float32Tag = '[object Float32Array]', float64Tag = '[object Float64Array]', int8Tag = '[object Int8Array]', int16Tag = '[object Int16Array]', int32Tag = '[object Int32Array]', uint8Tag = '[object Uint8Array]', uint8ClampedTag = '[object Uint8ClampedArray]', uint16Tag = '[object Uint16Array]', uint32Tag = '[object Uint32Array]'; /** Used to identify `toStringTag` values of typed arrays. 
*/ var typedArrayTags = {}; typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = typedArrayTags[uint32Tag] = true; typedArrayTags[argsTag] = typedArrayTags[arrayTag] = typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = typedArrayTags[errorTag] = typedArrayTags[funcTag] = typedArrayTags[mapTag] = typedArrayTags[numberTag] = typedArrayTags[objectTag] = typedArrayTags[regexpTag] = typedArrayTags[setTag] = typedArrayTags[stringTag] = typedArrayTags[weakMapTag] = false; /** * The base implementation of `_.isTypedArray` without Node.js optimizations. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. */ function baseIsTypedArray(value) { return isObjectLike(value) && isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; } module.exports = baseIsTypedArray; /***/ }, /* 536 */ /***/ function(module, exports) { /** Used as references for various `Number` constants. */ var MAX_SAFE_INTEGER = 9007199254740991; /** * Checks if `value` is a valid array-like length. * * **Note:** This method is loosely based on * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. * @example * * _.isLength(3); * // => true * * _.isLength(Number.MIN_VALUE); * // => false * * _.isLength(Infinity); * // => false * * _.isLength('3'); * // => false */ function isLength(value) { return typeof value == 'number' && value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; } module.exports = isLength; /***/ }, /* 537 */ /***/ function(module, exports) { /** * The base implementation of `_.unary` without support for storing metadata. * * @private * @param {Function} func The function to cap arguments for. * @returns {Function} Returns the new capped function. */ function baseUnary(func) { return function(value) { return func(value); }; } module.exports = baseUnary; /***/ }, /* 538 */ /***/ function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(module) {var freeGlobal = __webpack_require__(482); /** Detect free variable `exports`. */ var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; /** Detect free variable `module`. */ var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; /** Detect the popular CommonJS extension `module.exports`. */ var moduleExports = freeModule && freeModule.exports === freeExports; /** Detect free variable `process` from Node.js. */ var freeProcess = moduleExports && freeGlobal.process; /** Used to access faster Node.js helpers. */ var nodeUtil = (function() { try { return freeProcess && freeProcess.binding && freeProcess.binding('util'); } catch (e) {} }()); module.exports = nodeUtil; /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(344)(module))) /***/ }, /* 539 */ /***/ function(module, exports, __webpack_require__) { var isPrototype = __webpack_require__(540), nativeKeys = __webpack_require__(541); /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. 
*/ var hasOwnProperty = objectProto.hasOwnProperty; /** * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. * * @private * @param {Object} object The object to query. * @returns {Array} Returns the array of property names. */ function baseKeys(object) { if (!isPrototype(object)) { return nativeKeys(object); } var result = []; for (var key in Object(object)) { if (hasOwnProperty.call(object, key) && key != 'constructor') { result.push(key); } } return result; } module.exports = baseKeys; /***/ }, /* 540 */ /***/ function(module, exports) { /** Used for built-in method references. */ var objectProto = Object.prototype; /** * Checks if `value` is likely a prototype object. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. */ function isPrototype(value) { var Ctor = value && value.constructor, proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; return value === proto; } module.exports = isPrototype; /***/ }, /* 541 */ /***/ function(module, exports, __webpack_require__) { var overArg = __webpack_require__(542); /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeKeys = overArg(Object.keys, Object); module.exports = nativeKeys; /***/ }, /* 542 */ /***/ function(module, exports) { /** * Creates a unary function that invokes `func` with its argument transformed. * * @private * @param {Function} func The function to wrap. * @param {Function} transform The argument transform. * @returns {Function} Returns the new function. */ function overArg(func, transform) { return function(arg) { return func(transform(arg)); }; } module.exports = overArg; /***/ }, /* 543 */ /***/ function(module, exports, __webpack_require__) { var isFunction = __webpack_require__(478), isLength = __webpack_require__(536); /** * Checks if `value` is array-like. A value is considered array-like if it's * not a function and has a `value.length` that's an integer greater than or * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is array-like, else `false`. * @example * * _.isArrayLike([1, 2, 3]); * // => true * * _.isArrayLike(document.body.children); * // => true * * _.isArrayLike('abc'); * // => true * * _.isArrayLike(_.noop); * // => false */ function isArrayLike(value) { return value != null && isLength(value.length) && !isFunction(value); } module.exports = isArrayLike; /***/ }, /* 544 */ /***/ function(module, exports, __webpack_require__) { var DataView = __webpack_require__(545), Map = __webpack_require__(475), Promise = __webpack_require__(546), Set = __webpack_require__(547), WeakMap = __webpack_require__(548), baseGetTag = __webpack_require__(479), toSource = __webpack_require__(488); /** `Object#toString` result references. */ var mapTag = '[object Map]', objectTag = '[object Object]', promiseTag = '[object Promise]', setTag = '[object Set]', weakMapTag = '[object WeakMap]'; var dataViewTag = '[object DataView]'; /** Used to detect maps, sets, and weakmaps. */ var dataViewCtorString = toSource(DataView), mapCtorString = toSource(Map), promiseCtorString = toSource(Promise), setCtorString = toSource(Set), weakMapCtorString = toSource(WeakMap); /** * Gets the `toStringTag` of `value`. * * @private * @param {*} value The value to query. * @returns {string} Returns the `toStringTag`. 
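 *
 * A minimal illustrative sketch of the values it yields:
 *
 *   getTag(new Map);   // => '[object Map]'
 *   getTag([1, 2, 3]); // => '[object Array]'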
*/ var getTag = baseGetTag; // Fallback for data views, maps, sets, and weak maps in IE 11 and promises in Node.js < 6. if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) || (Map && getTag(new Map) != mapTag) || (Promise && getTag(Promise.resolve()) != promiseTag) || (Set && getTag(new Set) != setTag) || (WeakMap && getTag(new WeakMap) != weakMapTag)) { getTag = function(value) { var result = baseGetTag(value), Ctor = result == objectTag ? value.constructor : undefined, ctorString = Ctor ? toSource(Ctor) : ''; if (ctorString) { switch (ctorString) { case dataViewCtorString: return dataViewTag; case mapCtorString: return mapTag; case promiseCtorString: return promiseTag; case setCtorString: return setTag; case weakMapCtorString: return weakMapTag; } } return result; }; } module.exports = getTag; /***/ }, /* 545 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476), root = __webpack_require__(481); /* Built-in method references that are verified to be native. */ var DataView = getNative(root, 'DataView'); module.exports = DataView; /***/ }, /* 546 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476), root = __webpack_require__(481); /* Built-in method references that are verified to be native. */ var Promise = getNative(root, 'Promise'); module.exports = Promise; /***/ }, /* 547 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476), root = __webpack_require__(481); /* Built-in method references that are verified to be native. */ var Set = getNative(root, 'Set'); module.exports = Set; /***/ }, /* 548 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476), root = __webpack_require__(481); /* Built-in method references that are verified to be native. */ var WeakMap = getNative(root, 'WeakMap'); module.exports = WeakMap; /***/ }, /* 549 */ /***/ function(module, exports, __webpack_require__) { var isStrictComparable = __webpack_require__(550), keys = __webpack_require__(525); /** * Gets the property names, values, and compare flags of `object`. * * @private * @param {Object} object The object to query. * @returns {Array} Returns the match data of `object`. */ function getMatchData(object) { var result = keys(object), length = result.length; while (length--) { var key = result[length], value = object[key]; result[length] = [key, value, isStrictComparable(value)]; } return result; } module.exports = getMatchData; /***/ }, /* 550 */ /***/ function(module, exports, __webpack_require__) { var isObject = __webpack_require__(485); /** * Checks if `value` is suitable for strict equality comparisons, i.e. `===`. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` if suitable for strict * equality comparisons, else `false`. */ function isStrictComparable(value) { return value === value && !isObject(value); } module.exports = isStrictComparable; /***/ }, /* 551 */ /***/ function(module, exports) { /** * A specialized version of `matchesProperty` for source values suitable * for strict equality comparisons, i.e. `===`. * * @private * @param {string} key The key of the property to get. * @param {*} srcValue The value to match. * @returns {Function} Returns the new spec function. 
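 *
 * A brief sketch of the spec function this builds (illustrative values):
 *
 *   var matchFred = matchesStrictComparable('user', 'fred');
 *   matchFred({ 'user': 'fred' });   // => true
 *   matchFred({ 'user': 'barney' }); // => false
 *   matchFred(null);                 // => false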
*/ function matchesStrictComparable(key, srcValue) { return function(object) { if (object == null) { return false; } return object[key] === srcValue && (srcValue !== undefined || (key in Object(object))); }; } module.exports = matchesStrictComparable; /***/ }, /* 552 */ /***/ function(module, exports, __webpack_require__) { var baseIsEqual = __webpack_require__(505), get = __webpack_require__(553), hasIn = __webpack_require__(565), isKey = __webpack_require__(556), isStrictComparable = __webpack_require__(550), matchesStrictComparable = __webpack_require__(551), toKey = __webpack_require__(564); /** Used to compose bitmasks for value comparisons. */ var COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2; /** * The base implementation of `_.matchesProperty` which doesn't clone `srcValue`. * * @private * @param {string} path The path of the property to get. * @param {*} srcValue The value to match. * @returns {Function} Returns the new spec function. */ function baseMatchesProperty(path, srcValue) { if (isKey(path) && isStrictComparable(srcValue)) { return matchesStrictComparable(toKey(path), srcValue); } return function(object) { var objValue = get(object, path); return (objValue === undefined && objValue === srcValue) ? hasIn(object, path) : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG); }; } module.exports = baseMatchesProperty; /***/ }, /* 553 */ /***/ function(module, exports, __webpack_require__) { var baseGet = __webpack_require__(554); /** * Gets the value at `path` of `object`. If the resolved value is * `undefined`, the `defaultValue` is returned in its place. * * @static * @memberOf _ * @since 3.7.0 * @category Object * @param {Object} object The object to query. * @param {Array|string} path The path of the property to get. * @param {*} [defaultValue] The value returned for `undefined` resolved values. * @returns {*} Returns the resolved value. * @example * * var object = { 'a': [{ 'b': { 'c': 3 } }] }; * * _.get(object, 'a[0].b.c'); * // => 3 * * _.get(object, ['a', '0', 'b', 'c']); * // => 3 * * _.get(object, 'a.b.c', 'default'); * // => 'default' */ function get(object, path, defaultValue) { var result = object == null ? undefined : baseGet(object, path); return result === undefined ? defaultValue : result; } module.exports = get; /***/ }, /* 554 */ /***/ function(module, exports, __webpack_require__) { var castPath = __webpack_require__(555), toKey = __webpack_require__(564); /** * The base implementation of `_.get` without support for default values. * * @private * @param {Object} object The object to query. * @param {Array|string} path The path of the property to get. * @returns {*} Returns the resolved value. */ function baseGet(object, path) { path = castPath(path, object); var index = 0, length = path.length; while (object != null && index < length) { object = object[toKey(path[index++])]; } return (index && index == length) ? object : undefined; } module.exports = baseGet; /***/ }, /* 555 */ /***/ function(module, exports, __webpack_require__) { var isArray = __webpack_require__(521), isKey = __webpack_require__(556), stringToPath = __webpack_require__(558), toString = __webpack_require__(561); /** * Casts `value` to a path array if it's not one. * * @private * @param {*} value The value to inspect. * @param {Object} [object] The object to query keys on. * @returns {Array} Returns the cast property path array. */ function castPath(value, object) { if (isArray(value)) { return value; } return isKey(value, object) ? 
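// Property-name case wraps the value as a one-element path; everything else goes
// through stringToPath, e.g. castPath('a[0].b') yields ['a', '0', 'b'].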
[value] : stringToPath(toString(value)); } module.exports = castPath; /***/ }, /* 556 */ /***/ function(module, exports, __webpack_require__) { var isArray = __webpack_require__(521), isSymbol = __webpack_require__(557); /** Used to match property names within property paths. */ var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, reIsPlainProp = /^\w*$/; /** * Checks if `value` is a property name and not a property path. * * @private * @param {*} value The value to check. * @param {Object} [object] The object to query keys on. * @returns {boolean} Returns `true` if `value` is a property name, else `false`. */ function isKey(value, object) { if (isArray(value)) { return false; } var type = typeof value; if (type == 'number' || type == 'symbol' || type == 'boolean' || value == null || isSymbol(value)) { return true; } return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || (object != null && value in Object(object)); } module.exports = isKey; /***/ }, /* 557 */ /***/ function(module, exports, __webpack_require__) { var baseGetTag = __webpack_require__(479), isObjectLike = __webpack_require__(530); /** `Object#toString` result references. */ var symbolTag = '[object Symbol]'; /** * Checks if `value` is classified as a `Symbol` primitive or object. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. * @example * * _.isSymbol(Symbol.iterator); * // => true * * _.isSymbol('abc'); * // => false */ function isSymbol(value) { return typeof value == 'symbol' || (isObjectLike(value) && baseGetTag(value) == symbolTag); } module.exports = isSymbol; /***/ }, /* 558 */ /***/ function(module, exports, __webpack_require__) { var memoizeCapped = __webpack_require__(559); /** Used to match property names within property paths. */ var reLeadingDot = /^\./, rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; /** Used to match backslashes in property paths. */ var reEscapeChar = /\\(\\)?/g; /** * Converts `string` to a property path array. * * @private * @param {string} string The string to convert. * @returns {Array} Returns the property path array. */ var stringToPath = memoizeCapped(function(string) { var result = []; if (reLeadingDot.test(string)) { result.push(''); } string.replace(rePropName, function(match, number, quote, string) { result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); }); return result; }); module.exports = stringToPath; /***/ }, /* 559 */ /***/ function(module, exports, __webpack_require__) { var memoize = __webpack_require__(560); /** Used as the maximum memoize cache size. */ var MAX_MEMOIZE_SIZE = 500; /** * A specialized version of `_.memoize` which clears the memoized function's * cache when it exceeds `MAX_MEMOIZE_SIZE`. * * @private * @param {Function} func The function to have its output memoized. * @returns {Function} Returns the new memoized function. */ function memoizeCapped(func) { var result = memoize(func, function(key) { if (cache.size === MAX_MEMOIZE_SIZE) { cache.clear(); } return key; }); var cache = result.cache; return result; } module.exports = memoizeCapped; /***/ }, /* 560 */ /***/ function(module, exports, __webpack_require__) { var MapCache = __webpack_require__(490); /** Error message constants. */ var FUNC_ERROR_TEXT = 'Expected a function'; /** * Creates a function that memoizes the result of `func`. 
If `resolver` is * provided, it determines the cache key for storing the result based on the * arguments provided to the memoized function. By default, the first argument * provided to the memoized function is used as the map cache key. The `func` * is invoked with the `this` binding of the memoized function. * * **Note:** The cache is exposed as the `cache` property on the memoized * function. Its creation may be customized by replacing the `_.memoize.Cache` * constructor with one whose instances implement the * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) * method interface of `clear`, `delete`, `get`, `has`, and `set`. * * @static * @memberOf _ * @since 0.1.0 * @category Function * @param {Function} func The function to have its output memoized. * @param {Function} [resolver] The function to resolve the cache key. * @returns {Function} Returns the new memoized function. * @example * * var object = { 'a': 1, 'b': 2 }; * var other = { 'c': 3, 'd': 4 }; * * var values = _.memoize(_.values); * values(object); * // => [1, 2] * * values(other); * // => [3, 4] * * object.a = 2; * values(object); * // => [1, 2] * * // Modify the result cache. * values.cache.set(object, ['a', 'b']); * values(object); * // => ['a', 'b'] * * // Replace `_.memoize.Cache`. * _.memoize.Cache = WeakMap; */ function memoize(func, resolver) { if (typeof func != 'function' || (resolver != null && typeof resolver != 'function')) { throw new TypeError(FUNC_ERROR_TEXT); } var memoized = function() { var args = arguments, key = resolver ? resolver.apply(this, args) : args[0], cache = memoized.cache; if (cache.has(key)) { return cache.get(key); } var result = func.apply(this, args); memoized.cache = cache.set(key, result) || cache; return result; }; memoized.cache = new (memoize.Cache || MapCache); return memoized; } // Expose `MapCache`. memoize.Cache = MapCache; module.exports = memoize; /***/ }, /* 561 */ /***/ function(module, exports, __webpack_require__) { var baseToString = __webpack_require__(562); /** * Converts `value` to a string. An empty string is returned for `null` * and `undefined` values. The sign of `-0` is preserved. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to convert. * @returns {string} Returns the converted string. * @example * * _.toString(null); * // => '' * * _.toString(-0); * // => '-0' * * _.toString([1, 2, 3]); * // => '1,2,3' */ function toString(value) { return value == null ? '' : baseToString(value); } module.exports = toString; /***/ }, /* 562 */ /***/ function(module, exports, __webpack_require__) { var Symbol = __webpack_require__(480), arrayMap = __webpack_require__(563), isArray = __webpack_require__(521), isSymbol = __webpack_require__(557); /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0; /** Used to convert symbols to primitives and strings. */ var symbolProto = Symbol ? Symbol.prototype : undefined, symbolToString = symbolProto ? symbolProto.toString : undefined; /** * The base implementation of `_.toString` which doesn't convert nullish * values to empty strings. * * @private * @param {*} value The value to process. * @returns {string} Returns the string. */ function baseToString(value) { // Exit early for strings to avoid a performance hit in some environments. if (typeof value == 'string') { return value; } if (isArray(value)) { // Recursively convert values (susceptible to call stack limits). 
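// e.g. baseToString([1, [2, 'a']]) yields '1,2,a' via the implicit Array#join in `+ ''`.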
return arrayMap(value, baseToString) + ''; } if (isSymbol(value)) { return symbolToString ? symbolToString.call(value) : ''; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } module.exports = baseToString; /***/ }, /* 563 */ /***/ function(module, exports) { /** * A specialized version of `_.map` for arrays without support for iteratee * shorthands. * * @private * @param {Array} [array] The array to iterate over. * @param {Function} iteratee The function invoked per iteration. * @returns {Array} Returns the new mapped array. */ function arrayMap(array, iteratee) { var index = -1, length = array == null ? 0 : array.length, result = Array(length); while (++index < length) { result[index] = iteratee(array[index], index, array); } return result; } module.exports = arrayMap; /***/ }, /* 564 */ /***/ function(module, exports, __webpack_require__) { var isSymbol = __webpack_require__(557); /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0; /** * Converts `value` to a string key if it's not a string or symbol. * * @private * @param {*} value The value to inspect. * @returns {string|symbol} Returns the key. */ function toKey(value) { if (typeof value == 'string' || isSymbol(value)) { return value; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } module.exports = toKey; /***/ }, /* 565 */ /***/ function(module, exports, __webpack_require__) { var baseHasIn = __webpack_require__(566), hasPath = __webpack_require__(567); /** * Checks if `path` is a direct or inherited property of `object`. * * @static * @memberOf _ * @since 4.0.0 * @category Object * @param {Object} object The object to query. * @param {Array|string} path The path to check. * @returns {boolean} Returns `true` if `path` exists, else `false`. * @example * * var object = _.create({ 'a': _.create({ 'b': 2 }) }); * * _.hasIn(object, 'a'); * // => true * * _.hasIn(object, 'a.b'); * // => true * * _.hasIn(object, ['a', 'b']); * // => true * * _.hasIn(object, 'b'); * // => false */ function hasIn(object, path) { return object != null && hasPath(object, path, baseHasIn); } module.exports = hasIn; /***/ }, /* 566 */ /***/ function(module, exports) { /** * The base implementation of `_.hasIn` without support for deep paths. * * @private * @param {Object} [object] The object to query. * @param {Array|string} key The key to check. * @returns {boolean} Returns `true` if `key` exists, else `false`. */ function baseHasIn(object, key) { return object != null && key in Object(object); } module.exports = baseHasIn; /***/ }, /* 567 */ /***/ function(module, exports, __webpack_require__) { var castPath = __webpack_require__(555), isArguments = __webpack_require__(528), isArray = __webpack_require__(521), isIndex = __webpack_require__(533), isLength = __webpack_require__(536), toKey = __webpack_require__(564); /** * Checks if `path` exists on `object`. * * @private * @param {Object} object The object to query. * @param {Array|string} path The path to check. * @param {Function} hasFunc The function to check properties. * @returns {boolean} Returns `true` if `path` exists, else `false`. 
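 *
 * A small illustrative sketch of how the `hasIn` wrapper above invokes it:
 *
 *   hasPath({ 'a': { 'b': 2 } }, 'a.b', baseHasIn); // => true
 *   hasPath({ 'a': { 'b': 2 } }, 'a.c', baseHasIn); // => false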
*/ function hasPath(object, path, hasFunc) { path = castPath(path, object); var index = -1, length = path.length, result = false; while (++index < length) { var key = toKey(path[index]); if (!(result = object != null && hasFunc(object, key))) { break; } object = object[key]; } if (result || ++index != length) { return result; } length = object == null ? 0 : object.length; return !!length && isLength(length) && isIndex(key, length) && (isArray(object) || isArguments(object)); } module.exports = hasPath; /***/ }, /* 568 */ /***/ function(module, exports) { /** * This method returns the first argument it receives. * * @static * @since 0.1.0 * @memberOf _ * @category Util * @param {*} value Any value. * @returns {*} Returns `value`. * @example * * var object = { 'a': 1 }; * * console.log(_.identity(object) === object); * // => true */ function identity(value) { return value; } module.exports = identity; /***/ }, /* 569 */ /***/ function(module, exports, __webpack_require__) { var baseProperty = __webpack_require__(570), basePropertyDeep = __webpack_require__(571), isKey = __webpack_require__(556), toKey = __webpack_require__(564); /** * Creates a function that returns the value at `path` of a given object. * * @static * @memberOf _ * @since 2.4.0 * @category Util * @param {Array|string} path The path of the property to get. * @returns {Function} Returns the new accessor function. * @example * * var objects = [ * { 'a': { 'b': 2 } }, * { 'a': { 'b': 1 } } * ]; * * _.map(objects, _.property('a.b')); * // => [2, 1] * * _.map(_.sortBy(objects, _.property(['a', 'b'])), 'a.b'); * // => [1, 2] */ function property(path) { return isKey(path) ? baseProperty(toKey(path)) : basePropertyDeep(path); } module.exports = property; /***/ }, /* 570 */ /***/ function(module, exports) { /** * The base implementation of `_.property` without support for deep paths. * * @private * @param {string} key The key of the property to get. * @returns {Function} Returns the new accessor function. */ function baseProperty(key) { return function(object) { return object == null ? undefined : object[key]; }; } module.exports = baseProperty; /***/ }, /* 571 */ /***/ function(module, exports, __webpack_require__) { var baseGet = __webpack_require__(554); /** * A specialized version of `baseProperty` which supports deep paths. * * @private * @param {Array|string} path The path of the property to get. * @returns {Function} Returns the new accessor function. */ function basePropertyDeep(path) { return function(object) { return baseGet(object, path); }; } module.exports = basePropertyDeep; /***/ }, /* 572 */ /***/ function(module, exports, __webpack_require__) { var baseFindIndex = __webpack_require__(573), baseIteratee = __webpack_require__(458), toInteger = __webpack_require__(574); /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeMax = Math.max; /** * This method is like `_.find` except that it returns the index of the first * element `predicate` returns truthy for instead of the element itself. * * @static * @memberOf _ * @since 1.1.0 * @category Array * @param {Array} array The array to inspect. * @param {Function} [predicate=_.identity] The function invoked per iteration. * @param {number} [fromIndex=0] The index to search from. * @returns {number} Returns the index of the found element, else `-1`. 
* @example * * var users = [ * { 'user': 'barney', 'active': false }, * { 'user': 'fred', 'active': false }, * { 'user': 'pebbles', 'active': true } * ]; * * _.findIndex(users, function(o) { return o.user == 'barney'; }); * // => 0 * * // The `_.matches` iteratee shorthand. * _.findIndex(users, { 'user': 'fred', 'active': false }); * // => 1 * * // The `_.matchesProperty` iteratee shorthand. * _.findIndex(users, ['active', false]); * // => 0 * * // The `_.property` iteratee shorthand. * _.findIndex(users, 'active'); * // => 2 */ function findIndex(array, predicate, fromIndex) { var length = array == null ? 0 : array.length; if (!length) { return -1; } var index = fromIndex == null ? 0 : toInteger(fromIndex); if (index < 0) { index = nativeMax(length + index, 0); } return baseFindIndex(array, baseIteratee(predicate, 3), index); } module.exports = findIndex; /***/ }, /* 573 */ /***/ function(module, exports) { /** * The base implementation of `_.findIndex` and `_.findLastIndex` without * support for iteratee shorthands. * * @private * @param {Array} array The array to inspect. * @param {Function} predicate The function invoked per iteration. * @param {number} fromIndex The index to search from. * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {number} Returns the index of the matched value, else `-1`. */ function baseFindIndex(array, predicate, fromIndex, fromRight) { var length = array.length, index = fromIndex + (fromRight ? 1 : -1); while ((fromRight ? index-- : ++index < length)) { if (predicate(array[index], index, array)) { return index; } } return -1; } module.exports = baseFindIndex; /***/ }, /* 574 */ /***/ function(module, exports, __webpack_require__) { var toFinite = __webpack_require__(575); /** * Converts `value` to an integer. * * **Note:** This method is loosely based on * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to convert. * @returns {number} Returns the converted integer. * @example * * _.toInteger(3.2); * // => 3 * * _.toInteger(Number.MIN_VALUE); * // => 0 * * _.toInteger(Infinity); * // => 1.7976931348623157e+308 * * _.toInteger('3.2'); * // => 3 */ function toInteger(value) { var result = toFinite(value), remainder = result % 1; return result === result ? (remainder ? result - remainder : result) : 0; } module.exports = toInteger; /***/ }, /* 575 */ /***/ function(module, exports, __webpack_require__) { var toNumber = __webpack_require__(576); /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0, MAX_INTEGER = 1.7976931348623157e+308; /** * Converts `value` to a finite number. * * @static * @memberOf _ * @since 4.12.0 * @category Lang * @param {*} value The value to convert. * @returns {number} Returns the converted number. * @example * * _.toFinite(3.2); * // => 3.2 * * _.toFinite(Number.MIN_VALUE); * // => 5e-324 * * _.toFinite(Infinity); * // => 1.7976931348623157e+308 * * _.toFinite('3.2'); * // => 3.2 */ function toFinite(value) { if (!value) { return value === 0 ? value : 0; } value = toNumber(value); if (value === INFINITY || value === -INFINITY) { var sign = (value < 0 ? -1 : 1); return sign * MAX_INTEGER; } return value === value ? value : 0; } module.exports = toFinite; /***/ }, /* 576 */ /***/ function(module, exports, __webpack_require__) { var isObject = __webpack_require__(485), isSymbol = __webpack_require__(557); /** Used as references for various `Number` constants. 
*/ var NAN = 0 / 0; /** Used to match leading and trailing whitespace. */ var reTrim = /^\s+|\s+$/g; /** Used to detect bad signed hexadecimal string values. */ var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; /** Used to detect binary string values. */ var reIsBinary = /^0b[01]+$/i; /** Used to detect octal string values. */ var reIsOctal = /^0o[0-7]+$/i; /** Built-in method references without a dependency on `root`. */ var freeParseInt = parseInt; /** * Converts `value` to a number. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to process. * @returns {number} Returns the number. * @example * * _.toNumber(3.2); * // => 3.2 * * _.toNumber(Number.MIN_VALUE); * // => 5e-324 * * _.toNumber(Infinity); * // => Infinity * * _.toNumber('3.2'); * // => 3.2 */ function toNumber(value) { if (typeof value == 'number') { return value; } if (isSymbol(value)) { return NAN; } if (isObject(value)) { var other = typeof value.valueOf == 'function' ? value.valueOf() : value; value = isObject(other) ? (other + '') : other; } if (typeof value != 'string') { return value === 0 ? value : +value; } value = value.replace(reTrim, ''); var isBinary = reIsBinary.test(value); return (isBinary || reIsOctal.test(value)) ? freeParseInt(value.slice(2), isBinary ? 2 : 8) : (reIsBadHex.test(value) ? NAN : +value); } module.exports = toNumber; /***/ }, /* 577 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _year_dropdown_options = __webpack_require__(578); var _year_dropdown_options2 = _interopRequireDefault(_year_dropdown_options); var _reactOnclickoutside = __webpack_require__(580); var _reactOnclickoutside2 = _interopRequireDefault(_reactOnclickoutside); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var WrappedYearDropdownOptions = (0, _reactOnclickoutside2.default)(_year_dropdown_options2.default); var YearDropdown = _react2.default.createClass({ displayName: 'YearDropdown', propTypes: { dropdownMode: _react2.default.PropTypes.oneOf(['scroll', 'select']).isRequired, maxDate: _react2.default.PropTypes.object, minDate: _react2.default.PropTypes.object, onChange: _react2.default.PropTypes.func.isRequired, scrollableYearDropdown: _react2.default.PropTypes.bool, year: _react2.default.PropTypes.number.isRequired }, getInitialState: function getInitialState() { return { dropdownVisible: false }; }, renderSelectOptions: function renderSelectOptions() { var minYear = this.props.minDate ? this.props.minDate.year() : 1900; var maxYear = this.props.maxDate ? this.props.maxDate.year() : 2100; var options = []; for (var i = minYear; i <= maxYear; i++) { options.push(_react2.default.createElement( 'option', { key: i, value: i }, i )); } return options; }, onSelectChange: function onSelectChange(e) { this.onChange(e.target.value); }, renderSelectMode: function renderSelectMode() { return _react2.default.createElement( 'select', { value: this.props.year, className: 'react-datepicker__year-select', onChange: this.onSelectChange }, this.renderSelectOptions() ); }, renderReadView: function renderReadView(visible) { return _react2.default.createElement( 'div', { key: 'read', style: { visibility: visible ? 
'visible' : 'hidden' }, className: 'react-datepicker__year-read-view', onClick: this.toggleDropdown }, _react2.default.createElement('span', { className: 'react-datepicker__year-read-view--down-arrow' }), _react2.default.createElement( 'span', { className: 'react-datepicker__year-read-view--selected-year' }, this.props.year ) ); }, renderDropdown: function renderDropdown() { return _react2.default.createElement(WrappedYearDropdownOptions, { key: 'dropdown', ref: 'options', year: this.props.year, onChange: this.onChange, onCancel: this.toggleDropdown, scrollableYearDropdown: this.props.scrollableYearDropdown }); }, renderScrollMode: function renderScrollMode() { var dropdownVisible = this.state.dropdownVisible; var result = [this.renderReadView(!dropdownVisible)]; if (dropdownVisible) { result.unshift(this.renderDropdown()); } return result; }, onChange: function onChange(year) { this.toggleDropdown(); if (year === this.props.year) return; this.props.onChange(year); }, toggleDropdown: function toggleDropdown() { this.setState({ dropdownVisible: !this.state.dropdownVisible }); }, render: function render() { var renderedDropdown = void 0; switch (this.props.dropdownMode) { case 'scroll': renderedDropdown = this.renderScrollMode(); break; case 'select': renderedDropdown = this.renderSelectMode(); break; } return _react2.default.createElement( 'div', { className: 'react-datepicker__year-dropdown-container react-datepicker__year-dropdown-container--' + this.props.dropdownMode }, renderedDropdown ); } }); module.exports = YearDropdown; /***/ }, /* 578 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _classnames = __webpack_require__(579); var _classnames2 = _interopRequireDefault(_classnames); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function generateYears(year, noOfYear) { var list = []; for (var i = 0; i < 2 * noOfYear; i++) { list.push(year + noOfYear - i); } return list; } var YearDropdownOptions = _react2.default.createClass({ displayName: 'YearDropdownOptions', propTypes: { onCancel: _react2.default.PropTypes.func.isRequired, onChange: _react2.default.PropTypes.func.isRequired, scrollableYearDropdown: _react2.default.PropTypes.bool, year: _react2.default.PropTypes.number.isRequired }, getInitialState: function getInitialState() { return { yearsList: this.props.scrollableYearDropdown ? generateYears(this.props.year, 10) : generateYears(this.props.year, 5) }; }, renderOptions: function renderOptions() { var _this = this; var selectedYear = this.props.year; var options = this.state.yearsList.map(function (year) { return _react2.default.createElement( 'div', { className: 'react-datepicker__year-option', key: year, ref: year, onClick: _this.onChange.bind(_this, year) }, selectedYear === year ? 
_react2.default.createElement( 'span', { className: 'react-datepicker__year-option--selected' }, '\u2713' ) : '', year ); }); options.unshift(_react2.default.createElement( 'div', { className: 'react-datepicker__year-option', ref: 'upcoming', key: 'upcoming', onClick: this.incrementYears }, _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--years react-datepicker__navigation--years-upcoming' }) )); options.push(_react2.default.createElement( 'div', { className: 'react-datepicker__year-option', ref: 'previous', key: 'previous', onClick: this.decrementYears }, _react2.default.createElement('a', { className: 'react-datepicker__navigation react-datepicker__navigation--years react-datepicker__navigation--years-previous' }) )); return options; }, onChange: function onChange(year) { this.props.onChange(year); }, handleClickOutside: function handleClickOutside() { this.props.onCancel(); }, shiftYears: function shiftYears(amount) { var years = this.state.yearsList.map(function (year) { return year + amount; }); this.setState({ yearsList: years }); }, incrementYears: function incrementYears() { return this.shiftYears(1); }, decrementYears: function decrementYears() { return this.shiftYears(-1); }, render: function render() { var dropdownClass = (0, _classnames2.default)({ 'react-datepicker__year-dropdown': true, 'react-datepicker__year-dropdown--scrollable': this.props.scrollableYearDropdown }); return _react2.default.createElement( 'div', { className: dropdownClass }, this.renderOptions() ); } }); module.exports = YearDropdownOptions; /***/ }, /* 579 */ /***/ function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;/*! Copyright (c) 2016 Jed Watson. Licensed under the MIT License (MIT), see http://jedwatson.github.io/classnames */ /* global define */ (function () { 'use strict'; var hasOwn = {}.hasOwnProperty; function classNames () { var classes = []; for (var i = 0; i < arguments.length; i++) { var arg = arguments[i]; if (!arg) continue; var argType = typeof arg; if (argType === 'string' || argType === 'number') { classes.push(arg); } else if (Array.isArray(arg)) { classes.push(classNames.apply(null, arg)); } else if (argType === 'object') { for (var key in arg) { if (hasOwn.call(arg, key) && arg[key]) { classes.push(key); } } } } return classes.join(' '); } if (typeof module !== 'undefined' && module.exports) { module.exports = classNames; } else if (true) { // register as 'classnames', consistent with npm package name !(__WEBPACK_AMD_DEFINE_ARRAY__ = [], __WEBPACK_AMD_DEFINE_RESULT__ = function () { return classNames; }.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); } else { window.classNames = classNames; } }()); /***/ }, /* 580 */ /***/ function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;/** * A higher-order-component for handling onClickOutside for React components. */ (function(root) { // administrative var registeredComponents = []; var handlers = []; var IGNORE_CLASS = 'ignore-react-onclickoutside'; var DEFAULT_EVENTS = ['mousedown', 'touchstart']; /** * Check whether some DOM node is our Component's node. 
*/ var isNodeFound = function(current, componentNode, ignoreClass) { if (current === componentNode) { return true; } // SVG <use/> elements do not technically reside in the rendered DOM, so // they do not have classList directly, but they offer a link to their // corresponding element, which can have classList. This extra check is for // that case. // See: http://www.w3.org/TR/SVG11/struct.html#InterfaceSVGUseElement // Discussion: https://github.com/Pomax/react-onclickoutside/pull/17 if (current.correspondingElement) { return current.correspondingElement.classList.contains(ignoreClass); } return current.classList.contains(ignoreClass); }; /** * Try to find our node in a hierarchy of nodes, returning the document * node as highest noode if our node is not found in the path up. */ var findHighest = function(current, componentNode, ignoreClass) { if (current === componentNode) { return true; } // If source=local then this event came from 'somewhere' // inside and should be ignored. We could handle this with // a layered approach, too, but that requires going back to // thinking in terms of Dom node nesting, running counter // to React's 'you shouldn't care about the DOM' philosophy. while(current.parentNode) { if (isNodeFound(current, componentNode, ignoreClass)) { return true; } current = current.parentNode; } return current; }; /** * Check if the browser scrollbar was clicked */ var clickedScrollbar = function(evt) { return document.documentElement.clientWidth <= evt.clientX; }; /** * Generate the event handler that checks whether a clicked DOM node * is inside of, or lives outside of, our Component's node tree. */ var generateOutsideCheck = function(componentNode, componentInstance, eventHandler, ignoreClass, excludeScrollbar, preventDefault, stopPropagation) { return function(evt) { if (preventDefault) { evt.preventDefault(); } if (stopPropagation) { evt.stopPropagation(); } var current = evt.target; if((excludeScrollbar && clickedScrollbar(evt)) || (findHighest(current, componentNode, ignoreClass) !== document)) { return; } eventHandler(evt); }; }; /** * This function generates the HOC function that you'll use * in order to impart onOutsideClick listening to an * arbitrary component. It gets called at the end of the * bootstrapping code to yield an instance of the * onClickOutsideHOC function defined inside setupHOC(). */ function setupHOC(root, React, ReactDOM) { // The actual Component-wrapping HOC: return function onClickOutsideHOC(Component, config) { var wrapComponentWithOnClickOutsideHandling = React.createClass({ statics: { /** * Access the wrapped Component's class. */ getClass: function() { if (Component.getClass) { return Component.getClass(); } return Component; } }, /** * Access the wrapped Component's instance. */ getInstance: function() { return Component.prototype.isReactComponent ? this.refs.instance : this; }, // this is given meaning in componentDidMount __outsideClickHandler: function() {}, /** * Add click listeners to the current document, * linked to this component's state. */ componentDidMount: function() { // If we are in an environment without a DOM such // as shallow rendering or snapshots then we exit // early to prevent any unhandled errors being thrown. 
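// (The guard just below performs that early exit.) After it, the outside-click
// handler is resolved in order: config.handleClickOutside(instance), then the
// instance's own handleClickOutside method, then a handleClickOutside prop.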
if (typeof document === 'undefined' || !document.createElement){ return; } var instance = this.getInstance(); var clickOutsideHandler; if(config && typeof config.handleClickOutside === 'function') { clickOutsideHandler = config.handleClickOutside(instance); if(typeof clickOutsideHandler !== 'function') { throw new Error('Component lacks a function for processing outside click events specified by the handleClickOutside config option.'); } } else if(typeof instance.handleClickOutside === 'function') { if (React.Component.prototype.isPrototypeOf(instance)) { clickOutsideHandler = instance.handleClickOutside.bind(instance); } else { clickOutsideHandler = instance.handleClickOutside; } } else if(typeof instance.props.handleClickOutside === 'function') { clickOutsideHandler = instance.props.handleClickOutside; } else { throw new Error('Component lacks a handleClickOutside(event) function for processing outside click events.'); } var componentNode = ReactDOM.findDOMNode(instance); if (componentNode === null) { console.warn('Antipattern warning: there was no DOM node associated with the component that is being wrapped by outsideClick.'); console.warn([ 'This is typically caused by having a component that starts life with a render function that', 'returns `null` (due to a state or props value), so that the component \'exist\' in the React', 'chain of components, but not in the DOM.\n\nInstead, you need to refactor your code so that the', 'decision of whether or not to show your component is handled by the parent, in their render()', 'function.\n\nIn code, rather than:\n\n A{render(){return check? <.../> : null;}\n B{render(){<A check=... />}\n\nmake sure that you', 'use:\n\n A{render(){return <.../>}\n B{render(){return <...>{ check ? <A/> : null }<...>}}\n\nThat is:', 'the parent is always responsible for deciding whether or not to render any of its children.', 'It is not the child\'s responsibility to decide whether a render instruction from above should', 'get ignored or not by returning `null`.\n\nWhen any component gets its render() function called,', 'that is the signal that it should be rendering its part of the UI. It may in turn decide not to', 'render all of *its* children, but it should never return `null` for itself. It is not responsible', 'for that decision.' ].join(' ')); } var fn = this.__outsideClickHandler = generateOutsideCheck( componentNode, instance, clickOutsideHandler, this.props.outsideClickIgnoreClass || IGNORE_CLASS, this.props.excludeScrollbar || false, this.props.preventDefault || false, this.props.stopPropagation || false ); var pos = registeredComponents.length; registeredComponents.push(this); handlers[pos] = fn; // If there is a truthy disableOnClickOutside property for this // component, don't immediately start listening for outside events. 
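// (Sketch of the consumer side: a component wrapped via onClickOutsideHOC can be
// rendered with disableOnClickOutside set to true and later start listening by
// calling the enableOnClickOutside function that render() passes down as a prop.)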
if (!this.props.disableOnClickOutside) { this.enableOnClickOutside(); } }, /** * Track for disableOnClickOutside props changes and enable/disable click outside */ componentWillReceiveProps: function(nextProps) { if (this.props.disableOnClickOutside && !nextProps.disableOnClickOutside) { this.enableOnClickOutside(); } else if (!this.props.disableOnClickOutside && nextProps.disableOnClickOutside) { this.disableOnClickOutside(); } }, /** * Remove the document's event listeners */ componentWillUnmount: function() { this.disableOnClickOutside(); this.__outsideClickHandler = false; var pos = registeredComponents.indexOf(this); if( pos>-1) { // clean up so we don't leak memory if (handlers[pos]) { handlers.splice(pos, 1); } registeredComponents.splice(pos, 1); } }, /** * Can be called to explicitly enable event listening * for clicks and touches outside of this element. */ enableOnClickOutside: function() { var fn = this.__outsideClickHandler; if (typeof document !== 'undefined') { var events = this.props.eventTypes || DEFAULT_EVENTS; if (!events.forEach) { events = [events]; } events.forEach(function (eventName) { document.addEventListener(eventName, fn); }); } }, /** * Can be called to explicitly disable event listening * for clicks and touches outside of this element. */ disableOnClickOutside: function() { var fn = this.__outsideClickHandler; if (typeof document !== 'undefined') { var events = this.props.eventTypes || DEFAULT_EVENTS; if (!events.forEach) { events = [events]; } events.forEach(function (eventName) { document.removeEventListener(eventName, fn); }); } }, /** * Pass-through render */ render: function() { var passedProps = this.props; var props = {}; Object.keys(this.props).forEach(function(key) { if (key !== 'excludeScrollbar') { props[key] = passedProps[key]; } }); if (Component.prototype.isReactComponent) { props.ref = 'instance'; } props.disableOnClickOutside = this.disableOnClickOutside; props.enableOnClickOutside = this.enableOnClickOutside; return React.createElement(Component, props); } }); // Add display name for React devtools (function bindWrappedComponentName(c, wrapper) { var componentName = c.displayName || c.name || 'Component'; wrapper.displayName = 'OnClickOutside(' + componentName + ')'; }(Component, wrapComponentWithOnClickOutsideHandling)); return wrapComponentWithOnClickOutsideHandling; }; } /** * This function sets up the library in ways that * work with the various modulde loading solutions * used in JavaScript land today. */ function setupBinding(root, factory) { if (true) { // AMD. Register as an anonymous module. !(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(2),__webpack_require__(29)], __WEBPACK_AMD_DEFINE_RESULT__ = function(React, ReactDom) { return factory(root, React, ReactDom); }.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); } else if (typeof exports === 'object') { // Node. 
Note that this does not work with strict // CommonJS, but only CommonJS-like environments // that support module.exports module.exports = factory(root, require('react'), require('react-dom')); } else { // Browser globals (root is window) root.onClickOutside = factory(root, React, ReactDOM); } } // Make it all happen setupBinding(root, setupHOC); }(this)); /***/ }, /* 581 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _month_dropdown_options = __webpack_require__(582); var _month_dropdown_options2 = _interopRequireDefault(_month_dropdown_options); var _reactOnclickoutside = __webpack_require__(580); var _reactOnclickoutside2 = _interopRequireDefault(_reactOnclickoutside); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); var _range = __webpack_require__(583); var _range2 = _interopRequireDefault(_range); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var WrappedMonthDropdownOptions = (0, _reactOnclickoutside2.default)(_month_dropdown_options2.default); var MonthDropdown = _react2.default.createClass({ displayName: 'MonthDropdown', propTypes: { dropdownMode: _react2.default.PropTypes.oneOf(['scroll', 'select']).isRequired, locale: _react2.default.PropTypes.string, month: _react2.default.PropTypes.number.isRequired, onChange: _react2.default.PropTypes.func.isRequired }, getInitialState: function getInitialState() { return { dropdownVisible: false }; }, renderSelectOptions: function renderSelectOptions(monthNames) { return monthNames.map(function (M, i) { return _react2.default.createElement( 'option', { key: i, value: i }, M ); }); }, renderSelectMode: function renderSelectMode(monthNames) { var _this = this; return _react2.default.createElement( 'select', { value: this.props.month, className: 'react-datepicker__month-select', onChange: function onChange(e) { return _this.onChange(e.target.value); } }, this.renderSelectOptions(monthNames) ); }, renderReadView: function renderReadView(visible, monthNames) { return _react2.default.createElement( 'div', { key: 'read', style: { visibility: visible ? 
'visible' : 'hidden' }, className: 'react-datepicker__month-read-view', onClick: this.toggleDropdown }, _react2.default.createElement( 'span', { className: 'react-datepicker__month-read-view--selected-month' }, monthNames[this.props.month] ), _react2.default.createElement('span', { className: 'react-datepicker__month-read-view--down-arrow' }) ); }, renderDropdown: function renderDropdown(monthNames) { return _react2.default.createElement(WrappedMonthDropdownOptions, { key: 'dropdown', ref: 'options', month: this.props.month, monthNames: monthNames, onChange: this.onChange, onCancel: this.toggleDropdown }); }, renderScrollMode: function renderScrollMode(monthNames) { var dropdownVisible = this.state.dropdownVisible; var result = [this.renderReadView(!dropdownVisible, monthNames)]; if (dropdownVisible) { result.unshift(this.renderDropdown(monthNames)); } return result; }, onChange: function onChange(month) { this.toggleDropdown(); if (month !== this.props.month) { this.props.onChange(month); } }, toggleDropdown: function toggleDropdown() { this.setState({ dropdownVisible: !this.state.dropdownVisible }); }, render: function render() { var localeData = _moment2.default.localeData(this.props.locale); var monthNames = (0, _range2.default)(0, 12).map(function (M) { return localeData.months((0, _moment2.default)({ M: M })); }); var renderedDropdown = void 0; switch (this.props.dropdownMode) { case 'scroll': renderedDropdown = this.renderScrollMode(monthNames); break; case 'select': renderedDropdown = this.renderSelectMode(monthNames); break; } return _react2.default.createElement( 'div', { className: 'react-datepicker__month-dropdown-container react-datepicker__month-dropdown-container--' + this.props.dropdownMode }, renderedDropdown ); } }); module.exports = MonthDropdown; /***/ }, /* 582 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var MonthDropdownOptions = _react2.default.createClass({ displayName: 'MonthDropdownOptions', propTypes: { onCancel: _react2.default.PropTypes.func.isRequired, onChange: _react2.default.PropTypes.func.isRequired, month: _react2.default.PropTypes.number.isRequired, monthNames: _react2.default.PropTypes.arrayOf(_react2.default.PropTypes.string.isRequired).isRequired }, renderOptions: function renderOptions() { var _this = this; var selectedMonth = this.props.month; var options = this.props.monthNames.map(function (month, i) { return _react2.default.createElement( 'div', { className: 'react-datepicker__month-option', key: month, ref: month, onClick: _this.onChange.bind(_this, i) }, selectedMonth === i ? _react2.default.createElement( 'span', { className: 'react-datepicker__month-option--selected' }, '\u2713' ) : '', month ); }); return options; }, onChange: function onChange(month) { this.props.onChange(month); }, handleClickOutside: function handleClickOutside() { this.props.onCancel(); }, render: function render() { return _react2.default.createElement( 'div', { className: 'react-datepicker__month-dropdown' }, this.renderOptions() ); } }); module.exports = MonthDropdownOptions; /***/ }, /* 583 */ /***/ function(module, exports, __webpack_require__) { var createRange = __webpack_require__(584); /** * Creates an array of numbers (positive and/or negative) progressing from * `start` up to, but not including, `end`. 
A step of `-1` is used if a negative * `start` is specified without an `end` or `step`. If `end` is not specified, * it's set to `start` with `start` then set to `0`. * * **Note:** JavaScript follows the IEEE-754 standard for resolving * floating-point values which can produce unexpected results. * * @static * @since 0.1.0 * @memberOf _ * @category Util * @param {number} [start=0] The start of the range. * @param {number} end The end of the range. * @param {number} [step=1] The value to increment or decrement by. * @returns {Array} Returns the range of numbers. * @see _.inRange, _.rangeRight * @example * * _.range(4); * // => [0, 1, 2, 3] * * _.range(-4); * // => [0, -1, -2, -3] * * _.range(1, 5); * // => [1, 2, 3, 4] * * _.range(0, 20, 5); * // => [0, 5, 10, 15] * * _.range(0, -4, -1); * // => [0, -1, -2, -3] * * _.range(1, 4, 0); * // => [1, 1, 1] * * _.range(0); * // => [] */ var range = createRange(); module.exports = range; /***/ }, /* 584 */ /***/ function(module, exports, __webpack_require__) { var baseRange = __webpack_require__(585), isIterateeCall = __webpack_require__(586), toFinite = __webpack_require__(575); /** * Creates a `_.range` or `_.rangeRight` function. * * @private * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {Function} Returns the new range function. */ function createRange(fromRight) { return function(start, end, step) { if (step && typeof step != 'number' && isIterateeCall(start, end, step)) { end = step = undefined; } // Ensure the sign of `-0` is preserved. start = toFinite(start); if (end === undefined) { end = start; start = 0; } else { end = toFinite(end); } step = step === undefined ? (start < end ? 1 : -1) : toFinite(step); return baseRange(start, end, step, fromRight); }; } module.exports = createRange; /***/ }, /* 585 */ /***/ function(module, exports) { /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeCeil = Math.ceil, nativeMax = Math.max; /** * The base implementation of `_.range` and `_.rangeRight` which doesn't * coerce arguments. * * @private * @param {number} start The start of the range. * @param {number} end The end of the range. * @param {number} step The value to increment or decrement by. * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {Array} Returns the range of numbers. */ function baseRange(start, end, step, fromRight) { var index = -1, length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), result = Array(length); while (length--) { result[fromRight ? length : ++index] = start; start += step; } return result; } module.exports = baseRange; /***/ }, /* 586 */ /***/ function(module, exports, __webpack_require__) { var eq = __webpack_require__(466), isArrayLike = __webpack_require__(543), isIndex = __webpack_require__(533), isObject = __webpack_require__(485); /** * Checks if the given arguments are from an iteratee call. * * @private * @param {*} value The potential iteratee value argument. * @param {*} index The potential iteratee index or key argument. * @param {*} object The potential iteratee object argument. * @returns {boolean} Returns `true` if the arguments are from an iteratee call, * else `false`. */ function isIterateeCall(value, index, object) { if (!isObject(object)) { return false; } var type = typeof index; if (type == 'number' ? 
(isArrayLike(object) && isIndex(index, object.length)) : (type == 'string' && index in object) ) { return eq(object[index], value); } return false; } module.exports = isIterateeCall; /***/ }, /* 587 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _classnames = __webpack_require__(579); var _classnames2 = _interopRequireDefault(_classnames); var _week = __webpack_require__(588); var _week2 = _interopRequireDefault(_week); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var FIXED_HEIGHT_STANDARD_WEEK_COUNT = 6; var Month = _react2.default.createClass({ displayName: 'Month', propTypes: { day: _react2.default.PropTypes.object.isRequired, endDate: _react2.default.PropTypes.object, excludeDates: _react2.default.PropTypes.array, filterDate: _react2.default.PropTypes.func, fixedHeight: _react2.default.PropTypes.bool, highlightDates: _react2.default.PropTypes.array, includeDates: _react2.default.PropTypes.array, maxDate: _react2.default.PropTypes.object, minDate: _react2.default.PropTypes.object, onDayClick: _react2.default.PropTypes.func, onDayMouseEnter: _react2.default.PropTypes.func, onMouseLeave: _react2.default.PropTypes.func, peekNextMonth: _react2.default.PropTypes.bool, preSelection: _react2.default.PropTypes.object, selected: _react2.default.PropTypes.object, selectingDate: _react2.default.PropTypes.object, selectsEnd: _react2.default.PropTypes.bool, selectsStart: _react2.default.PropTypes.bool, showWeekNumbers: _react2.default.PropTypes.bool, startDate: _react2.default.PropTypes.object, utcOffset: _react2.default.PropTypes.number }, handleDayClick: function handleDayClick(day, event) { if (this.props.onDayClick) { this.props.onDayClick(day, event); } }, handleDayMouseEnter: function handleDayMouseEnter(day) { if (this.props.onDayMouseEnter) { this.props.onDayMouseEnter(day); } }, handleMouseLeave: function handleMouseLeave() { if (this.props.onMouseLeave) { this.props.onMouseLeave(); } }, isWeekInMonth: function isWeekInMonth(startOfWeek) { var day = this.props.day; var endOfWeek = startOfWeek.clone().add(6, 'days'); return startOfWeek.isSame(day, 'month') || endOfWeek.isSame(day, 'month'); }, renderWeeks: function renderWeeks() { var weeks = []; var isFixedHeight = this.props.fixedHeight; var currentWeekStart = this.props.day.clone().startOf('month').startOf('week'); var i = 0; var breakAfterNextPush = false; while (true) { weeks.push(_react2.default.createElement(_week2.default, { key: i, day: currentWeekStart, month: this.props.day.month(), onDayClick: this.handleDayClick, onDayMouseEnter: this.handleDayMouseEnter, minDate: this.props.minDate, maxDate: this.props.maxDate, excludeDates: this.props.excludeDates, includeDates: this.props.includeDates, highlightDates: this.props.highlightDates, selectingDate: this.props.selectingDate, filterDate: this.props.filterDate, preSelection: this.props.preSelection, selected: this.props.selected, selectsStart: this.props.selectsStart, selectsEnd: this.props.selectsEnd, showWeekNumber: this.props.showWeekNumbers, startDate: this.props.startDate, endDate: this.props.endDate, utcOffset: this.props.utcOffset })); if (breakAfterNextPush) break; i++; currentWeekStart = currentWeekStart.clone().add(1, 'weeks'); // If one of these conditions is true, we will either break on this week // or break on the next week var isFixedAndFinalWeek = isFixedHeight && i >= FIXED_HEIGHT_STANDARD_WEEK_COUNT; var 
isNonFixedAndOutOfMonth = !isFixedHeight && !this.isWeekInMonth(currentWeekStart); if (isFixedAndFinalWeek || isNonFixedAndOutOfMonth) { if (this.props.peekNextMonth) { breakAfterNextPush = true; } else { break; } } } return weeks; }, getClassNames: function getClassNames() { var _props = this.props, selectingDate = _props.selectingDate, selectsStart = _props.selectsStart, selectsEnd = _props.selectsEnd; return (0, _classnames2.default)('react-datepicker__month', { 'react-datepicker__month--selecting-range': selectingDate && (selectsStart || selectsEnd) }); }, render: function render() { return _react2.default.createElement( 'div', { className: this.getClassNames(), onMouseLeave: this.handleMouseLeave, role: 'listbox' }, this.renderWeeks() ); } }); module.exports = Month; /***/ }, /* 588 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _day = __webpack_require__(589); var _day2 = _interopRequireDefault(_day); var _week_number = __webpack_require__(590); var _week_number2 = _interopRequireDefault(_week_number); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var Week = _react2.default.createClass({ displayName: 'Week', propTypes: { day: _react2.default.PropTypes.object.isRequired, endDate: _react2.default.PropTypes.object, excludeDates: _react2.default.PropTypes.array, filterDate: _react2.default.PropTypes.func, highlightDates: _react2.default.PropTypes.array, includeDates: _react2.default.PropTypes.array, maxDate: _react2.default.PropTypes.object, minDate: _react2.default.PropTypes.object, month: _react2.default.PropTypes.number, onDayClick: _react2.default.PropTypes.func, onDayMouseEnter: _react2.default.PropTypes.func, preSelection: _react2.default.PropTypes.object, selected: _react2.default.PropTypes.object, selectingDate: _react2.default.PropTypes.object, selectsEnd: _react2.default.PropTypes.bool, selectsStart: _react2.default.PropTypes.bool, showWeekNumber: _react2.default.PropTypes.bool, startDate: _react2.default.PropTypes.object, utcOffset: _react2.default.PropTypes.number }, handleDayClick: function handleDayClick(day, event) { if (this.props.onDayClick) { this.props.onDayClick(day, event); } }, handleDayMouseEnter: function handleDayMouseEnter(day) { if (this.props.onDayMouseEnter) { this.props.onDayMouseEnter(day); } }, renderDays: function renderDays() { var _this = this; var startOfWeek = this.props.day.clone().startOf('week'); var days = []; if (this.props.showWeekNumber) { days.push(_react2.default.createElement(_week_number2.default, { key: 'W', weekNumber: parseInt(startOfWeek.format('w'), 10) })); } return days.concat([0, 1, 2, 3, 4, 5, 6].map(function (offset) { var day = startOfWeek.clone().add(offset, 'days'); return _react2.default.createElement(_day2.default, { key: offset, day: day, month: _this.props.month, onClick: _this.handleDayClick.bind(_this, day), onMouseEnter: _this.handleDayMouseEnter.bind(_this, day), minDate: _this.props.minDate, maxDate: _this.props.maxDate, excludeDates: _this.props.excludeDates, includeDates: _this.props.includeDates, highlightDates: _this.props.highlightDates, selectingDate: _this.props.selectingDate, filterDate: _this.props.filterDate, preSelection: _this.props.preSelection, selected: _this.props.selected, selectsStart: _this.props.selectsStart, selectsEnd: _this.props.selectsEnd, startDate: _this.props.startDate, endDate: _this.props.endDate, utcOffset: _this.props.utcOffset 
}); })); }, render: function render() { return _react2.default.createElement( 'div', { className: 'react-datepicker__week' }, this.renderDays() ); } }); module.exports = Week; /***/ }, /* 589 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _classnames = __webpack_require__(579); var _classnames2 = _interopRequireDefault(_classnames); var _date_utils = __webpack_require__(454); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var Day = _react2.default.createClass({ displayName: 'Day', propTypes: { day: _react2.default.PropTypes.object.isRequired, endDate: _react2.default.PropTypes.object, highlightDates: _react2.default.PropTypes.array, month: _react2.default.PropTypes.number, onClick: _react2.default.PropTypes.func, onMouseEnter: _react2.default.PropTypes.func, preSelection: _react2.default.PropTypes.object, selected: _react2.default.PropTypes.object, selectingDate: _react2.default.PropTypes.object, selectsEnd: _react2.default.PropTypes.bool, selectsStart: _react2.default.PropTypes.bool, startDate: _react2.default.PropTypes.object, utcOffset: _react2.default.PropTypes.number }, getDefaultProps: function getDefaultProps() { return { utcOffset: _moment2.default.utc().utcOffset() }; }, handleClick: function handleClick(event) { if (!this.isDisabled() && this.props.onClick) { this.props.onClick(event); } }, handleMouseEnter: function handleMouseEnter(event) { if (!this.isDisabled() && this.props.onMouseEnter) { this.props.onMouseEnter(event); } }, isSameDay: function isSameDay(other) { return (0, _date_utils.isSameDay)(this.props.day, other); }, isKeyboardSelected: function isKeyboardSelected() { return !this.isSameDay(this.props.selected) && this.isSameDay(this.props.preSelection); }, isDisabled: function isDisabled() { return (0, _date_utils.isDayDisabled)(this.props.day, this.props); }, isHighlighted: function isHighlighted() { var _props = this.props, day = _props.day, highlightDates = _props.highlightDates; if (!highlightDates) return false; return highlightDates.some(function (testDay) { return (0, _date_utils.isSameDay)(day, testDay); }); }, isInRange: function isInRange() { var _props2 = this.props, day = _props2.day, startDate = _props2.startDate, endDate = _props2.endDate; if (!startDate || !endDate) return false; return (0, _date_utils.isDayInRange)(day, startDate, endDate); }, isInSelectingRange: function isInSelectingRange() { var _props3 = this.props, day = _props3.day, selectsStart = _props3.selectsStart, selectsEnd = _props3.selectsEnd, selectingDate = _props3.selectingDate, startDate = _props3.startDate, endDate = _props3.endDate; if (!(selectsStart || selectsEnd) || !selectingDate || this.isDisabled()) { return false; } if (selectsStart && endDate && selectingDate.isSameOrBefore(endDate)) { return (0, _date_utils.isDayInRange)(day, selectingDate, endDate); } if (selectsEnd && startDate && selectingDate.isSameOrAfter(startDate)) { return (0, _date_utils.isDayInRange)(day, startDate, selectingDate); } return false; }, isSelectingRangeStart: function isSelectingRangeStart() { if (!this.isInSelectingRange()) { return false; } var _props4 = this.props, day = _props4.day, selectingDate = _props4.selectingDate, startDate = _props4.startDate, selectsStart = _props4.selectsStart; if (selectsStart) { return (0, _date_utils.isSameDay)(day, 
selectingDate); } else { return (0, _date_utils.isSameDay)(day, startDate); } }, isSelectingRangeEnd: function isSelectingRangeEnd() { if (!this.isInSelectingRange()) { return false; } var _props5 = this.props, day = _props5.day, selectingDate = _props5.selectingDate, endDate = _props5.endDate, selectsEnd = _props5.selectsEnd; if (selectsEnd) { return (0, _date_utils.isSameDay)(day, selectingDate); } else { return (0, _date_utils.isSameDay)(day, endDate); } }, isRangeStart: function isRangeStart() { var _props6 = this.props, day = _props6.day, startDate = _props6.startDate, endDate = _props6.endDate; if (!startDate || !endDate) return false; return (0, _date_utils.isSameDay)(startDate, day); }, isRangeEnd: function isRangeEnd() { var _props7 = this.props, day = _props7.day, startDate = _props7.startDate, endDate = _props7.endDate; if (!startDate || !endDate) return false; return (0, _date_utils.isSameDay)(endDate, day); }, isWeekend: function isWeekend() { var weekday = this.props.day.day(); return weekday === 0 || weekday === 6; }, isOutsideMonth: function isOutsideMonth() { return this.props.month !== undefined && this.props.month !== this.props.day.month(); }, getClassNames: function getClassNames() { return (0, _classnames2.default)('react-datepicker__day', { 'react-datepicker__day--disabled': this.isDisabled(), 'react-datepicker__day--selected': this.isSameDay(this.props.selected), 'react-datepicker__day--keyboard-selected': this.isKeyboardSelected(), 'react-datepicker__day--highlighted': this.isHighlighted(), 'react-datepicker__day--range-start': this.isRangeStart(), 'react-datepicker__day--range-end': this.isRangeEnd(), 'react-datepicker__day--in-range': this.isInRange(), 'react-datepicker__day--in-selecting-range': this.isInSelectingRange(), 'react-datepicker__day--selecting-range-start': this.isSelectingRangeStart(), 'react-datepicker__day--selecting-range-end': this.isSelectingRangeEnd(), 'react-datepicker__day--today': this.isSameDay(_moment2.default.utc().utcOffset(this.props.utcOffset)), 'react-datepicker__day--weekend': this.isWeekend(), 'react-datepicker__day--outside-month': this.isOutsideMonth() }); }, render: function render() { return _react2.default.createElement( 'div', { className: this.getClassNames(), onClick: this.handleClick, onMouseEnter: this.handleMouseEnter, 'aria-label': 'day-' + this.props.day.date(), role: 'option' }, this.props.day.date() ); } }); module.exports = Day; /***/ }, /* 590 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var WeekNumber = _react2.default.createClass({ displayName: 'WeekNumber', propTypes: { weekNumber: _react2.default.PropTypes.number.isRequired }, render: function render() { return _react2.default.createElement( 'div', { className: 'react-datepicker__week-number', 'aria-label': 'week-' + this.props.weekNumber }, this.props.weekNumber ); } }); module.exports = WeekNumber; /***/ }, /* 591 */ /***/ function(module, exports, __webpack_require__) { var baseDelay = __webpack_require__(592), baseRest = __webpack_require__(593); /** * Defers invoking the `func` until the current call stack has cleared. Any * additional arguments are provided to `func` when it's invoked. * * @static * @memberOf _ * @since 0.1.0 * @category Function * @param {Function} func The function to defer. * @param {...*} [args] The arguments to invoke `func` with. 
* @returns {number} Returns the timer id. * @example * * _.defer(function(text) { * console.log(text); * }, 'deferred'); * // => Logs 'deferred' after one millisecond. */ var defer = baseRest(function(func, args) { return baseDelay(func, 1, args); }); module.exports = defer; /***/ }, /* 592 */ /***/ function(module, exports) { /** Error message constants. */ var FUNC_ERROR_TEXT = 'Expected a function'; /** * The base implementation of `_.delay` and `_.defer` which accepts `args` * to provide to `func`. * * @private * @param {Function} func The function to delay. * @param {number} wait The number of milliseconds to delay invocation. * @param {Array} args The arguments to provide to `func`. * @returns {number|Object} Returns the timer id or timeout object. */ function baseDelay(func, wait, args) { if (typeof func != 'function') { throw new TypeError(FUNC_ERROR_TEXT); } return setTimeout(function() { func.apply(undefined, args); }, wait); } module.exports = baseDelay; /***/ }, /* 593 */ /***/ function(module, exports, __webpack_require__) { var identity = __webpack_require__(568), overRest = __webpack_require__(594), setToString = __webpack_require__(596); /** * The base implementation of `_.rest` which doesn't validate or coerce arguments. * * @private * @param {Function} func The function to apply a rest parameter to. * @param {number} [start=func.length-1] The start position of the rest parameter. * @returns {Function} Returns the new function. */ function baseRest(func, start) { return setToString(overRest(func, start, identity), func + ''); } module.exports = baseRest; /***/ }, /* 594 */ /***/ function(module, exports, __webpack_require__) { var apply = __webpack_require__(595); /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeMax = Math.max; /** * A specialized version of `baseRest` which transforms the rest array. * * @private * @param {Function} func The function to apply a rest parameter to. * @param {number} [start=func.length-1] The start position of the rest parameter. * @param {Function} transform The rest array transform. * @returns {Function} Returns the new function. */ function overRest(func, start, transform) { start = nativeMax(start === undefined ? (func.length - 1) : start, 0); return function() { var args = arguments, index = -1, length = nativeMax(args.length - start, 0), array = Array(length); while (++index < length) { array[index] = args[start + index]; } index = -1; var otherArgs = Array(start + 1); while (++index < start) { otherArgs[index] = args[index]; } otherArgs[start] = transform(array); return apply(func, this, otherArgs); }; } module.exports = overRest; /***/ }, /* 595 */ /***/ function(module, exports) { /** * A faster alternative to `Function#apply`, this function invokes `func` * with the `this` binding of `thisArg` and the arguments of `args`. * * @private * @param {Function} func The function to invoke. * @param {*} thisArg The `this` binding of `func`. * @param {Array} args The arguments to invoke `func` with. * @returns {*} Returns the result of `func`. 
*/ function apply(func, thisArg, args) { switch (args.length) { case 0: return func.call(thisArg); case 1: return func.call(thisArg, args[0]); case 2: return func.call(thisArg, args[0], args[1]); case 3: return func.call(thisArg, args[0], args[1], args[2]); } return func.apply(thisArg, args); } module.exports = apply; /***/ }, /* 596 */ /***/ function(module, exports, __webpack_require__) { var baseSetToString = __webpack_require__(597), shortOut = __webpack_require__(600); /** * Sets the `toString` method of `func` to return `string`. * * @private * @param {Function} func The function to modify. * @param {Function} string The `toString` result. * @returns {Function} Returns `func`. */ var setToString = shortOut(baseSetToString); module.exports = setToString; /***/ }, /* 597 */ /***/ function(module, exports, __webpack_require__) { var constant = __webpack_require__(598), defineProperty = __webpack_require__(599), identity = __webpack_require__(568); /** * The base implementation of `setToString` without support for hot loop shorting. * * @private * @param {Function} func The function to modify. * @param {Function} string The `toString` result. * @returns {Function} Returns `func`. */ var baseSetToString = !defineProperty ? identity : function(func, string) { return defineProperty(func, 'toString', { 'configurable': true, 'enumerable': false, 'value': constant(string), 'writable': true }); }; module.exports = baseSetToString; /***/ }, /* 598 */ /***/ function(module, exports) { /** * Creates a function that returns `value`. * * @static * @memberOf _ * @since 2.4.0 * @category Util * @param {*} value The value to return from the new function. * @returns {Function} Returns the new constant function. * @example * * var objects = _.times(2, _.constant({ 'a': 1 })); * * console.log(objects); * // => [{ 'a': 1 }, { 'a': 1 }] * * console.log(objects[0] === objects[1]); * // => true */ function constant(value) { return function() { return value; }; } module.exports = constant; /***/ }, /* 599 */ /***/ function(module, exports, __webpack_require__) { var getNative = __webpack_require__(476); var defineProperty = (function() { try { var func = getNative(Object, 'defineProperty'); func({}, '', {}); return func; } catch (e) {} }()); module.exports = defineProperty; /***/ }, /* 600 */ /***/ function(module, exports) { /** Used to detect hot functions by number of calls within a span of milliseconds. */ var HOT_COUNT = 800, HOT_SPAN = 16; /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeNow = Date.now; /** * Creates a function that'll short out and invoke `identity` instead * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` * milliseconds. * * @private * @param {Function} func The function to restrict. * @returns {Function} Returns the new shortable function. 
*/ function shortOut(func) { var count = 0, lastCalled = 0; return function() { var stamp = nativeNow(), remaining = HOT_SPAN - (stamp - lastCalled); lastCalled = stamp; if (remaining > 0) { if (++count >= HOT_COUNT) { return arguments[0]; } } else { count = 0; } return func.apply(undefined, arguments); }; } module.exports = shortOut; /***/ }, /* 601 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDom = __webpack_require__(29); var _reactDom2 = _interopRequireDefault(_reactDom); var _tether = __webpack_require__(602); var _tether2 = _interopRequireDefault(_tether); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; } function childrenPropType(_ref, propName, componentName) { var children = _ref.children; var childCount = _react.Children.count(children); if (childCount <= 0) { return new Error(componentName + ' expects at least one child to use as the target element.'); } else if (childCount > 2) { return new Error('Only a max of two children allowed in ' + componentName + '.'); } } var attachmentPositions = ['top left', 'top center', 'top right', 'middle left', 'middle center', 'middle right', 'bottom left', 'bottom center', 'bottom right']; var TetherComponent = _react2.default.createClass({ displayName: 'TetherComponent', propTypes: { attachment: _react.PropTypes.oneOf(attachmentPositions).isRequired, children: childrenPropType, className: _react.PropTypes.string, classPrefix: _react.PropTypes.string, classes: _react.PropTypes.object, constraints: _react.PropTypes.array, enabled: _react.PropTypes.bool, id: _react.PropTypes.string, offset: _react.PropTypes.string, optimizations: _react.PropTypes.object, renderElementTag: _react.PropTypes.string, renderElementTo: _react.PropTypes.any, style: _react.PropTypes.object, targetAttachment: _react.PropTypes.oneOf(attachmentPositions), targetModifier: _react.PropTypes.string, targetOffset: _react.PropTypes.string }, getDefaultProps: function getDefaultProps() { return { renderElementTag: 'div', renderElementTo: null }; }, componentDidMount: function componentDidMount() { this._targetNode = _reactDom2.default.findDOMNode(this); this._update(); }, componentDidUpdate: function componentDidUpdate() { this._update(); }, componentWillUnmount: function componentWillUnmount() { this._destroy(); }, disable: function disable() { this._tether.disable(); }, enable: function enable() { this._tether.enable(); }, position: function position() { this._tether.position(); }, _destroy: function _destroy() { if (this._elementParentNode) { _reactDom2.default.unmountComponentAtNode(this._elementParentNode); this._elementParentNode.parentNode.removeChild(this._elementParentNode); } if (this._tether) { this._tether.destroy(); } this._elementParentNode = null; this._tether = null; }, _update: function _update() { var _this = this; var _props = this.props, children = _props.children, renderElementTag = _props.renderElementTag, renderElementTo = 
_props.renderElementTo; var elementComponent = children[1]; // if no element component provided, bail out if (!elementComponent) { // destroy Tether elements if they have been created if (this._tether) { this._destroy(); } return; } // create element node container if it hasn't been yet if (!this._elementParentNode) { // create a node that we can stick our content Component in this._elementParentNode = document.createElement(renderElementTag); // append node to the end of the body var renderTo = renderElementTo || document.body; renderTo.appendChild(this._elementParentNode); } // render element component into the DOM _reactDom2.default.unstable_renderSubtreeIntoContainer(this, elementComponent, this._elementParentNode, function () { // don't update Tether until the subtree has finished rendering _this._updateTether(); }); }, _updateTether: function _updateTether() { var _props2 = this.props, renderElementTag = _props2.renderElementTag, renderElementTo = _props2.renderElementTo, options = _objectWithoutProperties(_props2, ['renderElementTag', 'renderElementTo']); // eslint-disable-line no-unused-vars var tetherOptions = _extends({ target: this._targetNode, element: this._elementParentNode }, options); if (!this._tether) { this._tether = new _tether2.default(tetherOptions); } else { this._tether.setOptions(tetherOptions); } this._tether.position(); }, render: function render() { var children = this.props.children; var firstChild = null; // we use forEach because the second child could be null // causing children to not be an array _react.Children.forEach(children, function (child, index) { if (index === 0) { firstChild = child; return false; } }); return firstChild; } }); module.exports = TetherComponent; /***/ }, /* 602 */ /***/ function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_RESULT__;/*! tether 1.4.0 */ (function(root, factory) { if (true) { !(__WEBPACK_AMD_DEFINE_FACTORY__ = (factory), __WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ? (__WEBPACK_AMD_DEFINE_FACTORY__.call(exports, __webpack_require__, exports, module)) : __WEBPACK_AMD_DEFINE_FACTORY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); } else if (typeof exports === 'object') { module.exports = factory(require, exports, module); } else { root.Tether = factory(); } }(this, function(require, exports, module) { 'use strict'; var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } } var TetherBase = undefined; if (typeof TetherBase === 'undefined') { TetherBase = { modules: [] }; } var zeroElement = null; // Same as native getBoundingClientRect, except it takes into account parent <frame> offsets // if the element lies within a nested document (<frame> or <iframe>-like). 
function getActualBoundingClientRect(node) { var boundingRect = node.getBoundingClientRect(); // The original object returned by getBoundingClientRect is immutable, so we clone it // We can't use extend because the properties are not considered part of the object by hasOwnProperty in IE9 var rect = {}; for (var k in boundingRect) { rect[k] = boundingRect[k]; } if (node.ownerDocument !== document) { var _frameElement = node.ownerDocument.defaultView.frameElement; if (_frameElement) { var frameRect = getActualBoundingClientRect(_frameElement); rect.top += frameRect.top; rect.bottom += frameRect.top; rect.left += frameRect.left; rect.right += frameRect.left; } } return rect; } function getScrollParents(el) { // In firefox if the el is inside an iframe with display: none; window.getComputedStyle() will return null; // https://bugzilla.mozilla.org/show_bug.cgi?id=548397 var computedStyle = getComputedStyle(el) || {}; var position = computedStyle.position; var parents = []; if (position === 'fixed') { return [el]; } var parent = el; while ((parent = parent.parentNode) && parent && parent.nodeType === 1) { var style = undefined; try { style = getComputedStyle(parent); } catch (err) {} if (typeof style === 'undefined' || style === null) { parents.push(parent); return parents; } var _style = style; var overflow = _style.overflow; var overflowX = _style.overflowX; var overflowY = _style.overflowY; if (/(auto|scroll)/.test(overflow + overflowY + overflowX)) { if (position !== 'absolute' || ['relative', 'absolute', 'fixed'].indexOf(style.position) >= 0) { parents.push(parent); } } } parents.push(el.ownerDocument.body); // If the node is within a frame, account for the parent window scroll if (el.ownerDocument !== document) { parents.push(el.ownerDocument.defaultView); } return parents; } var uniqueId = (function () { var id = 0; return function () { return ++id; }; })(); var zeroPosCache = {}; var getOrigin = function getOrigin() { // getBoundingClientRect is unfortunately too accurate. It introduces a pixel or two of // jitter as the user scrolls that messes with our ability to detect if two positions // are equivilant or not. We place an element at the top left of the page that will // get the same jitter, so we can cancel the two out. 
var node = zeroElement; if (!node || !document.body.contains(node)) { node = document.createElement('div'); node.setAttribute('data-tether-id', uniqueId()); extend(node.style, { top: 0, left: 0, position: 'absolute' }); document.body.appendChild(node); zeroElement = node; } var id = node.getAttribute('data-tether-id'); if (typeof zeroPosCache[id] === 'undefined') { zeroPosCache[id] = getActualBoundingClientRect(node); // Clear the cache when this position call is done defer(function () { delete zeroPosCache[id]; }); } return zeroPosCache[id]; }; function removeUtilElements() { if (zeroElement) { document.body.removeChild(zeroElement); } zeroElement = null; }; function getBounds(el) { var doc = undefined; if (el === document) { doc = document; el = document.documentElement; } else { doc = el.ownerDocument; } var docEl = doc.documentElement; var box = getActualBoundingClientRect(el); var origin = getOrigin(); box.top -= origin.top; box.left -= origin.left; if (typeof box.width === 'undefined') { box.width = document.body.scrollWidth - box.left - box.right; } if (typeof box.height === 'undefined') { box.height = document.body.scrollHeight - box.top - box.bottom; } box.top = box.top - docEl.clientTop; box.left = box.left - docEl.clientLeft; box.right = doc.body.clientWidth - box.width - box.left; box.bottom = doc.body.clientHeight - box.height - box.top; return box; } function getOffsetParent(el) { return el.offsetParent || document.documentElement; } var _scrollBarSize = null; function getScrollBarSize() { if (_scrollBarSize) { return _scrollBarSize; } var inner = document.createElement('div'); inner.style.width = '100%'; inner.style.height = '200px'; var outer = document.createElement('div'); extend(outer.style, { position: 'absolute', top: 0, left: 0, pointerEvents: 'none', visibility: 'hidden', width: '200px', height: '150px', overflow: 'hidden' }); outer.appendChild(inner); document.body.appendChild(outer); var widthContained = inner.offsetWidth; outer.style.overflow = 'scroll'; var widthScroll = inner.offsetWidth; if (widthContained === widthScroll) { widthScroll = outer.clientWidth; } document.body.removeChild(outer); var width = widthContained - widthScroll; _scrollBarSize = { width: width, height: width }; return _scrollBarSize; } function extend() { var out = arguments.length <= 0 || arguments[0] === undefined ? 
{} : arguments[0]; var args = []; Array.prototype.push.apply(args, arguments); args.slice(1).forEach(function (obj) { if (obj) { for (var key in obj) { if (({}).hasOwnProperty.call(obj, key)) { out[key] = obj[key]; } } } }); return out; } function removeClass(el, name) { if (typeof el.classList !== 'undefined') { name.split(' ').forEach(function (cls) { if (cls.trim()) { el.classList.remove(cls); } }); } else { var regex = new RegExp('(^| )' + name.split(' ').join('|') + '( |$)', 'gi'); var className = getClassName(el).replace(regex, ' '); setClassName(el, className); } } function addClass(el, name) { if (typeof el.classList !== 'undefined') { name.split(' ').forEach(function (cls) { if (cls.trim()) { el.classList.add(cls); } }); } else { removeClass(el, name); var cls = getClassName(el) + (' ' + name); setClassName(el, cls); } } function hasClass(el, name) { if (typeof el.classList !== 'undefined') { return el.classList.contains(name); } var className = getClassName(el); return new RegExp('(^| )' + name + '( |$)', 'gi').test(className); } function getClassName(el) { // Can't use just SVGAnimatedString here since nodes within a Frame in IE have // completely separately SVGAnimatedString base classes if (el.className instanceof el.ownerDocument.defaultView.SVGAnimatedString) { return el.className.baseVal; } return el.className; } function setClassName(el, className) { el.setAttribute('class', className); } function updateClasses(el, add, all) { // Of the set of 'all' classes, we need the 'add' classes, and only the // 'add' classes to be set. all.forEach(function (cls) { if (add.indexOf(cls) === -1 && hasClass(el, cls)) { removeClass(el, cls); } }); add.forEach(function (cls) { if (!hasClass(el, cls)) { addClass(el, cls); } }); } var deferred = []; var defer = function defer(fn) { deferred.push(fn); }; var flush = function flush() { var fn = undefined; while (fn = deferred.pop()) { fn(); } }; var Evented = (function () { function Evented() { _classCallCheck(this, Evented); } _createClass(Evented, [{ key: 'on', value: function on(event, handler, ctx) { var once = arguments.length <= 3 || arguments[3] === undefined ? false : arguments[3]; if (typeof this.bindings === 'undefined') { this.bindings = {}; } if (typeof this.bindings[event] === 'undefined') { this.bindings[event] = []; } this.bindings[event].push({ handler: handler, ctx: ctx, once: once }); } }, { key: 'once', value: function once(event, handler, ctx) { this.on(event, handler, ctx, true); } }, { key: 'off', value: function off(event, handler) { if (typeof this.bindings === 'undefined' || typeof this.bindings[event] === 'undefined') { return; } if (typeof handler === 'undefined') { delete this.bindings[event]; } else { var i = 0; while (i < this.bindings[event].length) { if (this.bindings[event][i].handler === handler) { this.bindings[event].splice(i, 1); } else { ++i; } } } } }, { key: 'trigger', value: function trigger(event) { if (typeof this.bindings !== 'undefined' && this.bindings[event]) { var i = 0; for (var _len = arguments.length, args = Array(_len > 1 ? 
_len - 1 : 0), _key = 1; _key < _len; _key++) { args[_key - 1] = arguments[_key]; } while (i < this.bindings[event].length) { var _bindings$event$i = this.bindings[event][i]; var handler = _bindings$event$i.handler; var ctx = _bindings$event$i.ctx; var once = _bindings$event$i.once; var context = ctx; if (typeof context === 'undefined') { context = this; } handler.apply(context, args); if (once) { this.bindings[event].splice(i, 1); } else { ++i; } } } } }]); return Evented; })(); TetherBase.Utils = { getActualBoundingClientRect: getActualBoundingClientRect, getScrollParents: getScrollParents, getBounds: getBounds, getOffsetParent: getOffsetParent, extend: extend, addClass: addClass, removeClass: removeClass, hasClass: hasClass, updateClasses: updateClasses, defer: defer, flush: flush, uniqueId: uniqueId, Evented: Evented, getScrollBarSize: getScrollBarSize, removeUtilElements: removeUtilElements }; /* globals TetherBase, performance */ 'use strict'; var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })(); var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); var _get = function get(_x6, _x7, _x8) { var _again = true; _function: while (_again) { var object = _x6, property = _x7, receiver = _x8; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x6 = parent; _x7 = property; _x8 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } }; function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } } function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } if (typeof TetherBase === 'undefined') { throw new Error('You must include the utils.js file before tether.js'); } var _TetherBase$Utils = TetherBase.Utils; var getScrollParents = _TetherBase$Utils.getScrollParents; var getBounds = _TetherBase$Utils.getBounds; var getOffsetParent = _TetherBase$Utils.getOffsetParent; var extend = _TetherBase$Utils.extend; var addClass = _TetherBase$Utils.addClass; var removeClass = _TetherBase$Utils.removeClass; var updateClasses = _TetherBase$Utils.updateClasses; var defer = _TetherBase$Utils.defer; var flush = _TetherBase$Utils.flush; var getScrollBarSize = _TetherBase$Utils.getScrollBarSize; var removeUtilElements = _TetherBase$Utils.removeUtilElements; function within(a, b) { var diff = arguments.length <= 2 || arguments[2] === undefined ? 1 : arguments[2]; return a + diff >= b && b >= a - diff; } var transformKey = (function () { if (typeof document === 'undefined') { return ''; } var el = document.createElement('div'); var transforms = ['transform', 'WebkitTransform', 'OTransform', 'MozTransform', 'msTransform']; for (var i = 0; i < transforms.length; ++i) { var key = transforms[i]; if (el.style[key] !== undefined) { return key; } } })(); var tethers = []; var position = function position() { tethers.forEach(function (tether) { tether.position(false); }); flush(); }; function now() { if (typeof performance !== 'undefined' && typeof performance.now !== 'undefined') { return performance.now(); } return +new Date(); } (function () { var lastCall = null; var lastDuration = null; var pendingTimeout = null; var tick = function tick() { if (typeof lastDuration !== 'undefined' && lastDuration > 16) { // We voluntarily throttle ourselves if we can't manage 60fps lastDuration = Math.min(lastDuration - 16, 250); // Just in case this is the last event, remember to position just once more pendingTimeout = setTimeout(tick, 250); return; } if (typeof lastCall !== 'undefined' && now() - lastCall < 10) { // Some browsers call events a little too frequently, refuse to run more than is reasonable return; } if (pendingTimeout != null) { clearTimeout(pendingTimeout); pendingTimeout = null; } lastCall = now(); position(); lastDuration = now() - lastCall; }; if (typeof window !== 'undefined' && typeof window.addEventListener !== 'undefined') { ['resize', 'scroll', 'touchmove'].forEach(function (event) { window.addEventListener(event, tick); }); } })(); var MIRROR_LR = { center: 'center', left: 'right', right: 'left' }; var MIRROR_TB = { middle: 'middle', top: 'bottom', bottom: 'top' }; var OFFSET_MAP = { top: 0, left: 0, middle: '50%', center: '50%', bottom: '100%', right: '100%' }; var autoToFixedAttachment = function autoToFixedAttachment(attachment, relativeToAttachment) { var left = attachment.left; var top = attachment.top; if (left === 'auto') { left = MIRROR_LR[relativeToAttachment.left]; } if (top === 'auto') { top = MIRROR_TB[relativeToAttachment.top]; } return { left: left, top: top }; }; var attachmentToOffset = function attachmentToOffset(attachment) { var left = attachment.left; var top = attachment.top; if (typeof OFFSET_MAP[attachment.left] !== 'undefined') { left = OFFSET_MAP[attachment.left]; } if (typeof OFFSET_MAP[attachment.top] !== 'undefined') { top = OFFSET_MAP[attachment.top]; } return { left: left, top: top }; }; function addOffset() { var out = { top: 0, left: 0 }; for (var _len = arguments.length, offsets = Array(_len), _key = 0; _key < _len; _key++) { offsets[_key] = 
arguments[_key]; } offsets.forEach(function (_ref) { var top = _ref.top; var left = _ref.left; if (typeof top === 'string') { top = parseFloat(top, 10); } if (typeof left === 'string') { left = parseFloat(left, 10); } out.top += top; out.left += left; }); return out; } function offsetToPx(offset, size) { if (typeof offset.left === 'string' && offset.left.indexOf('%') !== -1) { offset.left = parseFloat(offset.left, 10) / 100 * size.width; } if (typeof offset.top === 'string' && offset.top.indexOf('%') !== -1) { offset.top = parseFloat(offset.top, 10) / 100 * size.height; } return offset; } var parseOffset = function parseOffset(value) { var _value$split = value.split(' '); var _value$split2 = _slicedToArray(_value$split, 2); var top = _value$split2[0]; var left = _value$split2[1]; return { top: top, left: left }; }; var parseAttachment = parseOffset; var TetherClass = (function (_Evented) { _inherits(TetherClass, _Evented); function TetherClass(options) { var _this = this; _classCallCheck(this, TetherClass); _get(Object.getPrototypeOf(TetherClass.prototype), 'constructor', this).call(this); this.position = this.position.bind(this); tethers.push(this); this.history = []; this.setOptions(options, false); TetherBase.modules.forEach(function (module) { if (typeof module.initialize !== 'undefined') { module.initialize.call(_this); } }); this.position(); } _createClass(TetherClass, [{ key: 'getClass', value: function getClass() { var key = arguments.length <= 0 || arguments[0] === undefined ? '' : arguments[0]; var classes = this.options.classes; if (typeof classes !== 'undefined' && classes[key]) { return this.options.classes[key]; } else if (this.options.classPrefix) { return this.options.classPrefix + '-' + key; } else { return key; } } }, { key: 'setOptions', value: function setOptions(options) { var _this2 = this; var pos = arguments.length <= 1 || arguments[1] === undefined ? 
true : arguments[1]; var defaults = { offset: '0 0', targetOffset: '0 0', targetAttachment: 'auto auto', classPrefix: 'tether' }; this.options = extend(defaults, options); var _options = this.options; var element = _options.element; var target = _options.target; var targetModifier = _options.targetModifier; this.element = element; this.target = target; this.targetModifier = targetModifier; if (this.target === 'viewport') { this.target = document.body; this.targetModifier = 'visible'; } else if (this.target === 'scroll-handle') { this.target = document.body; this.targetModifier = 'scroll-handle'; } ['element', 'target'].forEach(function (key) { if (typeof _this2[key] === 'undefined') { throw new Error('Tether Error: Both element and target must be defined'); } if (typeof _this2[key].jquery !== 'undefined') { _this2[key] = _this2[key][0]; } else if (typeof _this2[key] === 'string') { _this2[key] = document.querySelector(_this2[key]); } }); addClass(this.element, this.getClass('element')); if (!(this.options.addTargetClasses === false)) { addClass(this.target, this.getClass('target')); } if (!this.options.attachment) { throw new Error('Tether Error: You must provide an attachment'); } this.targetAttachment = parseAttachment(this.options.targetAttachment); this.attachment = parseAttachment(this.options.attachment); this.offset = parseOffset(this.options.offset); this.targetOffset = parseOffset(this.options.targetOffset); if (typeof this.scrollParents !== 'undefined') { this.disable(); } if (this.targetModifier === 'scroll-handle') { this.scrollParents = [this.target]; } else { this.scrollParents = getScrollParents(this.target); } if (!(this.options.enabled === false)) { this.enable(pos); } } }, { key: 'getTargetBounds', value: function getTargetBounds() { if (typeof this.targetModifier !== 'undefined') { if (this.targetModifier === 'visible') { if (this.target === document.body) { return { top: pageYOffset, left: pageXOffset, height: innerHeight, width: innerWidth }; } else { var bounds = getBounds(this.target); var out = { height: bounds.height, width: bounds.width, top: bounds.top, left: bounds.left }; out.height = Math.min(out.height, bounds.height - (pageYOffset - bounds.top)); out.height = Math.min(out.height, bounds.height - (bounds.top + bounds.height - (pageYOffset + innerHeight))); out.height = Math.min(innerHeight, out.height); out.height -= 2; out.width = Math.min(out.width, bounds.width - (pageXOffset - bounds.left)); out.width = Math.min(out.width, bounds.width - (bounds.left + bounds.width - (pageXOffset + innerWidth))); out.width = Math.min(innerWidth, out.width); out.width -= 2; if (out.top < pageYOffset) { out.top = pageYOffset; } if (out.left < pageXOffset) { out.left = pageXOffset; } return out; } } else if (this.targetModifier === 'scroll-handle') { var bounds = undefined; var target = this.target; if (target === document.body) { target = document.documentElement; bounds = { left: pageXOffset, top: pageYOffset, height: innerHeight, width: innerWidth }; } else { bounds = getBounds(target); } var style = getComputedStyle(target); var hasBottomScroll = target.scrollWidth > target.clientWidth || [style.overflow, style.overflowX].indexOf('scroll') >= 0 || this.target !== document.body; var scrollBottom = 0; if (hasBottomScroll) { scrollBottom = 15; } var height = bounds.height - parseFloat(style.borderTopWidth) - parseFloat(style.borderBottomWidth) - scrollBottom; var out = { width: 15, height: height * 0.975 * (height / target.scrollHeight), left: bounds.left + bounds.width - 
parseFloat(style.borderLeftWidth) - 15 }; var fitAdj = 0; if (height < 408 && this.target === document.body) { fitAdj = -0.00011 * Math.pow(height, 2) - 0.00727 * height + 22.58; } if (this.target !== document.body) { out.height = Math.max(out.height, 24); } var scrollPercentage = this.target.scrollTop / (target.scrollHeight - height); out.top = scrollPercentage * (height - out.height - fitAdj) + bounds.top + parseFloat(style.borderTopWidth); if (this.target === document.body) { out.height = Math.max(out.height, 24); } return out; } } else { return getBounds(this.target); } } }, { key: 'clearCache', value: function clearCache() { this._cache = {}; } }, { key: 'cache', value: function cache(k, getter) { // More than one module will often need the same DOM info, so // we keep a cache which is cleared on each position call if (typeof this._cache === 'undefined') { this._cache = {}; } if (typeof this._cache[k] === 'undefined') { this._cache[k] = getter.call(this); } return this._cache[k]; } }, { key: 'enable', value: function enable() { var _this3 = this; var pos = arguments.length <= 0 || arguments[0] === undefined ? true : arguments[0]; if (!(this.options.addTargetClasses === false)) { addClass(this.target, this.getClass('enabled')); } addClass(this.element, this.getClass('enabled')); this.enabled = true; this.scrollParents.forEach(function (parent) { if (parent !== _this3.target.ownerDocument) { parent.addEventListener('scroll', _this3.position); } }); if (pos) { this.position(); } } }, { key: 'disable', value: function disable() { var _this4 = this; removeClass(this.target, this.getClass('enabled')); removeClass(this.element, this.getClass('enabled')); this.enabled = false; if (typeof this.scrollParents !== 'undefined') { this.scrollParents.forEach(function (parent) { parent.removeEventListener('scroll', _this4.position); }); } } }, { key: 'destroy', value: function destroy() { var _this5 = this; this.disable(); tethers.forEach(function (tether, i) { if (tether === _this5) { tethers.splice(i, 1); } }); // Remove any elements we were using for convenience from the DOM if (tethers.length === 0) { removeUtilElements(); } } }, { key: 'updateAttachClasses', value: function updateAttachClasses(elementAttach, targetAttach) { var _this6 = this; elementAttach = elementAttach || this.attachment; targetAttach = targetAttach || this.targetAttachment; var sides = ['left', 'top', 'bottom', 'right', 'middle', 'center']; if (typeof this._addAttachClasses !== 'undefined' && this._addAttachClasses.length) { // updateAttachClasses can be called more than once in a position call, so // we need to clean up after ourselves such that when the last defer gets // ran it doesn't add any extra classes from previous calls. 
this._addAttachClasses.splice(0, this._addAttachClasses.length); } if (typeof this._addAttachClasses === 'undefined') { this._addAttachClasses = []; } var add = this._addAttachClasses; if (elementAttach.top) { add.push(this.getClass('element-attached') + '-' + elementAttach.top); } if (elementAttach.left) { add.push(this.getClass('element-attached') + '-' + elementAttach.left); } if (targetAttach.top) { add.push(this.getClass('target-attached') + '-' + targetAttach.top); } if (targetAttach.left) { add.push(this.getClass('target-attached') + '-' + targetAttach.left); } var all = []; sides.forEach(function (side) { all.push(_this6.getClass('element-attached') + '-' + side); all.push(_this6.getClass('target-attached') + '-' + side); }); defer(function () { if (!(typeof _this6._addAttachClasses !== 'undefined')) { return; } updateClasses(_this6.element, _this6._addAttachClasses, all); if (!(_this6.options.addTargetClasses === false)) { updateClasses(_this6.target, _this6._addAttachClasses, all); } delete _this6._addAttachClasses; }); } }, { key: 'position', value: function position() { var _this7 = this; var flushChanges = arguments.length <= 0 || arguments[0] === undefined ? true : arguments[0]; // flushChanges commits the changes immediately, leave true unless you are positioning multiple // tethers (in which case call Tether.Utils.flush yourself when you're done) if (!this.enabled) { return; } this.clearCache(); // Turn 'auto' attachments into the appropriate corner or edge var targetAttachment = autoToFixedAttachment(this.targetAttachment, this.attachment); this.updateAttachClasses(this.attachment, targetAttachment); var elementPos = this.cache('element-bounds', function () { return getBounds(_this7.element); }); var width = elementPos.width; var height = elementPos.height; if (width === 0 && height === 0 && typeof this.lastSize !== 'undefined') { var _lastSize = this.lastSize; // We cache the height and width to make it possible to position elements that are // getting hidden. 
width = _lastSize.width; height = _lastSize.height; } else { this.lastSize = { width: width, height: height }; } var targetPos = this.cache('target-bounds', function () { return _this7.getTargetBounds(); }); var targetSize = targetPos; // Get an actual px offset from the attachment var offset = offsetToPx(attachmentToOffset(this.attachment), { width: width, height: height }); var targetOffset = offsetToPx(attachmentToOffset(targetAttachment), targetSize); var manualOffset = offsetToPx(this.offset, { width: width, height: height }); var manualTargetOffset = offsetToPx(this.targetOffset, targetSize); // Add the manually provided offset offset = addOffset(offset, manualOffset); targetOffset = addOffset(targetOffset, manualTargetOffset); // It's now our goal to make (element position + offset) == (target position + target offset) var left = targetPos.left + targetOffset.left - offset.left; var top = targetPos.top + targetOffset.top - offset.top; for (var i = 0; i < TetherBase.modules.length; ++i) { var _module2 = TetherBase.modules[i]; var ret = _module2.position.call(this, { left: left, top: top, targetAttachment: targetAttachment, targetPos: targetPos, elementPos: elementPos, offset: offset, targetOffset: targetOffset, manualOffset: manualOffset, manualTargetOffset: manualTargetOffset, scrollbarSize: scrollbarSize, attachment: this.attachment }); if (ret === false) { return false; } else if (typeof ret === 'undefined' || typeof ret !== 'object') { continue; } else { top = ret.top; left = ret.left; } } // We describe the position three different ways to give the optimizer // a chance to decide the best possible way to position the element // with the fewest repaints. var next = { // It's position relative to the page (absolute positioning when // the element is a child of the body) page: { top: top, left: left }, // It's position relative to the viewport (fixed positioning) viewport: { top: top - pageYOffset, bottom: pageYOffset - top - height + innerHeight, left: left - pageXOffset, right: pageXOffset - left - width + innerWidth } }; var doc = this.target.ownerDocument; var win = doc.defaultView; var scrollbarSize = undefined; if (win.innerHeight > doc.documentElement.clientHeight) { scrollbarSize = this.cache('scrollbar-size', getScrollBarSize); next.viewport.bottom -= scrollbarSize.height; } if (win.innerWidth > doc.documentElement.clientWidth) { scrollbarSize = this.cache('scrollbar-size', getScrollBarSize); next.viewport.right -= scrollbarSize.width; } if (['', 'static'].indexOf(doc.body.style.position) === -1 || ['', 'static'].indexOf(doc.body.parentElement.style.position) === -1) { // Absolute positioning in the body will be relative to the page, not the 'initial containing block' next.page.bottom = doc.body.scrollHeight - top - height; next.page.right = doc.body.scrollWidth - left - width; } if (typeof this.options.optimizations !== 'undefined' && this.options.optimizations.moveElement !== false && !(typeof this.targetModifier !== 'undefined')) { (function () { var offsetParent = _this7.cache('target-offsetparent', function () { return getOffsetParent(_this7.target); }); var offsetPosition = _this7.cache('target-offsetparent-bounds', function () { return getBounds(offsetParent); }); var offsetParentStyle = getComputedStyle(offsetParent); var offsetParentSize = offsetPosition; var offsetBorder = {}; ['Top', 'Left', 'Bottom', 'Right'].forEach(function (side) { offsetBorder[side.toLowerCase()] = parseFloat(offsetParentStyle['border' + side + 'Width']); }); offsetPosition.right = 
doc.body.scrollWidth - offsetPosition.left - offsetParentSize.width + offsetBorder.right; offsetPosition.bottom = doc.body.scrollHeight - offsetPosition.top - offsetParentSize.height + offsetBorder.bottom; if (next.page.top >= offsetPosition.top + offsetBorder.top && next.page.bottom >= offsetPosition.bottom) { if (next.page.left >= offsetPosition.left + offsetBorder.left && next.page.right >= offsetPosition.right) { // We're within the visible part of the target's scroll parent var scrollTop = offsetParent.scrollTop; var scrollLeft = offsetParent.scrollLeft; // It's position relative to the target's offset parent (absolute positioning when // the element is moved to be a child of the target's offset parent). next.offset = { top: next.page.top - offsetPosition.top + scrollTop - offsetBorder.top, left: next.page.left - offsetPosition.left + scrollLeft - offsetBorder.left }; } } })(); } // We could also travel up the DOM and try each containing context, rather than only // looking at the body, but we're gonna get diminishing returns. this.move(next); this.history.unshift(next); if (this.history.length > 3) { this.history.pop(); } if (flushChanges) { flush(); } return true; } // THE ISSUE }, { key: 'move', value: function move(pos) { var _this8 = this; if (!(typeof this.element.parentNode !== 'undefined')) { return; } var same = {}; for (var type in pos) { same[type] = {}; for (var key in pos[type]) { var found = false; for (var i = 0; i < this.history.length; ++i) { var point = this.history[i]; if (typeof point[type] !== 'undefined' && !within(point[type][key], pos[type][key])) { found = true; break; } } if (!found) { same[type][key] = true; } } } var css = { top: '', left: '', right: '', bottom: '' }; var transcribe = function transcribe(_same, _pos) { var hasOptimizations = typeof _this8.options.optimizations !== 'undefined'; var gpu = hasOptimizations ? _this8.options.optimizations.gpu : null; if (gpu !== false) { var yPos = undefined, xPos = undefined; if (_same.top) { css.top = 0; yPos = _pos.top; } else { css.bottom = 0; yPos = -_pos.bottom; } if (_same.left) { css.left = 0; xPos = _pos.left; } else { css.right = 0; xPos = -_pos.right; } if (window.matchMedia) { // HubSpot/tether#207 var retina = window.matchMedia('only screen and (min-resolution: 1.3dppx)').matches || window.matchMedia('only screen and (-webkit-min-device-pixel-ratio: 1.3)').matches; if (!retina) { xPos = Math.round(xPos); yPos = Math.round(yPos); } } css[transformKey] = 'translateX(' + xPos + 'px) translateY(' + yPos + 'px)'; if (transformKey !== 'msTransform') { // The Z transform will keep this in the GPU (faster, and prevents artifacts), // but IE9 doesn't support 3d transforms and will choke. 
css[transformKey] += " translateZ(0)"; } } else { if (_same.top) { css.top = _pos.top + 'px'; } else { css.bottom = _pos.bottom + 'px'; } if (_same.left) { css.left = _pos.left + 'px'; } else { css.right = _pos.right + 'px'; } } }; var moved = false; if ((same.page.top || same.page.bottom) && (same.page.left || same.page.right)) { css.position = 'absolute'; transcribe(same.page, pos.page); } else if ((same.viewport.top || same.viewport.bottom) && (same.viewport.left || same.viewport.right)) { css.position = 'fixed'; transcribe(same.viewport, pos.viewport); } else if (typeof same.offset !== 'undefined' && same.offset.top && same.offset.left) { (function () { css.position = 'absolute'; var offsetParent = _this8.cache('target-offsetparent', function () { return getOffsetParent(_this8.target); }); if (getOffsetParent(_this8.element) !== offsetParent) { defer(function () { _this8.element.parentNode.removeChild(_this8.element); offsetParent.appendChild(_this8.element); }); } transcribe(same.offset, pos.offset); moved = true; })(); } else { css.position = 'absolute'; transcribe({ top: true, left: true }, pos.page); } if (!moved) { if (this.options.bodyElement) { this.options.bodyElement.appendChild(this.element); } else { var offsetParentIsBody = true; var currentNode = this.element.parentNode; while (currentNode && currentNode.nodeType === 1 && currentNode.tagName !== 'BODY') { if (getComputedStyle(currentNode).position !== 'static') { offsetParentIsBody = false; break; } currentNode = currentNode.parentNode; } if (!offsetParentIsBody) { this.element.parentNode.removeChild(this.element); this.element.ownerDocument.body.appendChild(this.element); } } } // Any css change will trigger a repaint, so let's avoid one if nothing changed var writeCSS = {}; var write = false; for (var key in css) { var val = css[key]; var elVal = this.element.style[key]; if (elVal !== val) { write = true; writeCSS[key] = val; } } if (write) { defer(function () { extend(_this8.element.style, writeCSS); _this8.trigger('repositioned'); }); } } }]); return TetherClass; })(Evented); TetherClass.modules = []; TetherBase.position = position; var Tether = extend(TetherClass, TetherBase); /* globals TetherBase */ 'use strict'; var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })(); var _TetherBase$Utils = TetherBase.Utils; var getBounds = _TetherBase$Utils.getBounds; var extend = _TetherBase$Utils.extend; var updateClasses = _TetherBase$Utils.updateClasses; var defer = _TetherBase$Utils.defer; var BOUNDS_FORMAT = ['left', 'top', 'right', 'bottom']; function getBoundingRect(tether, to) { if (to === 'scrollParent') { to = tether.scrollParents[0]; } else if (to === 'window') { to = [pageXOffset, pageYOffset, innerWidth + pageXOffset, innerHeight + pageYOffset]; } if (to === document) { to = to.documentElement; } if (typeof to.nodeType !== 'undefined') { (function () { var node = to; var size = getBounds(to); var pos = size; var style 
= getComputedStyle(to); to = [pos.left, pos.top, size.width + pos.left, size.height + pos.top]; // Account any parent Frames scroll offset if (node.ownerDocument !== document) { var win = node.ownerDocument.defaultView; to[0] += win.pageXOffset; to[1] += win.pageYOffset; to[2] += win.pageXOffset; to[3] += win.pageYOffset; } BOUNDS_FORMAT.forEach(function (side, i) { side = side[0].toUpperCase() + side.substr(1); if (side === 'Top' || side === 'Left') { to[i] += parseFloat(style['border' + side + 'Width']); } else { to[i] -= parseFloat(style['border' + side + 'Width']); } }); })(); } return to; } TetherBase.modules.push({ position: function position(_ref) { var _this = this; var top = _ref.top; var left = _ref.left; var targetAttachment = _ref.targetAttachment; if (!this.options.constraints) { return true; } var _cache = this.cache('element-bounds', function () { return getBounds(_this.element); }); var height = _cache.height; var width = _cache.width; if (width === 0 && height === 0 && typeof this.lastSize !== 'undefined') { var _lastSize = this.lastSize; // Handle the item getting hidden as a result of our positioning without glitching // the classes in and out width = _lastSize.width; height = _lastSize.height; } var targetSize = this.cache('target-bounds', function () { return _this.getTargetBounds(); }); var targetHeight = targetSize.height; var targetWidth = targetSize.width; var allClasses = [this.getClass('pinned'), this.getClass('out-of-bounds')]; this.options.constraints.forEach(function (constraint) { var outOfBoundsClass = constraint.outOfBoundsClass; var pinnedClass = constraint.pinnedClass; if (outOfBoundsClass) { allClasses.push(outOfBoundsClass); } if (pinnedClass) { allClasses.push(pinnedClass); } }); allClasses.forEach(function (cls) { ['left', 'top', 'right', 'bottom'].forEach(function (side) { allClasses.push(cls + '-' + side); }); }); var addClasses = []; var tAttachment = extend({}, targetAttachment); var eAttachment = extend({}, this.attachment); this.options.constraints.forEach(function (constraint) { var to = constraint.to; var attachment = constraint.attachment; var pin = constraint.pin; if (typeof attachment === 'undefined') { attachment = ''; } var changeAttachX = undefined, changeAttachY = undefined; if (attachment.indexOf(' ') >= 0) { var _attachment$split = attachment.split(' '); var _attachment$split2 = _slicedToArray(_attachment$split, 2); changeAttachY = _attachment$split2[0]; changeAttachX = _attachment$split2[1]; } else { changeAttachX = changeAttachY = attachment; } var bounds = getBoundingRect(_this, to); if (changeAttachY === 'target' || changeAttachY === 'both') { if (top < bounds[1] && tAttachment.top === 'top') { top += targetHeight; tAttachment.top = 'bottom'; } if (top + height > bounds[3] && tAttachment.top === 'bottom') { top -= targetHeight; tAttachment.top = 'top'; } } if (changeAttachY === 'together') { if (tAttachment.top === 'top') { if (eAttachment.top === 'bottom' && top < bounds[1]) { top += targetHeight; tAttachment.top = 'bottom'; top += height; eAttachment.top = 'top'; } else if (eAttachment.top === 'top' && top + height > bounds[3] && top - (height - targetHeight) >= bounds[1]) { top -= height - targetHeight; tAttachment.top = 'bottom'; eAttachment.top = 'bottom'; } } if (tAttachment.top === 'bottom') { if (eAttachment.top === 'top' && top + height > bounds[3]) { top -= targetHeight; tAttachment.top = 'top'; top -= height; eAttachment.top = 'bottom'; } else if (eAttachment.top === 'bottom' && top < bounds[1] && top + (height * 2 - 
targetHeight) <= bounds[3]) { top += height - targetHeight; tAttachment.top = 'top'; eAttachment.top = 'top'; } } if (tAttachment.top === 'middle') { if (top + height > bounds[3] && eAttachment.top === 'top') { top -= height; eAttachment.top = 'bottom'; } else if (top < bounds[1] && eAttachment.top === 'bottom') { top += height; eAttachment.top = 'top'; } } } if (changeAttachX === 'target' || changeAttachX === 'both') { if (left < bounds[0] && tAttachment.left === 'left') { left += targetWidth; tAttachment.left = 'right'; } if (left + width > bounds[2] && tAttachment.left === 'right') { left -= targetWidth; tAttachment.left = 'left'; } } if (changeAttachX === 'together') { if (left < bounds[0] && tAttachment.left === 'left') { if (eAttachment.left === 'right') { left += targetWidth; tAttachment.left = 'right'; left += width; eAttachment.left = 'left'; } else if (eAttachment.left === 'left') { left += targetWidth; tAttachment.left = 'right'; left -= width; eAttachment.left = 'right'; } } else if (left + width > bounds[2] && tAttachment.left === 'right') { if (eAttachment.left === 'left') { left -= targetWidth; tAttachment.left = 'left'; left -= width; eAttachment.left = 'right'; } else if (eAttachment.left === 'right') { left -= targetWidth; tAttachment.left = 'left'; left += width; eAttachment.left = 'left'; } } else if (tAttachment.left === 'center') { if (left + width > bounds[2] && eAttachment.left === 'left') { left -= width; eAttachment.left = 'right'; } else if (left < bounds[0] && eAttachment.left === 'right') { left += width; eAttachment.left = 'left'; } } } if (changeAttachY === 'element' || changeAttachY === 'both') { if (top < bounds[1] && eAttachment.top === 'bottom') { top += height; eAttachment.top = 'top'; } if (top + height > bounds[3] && eAttachment.top === 'top') { top -= height; eAttachment.top = 'bottom'; } } if (changeAttachX === 'element' || changeAttachX === 'both') { if (left < bounds[0]) { if (eAttachment.left === 'right') { left += width; eAttachment.left = 'left'; } else if (eAttachment.left === 'center') { left += width / 2; eAttachment.left = 'left'; } } if (left + width > bounds[2]) { if (eAttachment.left === 'left') { left -= width; eAttachment.left = 'right'; } else if (eAttachment.left === 'center') { left -= width / 2; eAttachment.left = 'right'; } } } if (typeof pin === 'string') { pin = pin.split(',').map(function (p) { return p.trim(); }); } else if (pin === true) { pin = ['top', 'left', 'right', 'bottom']; } pin = pin || []; var pinned = []; var oob = []; if (top < bounds[1]) { if (pin.indexOf('top') >= 0) { top = bounds[1]; pinned.push('top'); } else { oob.push('top'); } } if (top + height > bounds[3]) { if (pin.indexOf('bottom') >= 0) { top = bounds[3] - height; pinned.push('bottom'); } else { oob.push('bottom'); } } if (left < bounds[0]) { if (pin.indexOf('left') >= 0) { left = bounds[0]; pinned.push('left'); } else { oob.push('left'); } } if (left + width > bounds[2]) { if (pin.indexOf('right') >= 0) { left = bounds[2] - width; pinned.push('right'); } else { oob.push('right'); } } if (pinned.length) { (function () { var pinnedClass = undefined; if (typeof _this.options.pinnedClass !== 'undefined') { pinnedClass = _this.options.pinnedClass; } else { pinnedClass = _this.getClass('pinned'); } addClasses.push(pinnedClass); pinned.forEach(function (side) { addClasses.push(pinnedClass + '-' + side); }); })(); } if (oob.length) { (function () { var oobClass = undefined; if (typeof _this.options.outOfBoundsClass !== 'undefined') { oobClass = 
_this.options.outOfBoundsClass; } else { oobClass = _this.getClass('out-of-bounds'); } addClasses.push(oobClass); oob.forEach(function (side) { addClasses.push(oobClass + '-' + side); }); })(); } if (pinned.indexOf('left') >= 0 || pinned.indexOf('right') >= 0) { eAttachment.left = tAttachment.left = false; } if (pinned.indexOf('top') >= 0 || pinned.indexOf('bottom') >= 0) { eAttachment.top = tAttachment.top = false; } if (tAttachment.top !== targetAttachment.top || tAttachment.left !== targetAttachment.left || eAttachment.top !== _this.attachment.top || eAttachment.left !== _this.attachment.left) { _this.updateAttachClasses(eAttachment, tAttachment); _this.trigger('update', { attachment: eAttachment, targetAttachment: tAttachment }); } }); defer(function () { if (!(_this.options.addTargetClasses === false)) { updateClasses(_this.target, addClasses, allClasses); } updateClasses(_this.element, addClasses, allClasses); }); return { top: top, left: left }; } }); /* globals TetherBase */ 'use strict'; var _TetherBase$Utils = TetherBase.Utils; var getBounds = _TetherBase$Utils.getBounds; var updateClasses = _TetherBase$Utils.updateClasses; var defer = _TetherBase$Utils.defer; TetherBase.modules.push({ position: function position(_ref) { var _this = this; var top = _ref.top; var left = _ref.left; var _cache = this.cache('element-bounds', function () { return getBounds(_this.element); }); var height = _cache.height; var width = _cache.width; var targetPos = this.getTargetBounds(); var bottom = top + height; var right = left + width; var abutted = []; if (top <= targetPos.bottom && bottom >= targetPos.top) { ['left', 'right'].forEach(function (side) { var targetPosSide = targetPos[side]; if (targetPosSide === left || targetPosSide === right) { abutted.push(side); } }); } if (left <= targetPos.right && right >= targetPos.left) { ['top', 'bottom'].forEach(function (side) { var targetPosSide = targetPos[side]; if (targetPosSide === top || targetPosSide === bottom) { abutted.push(side); } }); } var allClasses = []; var addClasses = []; var sides = ['left', 'top', 'right', 'bottom']; allClasses.push(this.getClass('abutted')); sides.forEach(function (side) { allClasses.push(_this.getClass('abutted') + '-' + side); }); if (abutted.length) { addClasses.push(this.getClass('abutted')); } abutted.forEach(function (side) { addClasses.push(_this.getClass('abutted') + '-' + side); }); defer(function () { if (!(_this.options.addTargetClasses === false)) { updateClasses(_this.target, addClasses, allClasses); } updateClasses(_this.element, addClasses, allClasses); }); return true; } }); /* globals TetherBase */ 'use strict'; var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })(); TetherBase.modules.push({ position: function position(_ref) { var top = _ref.top; var left = _ref.left; if (!this.options.shift) { return; } var shift = this.options.shift; if (typeof this.options.shift === 'function') { shift = 
this.options.shift.call(this, { top: top, left: left }); } var shiftTop = undefined, shiftLeft = undefined; if (typeof shift === 'string') { shift = shift.split(' '); shift[1] = shift[1] || shift[0]; var _shift = shift; var _shift2 = _slicedToArray(_shift, 2); shiftTop = _shift2[0]; shiftLeft = _shift2[1]; shiftTop = parseFloat(shiftTop, 10); shiftLeft = parseFloat(shiftLeft, 10); } else { shiftTop = shift.top; shiftLeft = shift.left; } top += shiftTop; left += shiftLeft; return { top: top, left: left }; } }); return Tether; })); /***/ }, /* 603 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'CodeExampleComponent', propTypes: { children: _react2.default.PropTypes.element, id: _react2.default.PropTypes.number, title: _react2.default.PropTypes.string }, render: function render() { return _react2.default.createElement( 'div', { key: this.props.id, id: 'example-' + this.props.id, className: 'example' }, _react2.default.createElement( 'h2', { className: 'example__heading' }, this.props.title ), this.props.children ); } }); /***/ }, /* 604 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'CustomDateFormat', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'dateFormat="YYYY/MM/DD"' ), _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { dateFormat: 'YYYY/MM/DD', selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 605 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
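/*
 * The transpiled CustomDateFormat module above corresponds roughly to the following
 * JSX source. This is a reconstruction for readability, not the original file; the
 * prop names (dateFormat, selected, onChange) are taken directly from the compiled
 * output:
 *
 *   import React from 'react';
 *   import DatePicker from 'react-datepicker';
 *   import moment from 'moment';
 *
 *   const CustomDateFormat = React.createClass({
 *     getInitialState() {
 *       return { startDate: moment() };
 *     },
 *     handleChange(date) {
 *       this.setState({ startDate: date });
 *     },
 *     render() {
 *       return (
 *         <DatePicker
 *           dateFormat="YYYY/MM/DD"
 *           selected={this.state.startDate}
 *           onChange={this.handleChange} />
 *       );
 *     }
 *   });
 */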
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'CustomClassName', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', ' ', _react2.default.createElement('br', null), 'className="red-border" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, className: 'red-border' }) ) ); } }); /***/ }, /* 606 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'CustomCalendarClassName', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', ' ', _react2.default.createElement('br', null), 'calendarClassName="rasta-stripes" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, calendarClassName: 'rasta-stripes' }) ) ); } }); /***/ }, /* 607 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'PlaceholderText', render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker placeholderText="Click to select a date" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { placeholderText: 'Click to select a date' }) ) ); } }); /***/ }, /* 608 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'SpecificDateRange', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'minDate={moment()}' ), _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'maxDate={moment().add(5, "days")}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="Select a date between today and 5 days in the future" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, minDate: (0, _moment2.default)(), maxDate: (0, _moment2.default)().add(5, 'days'), placeholderText: 'Select a date between today and 5 days in the future' }) ) ); } }); /***/ }, /* 609 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'CustomStartDate', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'locale="en-gb"' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="Weeks start on Monday" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, locale: 'en-gb', placeholderText: 'Weeks start on Monday' }) ) ); } }); /***/ }, /* 610 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'ExcludeDates', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'excludeDates={[moment(), moment().subtract(1, "days")]}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="Select a date other than today or yesterday" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, excludeDates: [(0, _moment2.default)(), (0, _moment2.default)().subtract(1, 'days')], placeholderText: 'Select a date other than today or yesterday' }) ) ); } }); /***/ }, /* 611 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'highlightDates', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'highlightDates={[moment().subtract(7, "days"), moment().add(7, "days")]}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="This highlights a week ago and a week from today" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, highlightDates: [(0, _moment2.default)().subtract(7, 'days'), (0, _moment2.default)().add(7, 'days')], placeholderText: 'This highlights a week ago and a week from today' }) ) ); } }); /***/ }, /* 612 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'includeDates', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'includeDates={[moment(), moment().add(1, "days")]}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="This only includes today and tomorrow" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, includeDates: [(0, _moment2.default)(), (0, _moment2.default)().add(1, 'days')], placeholderText: 'This only includes today and tomorrow' }) ) ); } }); /***/ }, /* 613 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'FilterDates', getInitialState: function getInitialState() { return { date: null }; }, handleChange: function handleChange(date) { this.setState({ date: date }); }, isWeekday: function isWeekday(date) { var day = date.day(); return day !== 0 && day !== 6; }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.date}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'filterDate={this.isWeekday}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="Select a weekday" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.date, onChange: this.handleChange, filterDate: this.isWeekday, placeholderText: 'Select a weekday' }) ) ); } }); /***/ }, /* 614 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Disabled', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'disabled={true}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="This is disabled"', ' />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, disabled: true, placeholderText: 'This is disabled' }) ) ); } }); /***/ }, /* 615 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
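/*
 * The FilterDates module above demonstrates the filterDate callback: the picker
 * invokes it with each candidate day (a moment object) and only enables days for
 * which it returns true. A reconstructed JSX sketch, with names taken from the
 * compiled output rather than the original source file:
 *
 *   isWeekday(date) {
 *     const day = date.day();        // 0 = Sunday, 6 = Saturday
 *     return day !== 0 && day !== 6; // allow Monday through Friday only
 *   },
 *   render() {
 *     return (
 *       <DatePicker
 *         selected={this.state.date}
 *         onChange={this.handleChange}
 *         filterDate={this.isWeekday}
 *         placeholderText="Select a weekday" />
 *     );
 *   }
 */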
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'DisabledKeyboardNavigation', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'disabledKeyboardNavigation />' ), _react2.default.createElement('br', null), 'placeholderText="This has disabled keyboard navigation"' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, disabledKeyboardNavigation: true, placeholderText: 'This has disabled keyboard navigation' }) ) ); } }); /***/ }, /* 616 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'ClearInput', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange }', _react2.default.createElement('br', null), 'isClearable={true}', _react2.default.createElement('br', null), 'placeholderText="I have been cleared!" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, isClearable: true, placeholderText: 'I have been cleared!' }) ) ); } }); /***/ }, /* 617 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Disabled', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, handleOnBlur: function handleOnBlur(date) { if (date === null) { console.log('selected date: %s', date); } else { console.log('selected date: %s', date.format('DD/MM/YYYY')); } }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'js' }, 'handleOnBlur: function (date) {', _react2.default.createElement('br', null), 'if (date === null) {', _react2.default.createElement('br', null), 'console.log("selected date: %s", date);', _react2.default.createElement('br', null), '}', _react2.default.createElement('br', null), 'else {', _react2.default.createElement('br', null), 'console.log("selected date: %s", date.format("DD/MM/YYYY"));', _react2.default.createElement('br', null), '}', _react2.default.createElement('br', null), '};' ), _react2.default.createElement('br', null), _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'key="example9"', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), _react2.default.createElement( 'strong', null, 'onBlur={this.handleOnBlur}' ), _react2.default.createElement('br', null), '\xA0 \xA0 ', 'placeholderText="View blur callbacks in console" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { key: 'example9', selected: this.state.startDate, onChange: this.handleChange, onBlur: this.handleOnBlur, placeholderText: 'View blur callbacks in console' }) ) ); } }); /***/ }, /* 618 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Placement', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), 'popoverAttachment="bottom center"', _react2.default.createElement('br', null), 'popoverTargetAttachment="top center"', _react2.default.createElement('br', null), 'popoverTargetOffset="0px 0px" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, popoverAttachment: 'bottom center', popoverTargetAttachment: 'top center', popoverTargetOffset: '0px 0px' }) ) ); } }); /***/ }, /* 619 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; var React = __webpack_require__(2); var DatePicker = __webpack_require__(341); var moment = __webpack_require__(343); var DateRange = React.createClass({ displayName: 'DateRange', getInitialState: function getInitialState() { return { startDate: moment('2014-02-08'), endDate: moment('2014-02-10') }; }, handleChange: function handleChange(_ref) { var startDate = _ref.startDate, endDate = _ref.endDate; startDate = startDate || this.state.startDate; endDate = endDate || this.state.endDate; if (startDate.isAfter(endDate)) { var temp = startDate; startDate = endDate; endDate = temp; } this.setState({ startDate: startDate, endDate: endDate }); }, handleChangeStart: function handleChangeStart(startDate) { this.handleChange({ startDate: startDate }); }, handleChangeEnd: function handleChangeEnd(endDate) { this.handleChange({ endDate: endDate }); }, render: function render() { return React.createElement( 'div', { className: 'row' }, React.createElement( 'pre', { className: 'column example__code' }, React.createElement( 'code', { className: 'jsx' }, '<DatePicker', React.createElement('br', null), 'selected={this.state.startDate}', React.createElement('br', null), 'selectsStart', '\xA0 ', 'startDate={this.state.startDate}', React.createElement('br', null), 'endDate={this.state.endDate}', React.createElement('br', null), 'onChange={this.handleChangeStart} />', React.createElement('br', null), '<DatePicker', React.createElement('br', null), 'selected={this.state.endDate}', React.createElement('br', null), 'selectsEnd', '\xA0 ', 'startDate={this.state.startDate}', React.createElement('br', null), 'endDate={this.state.endDate}', React.createElement('br', null), 'onChange={this.handleChangeEnd} />', React.createElement('br', null) ) ), React.createElement( 'div', { className: 'column' }, React.createElement(DatePicker, { selected: this.state.startDate, selectsStart: true, startDate: this.state.startDate, endDate: this.state.endDate, onChange: this.handleChangeStart }), React.createElement(DatePicker, { selected: this.state.endDate, selectsEnd: true, startDate: this.state.startDate, endDate: this.state.endDate, onChange: this.handleChangeEnd }) ) ); } }); module.exports = DateRange; /***/ }, /* 620 */ /***/ 
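/*
 * The DateRange module above (module 619) wires two pickers into a single range:
 * selectsStart/selectsEnd tell each picker which end of the range it controls, and
 * handleChange swaps the two dates whenever the user picks an end date before the
 * start date. A reconstructed sketch of that handler, taken from the compiled
 * output rather than the original source:
 *
 *   handleChange({ startDate, endDate }) {
 *     startDate = startDate || this.state.startDate;
 *     endDate = endDate || this.state.endDate;
 *     if (startDate.isAfter(endDate)) {
 *       // keep the range ordered: swap when the start would pass the end
 *       [startDate, endDate] = [endDate, startDate];
 *     }
 *     this.setState({ startDate, endDate });
 *   },
 *   handleChangeStart(startDate) { this.handleChange({ startDate }); },
 *   handleChangeEnd(endDate) { this.handleChange({ endDate }); }
 */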
function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'TabIndex', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), 'tabIndex={1} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, tabIndex: 1 }) ) ); } }); /***/ }, /* 621 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'YearDropdown', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', ' ', _react2.default.createElement('br', null), 'showYearDropdown', ' ', _react2.default.createElement('br', null), 'dateFormatCalendar="MMMM"', ' ', _react2.default.createElement('br', null), 'scrollableYearDropdown />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, showYearDropdown: true, dateFormatCalendar: 'MMMM', scrollableYearDropdown: true }) ) ); } }); /***/ }, /* 622 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'MonthDropdown', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', ' ', _react2.default.createElement('br', null), 'showMonthDropdown />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, showMonthDropdown: true }) ) ); } }); /***/ }, /* 623 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'YearDropdown', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', ' ', _react2.default.createElement('br', null), 'peekNextMonth', ' ', _react2.default.createElement('br', null), 'showMonthDropdown', ' ', _react2.default.createElement('br', null), 'showYearDropdown', ' ', _react2.default.createElement('br', null), '\xA0 \xA0 ', 'dropdownMode="select" />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, peekNextMonth: true, showMonthDropdown: true, showYearDropdown: true, dropdownMode: 'select' }) ) ); } }); /***/ }, /* 624 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Default', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'todayButton={"Vandaag"}', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { todayButton: 'Vandaag', selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 625 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Default', getInitialState: function getInitialState() { return { startDate: null, utcOffset: -4 }; }, timezoneNames: [{ name: 'GMT+10', value: 10 }, { name: 'GMT+8', value: 8 }, { name: 'GMT+4', value: 4 }, { name: 'GMT+1', value: 1 }, { name: 'GMT', value: 0 }, { name: 'GMT-3', value: -3 }, { name: 'GMT-4', value: -4 }, { name: 'GMT-8', value: -8 }, { name: 'GMT-10', value: -10 }], handleChange: function handleChange(date) { this.setState({ startDate: date }); }, handleTmzChange: function handleTmzChange(event) { this.setState({ utcOffset: parseInt(event.target.value, 10) }); }, getOffsetLabel: function getOffsetLabel(tmz) { var obj = this.timezoneNames.find(function (item) { return item.value === tmz; }); return obj && obj.name || ''; }, render: function render() { var selected = this.state.startDate && this.state.startDate.clone().utcOffset(this.state.utcOffset); var utcText = this.getOffsetLabel(this.state.utcOffset); var todayTxt = 'Today in ' + utcText; return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'utcOffset=-4', _react2.default.createElement('br', null), 'dateFormat="DD-MMM HH:mm"', _react2.default.createElement('br', null), 'todayButton="Today in Puerto Rico"', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { utcOffset: this.state.utcOffset, dateFormat: 'DD-MMM YYYY HH:mm', todayButton: todayTxt, selected: selected, minDate: (0, _moment2.default)('2016-11-05T00:00:00+00:00').utcOffset(this.state.utcOffset), maxDate: (0, _moment2.default)('2016-12-04T00:00:00-04:00').utcOffset(this.state.utcOffset), onChange: this.handleChange }), _react2.default.createElement('br', null), _react2.default.createElement( 'label', { className: 'example__timezone-label' }, 'Timezone Offset:', _react2.default.createElement( 'select', { className: 'example__timezone-selector', value: this.state.utcOffset, onChange: this.handleTmzChange }, _react2.default.createElement( 'option', { value: '10' }, 'GMT+10:00' ), _react2.default.createElement( 'option', { value: '8' }, 'GMT+08:00' ), _react2.default.createElement( 'option', { value: '4' }, 'GMT+04:00' ), _react2.default.createElement( 'option', { value: '1' }, 'GMT+01:00' ), _react2.default.createElement( 'option', { value: '0' }, 'GMT' ), _react2.default.createElement( 'option', { value: '-3' }, 'GMT-03:00' ), _react2.default.createElement( 'option', { value: '-4' }, 'GMT-04:00' ), _react2.default.createElement( 'option', { value: '-8' }, 'GMT-08:00' ), _react2.default.createElement( 'option', { value: '-10' }, 'GMT-10:00' ) ) ) ) ); } }); /***/ }, /* 626 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Inline', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'inline', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { inline: true, selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 627 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'OpenToDate', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'openToDate={moment("1993-09-28")}', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { openToDate: (0, _moment2.default)('1993-09-28'), selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 628 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'FixedCalendar', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'fixedHeight', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { fixedHeight: true, selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 629 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Default', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), 'showWeekNumbers />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, showWeekNumbers: true }) ) ); } }); /***/ }, /* 630 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } /* eslint-disable react/no-multi-comp */ var ExampleCustomInput = _react2.default.createClass({ displayName: 'ExampleCustomInput', propTypes: { onClick: _react2.default.PropTypes.func, value: _react2.default.PropTypes.string }, render: function render() { return _react2.default.createElement( 'button', { className: 'example-custom-input', onClick: this.props.onClick }, this.props.value ); } }); exports.default = _react2.default.createClass({ displayName: 'Custom Input', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, 'var ExampleCustomInput = React.createClass({', _react2.default.createElement('br', null), 'displayName: "ExampleCustomInput" ,', _react2.default.createElement('br', null), _react2.default.createElement('br', null), '\xA0 ', 'propTypes: {', _react2.default.createElement('br', null), 'onClick: React.PropTypes.func,', _react2.default.createElement('br', null), 'value: React.PropTypes.string', _react2.default.createElement('br', null), '},', _react2.default.createElement('br', null), _react2.default.createElement('br', null), '\xA0 ', 'render () {', _react2.default.createElement('br', null), 'return (', _react2.default.createElement('br', null), '<button', _react2.default.createElement('br', null), 'className="example-custom-input"', _react2.default.createElement('br', null), 'onClick={this.props.onClick}>', _react2.default.createElement('br', null), '{this.props.value}', _react2.default.createElement('br', null), '</button>', _react2.default.createElement('br', null), ')', _react2.default.createElement('br', null), '}', _react2.default.createElement('br', null), '})', _react2.default.createElement('br', null), _react2.default.createElement('br', null), '...', _react2.default.createElement('br', null), _react2.default.createElement('br', null), '<DatePicker', _react2.default.createElement('br', null), 'customInput={<ExampleCustomInput />}', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { customInput: _react2.default.createElement(ExampleCustomInput, null), selected: this.state.startDate, onChange: this.handleChange }) ) ); } }); /***/ }, /* 631 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'MultiMonth', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), 'monthsShown={2} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { monthsShown: 2, onChange: this.handleChange, selected: this.state.startDate }) ) ); } }); /***/ }, /* 632 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'MultiMonthDrp', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), 'showYearDropdown', _react2.default.createElement('br', null), 'monthsShown={2} />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { monthsShown: 2, onChange: this.handleChange, showYearDropdown: true, selected: this.state.startDate }) ) ); } }); /***/ }, /* 633 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Children', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column' }, _react2.default.createElement( 'code', { className: 'jsx' }, '\n<DatePicker\n selected={this.state.startDate}\n onChange={this.handleChange}>\n <div style={{color: \'red\'}}>\n Don\'t forget to check the weather!\n </div>\n</DatePicker>\n' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement( _reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange }, _react2.default.createElement( 'div', { style: { color: 'red' } }, 'Don\'t forget to check the weather!' ) ) ) ); } }); /***/ }, /* 634 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'With Portal', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, '<DatePicker', _react2.default.createElement('br', null), 'selected={this.state.startDate}', _react2.default.createElement('br', null), 'onChange={this.handleChange}', _react2.default.createElement('br', null), 'withPortal />' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, withPortal: true }) ) ); } }); /***/ }, /* 635 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'Default', getInitialState: function getInitialState() { return { startDate: null }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, handleChangeRaw: function handleChangeRaw(value) { if (value === 'tomorrow') { this.handleChange((0, _moment2.default)().add(1, 'day')); } }, render: function render() { var _this = this; return _react2.default.createElement( 'div', { className: 'row' }, _react2.default.createElement( 'pre', { className: 'column example__code' }, _react2.default.createElement( 'code', { className: 'jsx' }, 'handleChangeRaw(value) {', _react2.default.createElement('br', null), ' if(value === "tomorrow") {', _react2.default.createElement('br', null), ' const tomorrow = moment().add(1, "day")', _react2.default.createElement('br', null), ' this.handleChange(tomorrow)', _react2.default.createElement('br', null), ' }', _react2.default.createElement('br', null), '}', _react2.default.createElement('br', null), '<DatePicker', _react2.default.createElement('br', null), ' selected={this.state.startDate}', _react2.default.createElement('br', null), ' onChange={this.handleChange} />', _react2.default.createElement('br', null), ' placeholderText="Enter tomorrow"', _react2.default.createElement('br', null), ' onChangeRaw={(event) => ', _react2.default.createElement('br', null), ' this.handleChangeRaw(event.target.value)', _react2.default.createElement('br', null), '/>' ) ), _react2.default.createElement( 'div', { className: 'column' }, _react2.default.createElement(_reactDatepicker2.default, { selected: this.state.startDate, onChange: this.handleChange, placeholderText: 'Enter "tomorrow"', onChangeRaw: function onChangeRaw(event) { return _this.handleChangeRaw(event.target.value); } }) ) ); } }); /***/ }, /* 636 */ /***/ function(module, exports) { // removed by extract-text-webpack-plugin /***/ }, /* 637 */ 636, /* 638 */ /***/ function(module, exports, __webpack_require__) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _react = __webpack_require__(2); var _react2 = _interopRequireDefault(_react); var _reactDatepicker = __webpack_require__(341); var _reactDatepicker2 = _interopRequireDefault(_reactDatepicker); var _moment = __webpack_require__(343); var _moment2 = _interopRequireDefault(_moment); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = _react2.default.createClass({ displayName: 'HeroExample', getInitialState: function getInitialState() { return { startDate: (0, _moment2.default)() }; }, handleChange: function handleChange(date) { this.setState({ startDate: date }); }, render: function render() { return _react2.default.createElement(_reactDatepicker2.default, { autoFocus: true, selected: this.state.startDate, onChange: this.handleChange }); } }); /***/ }, /* 639 */ /***/ function(module, exports, __webpack_require__, __webpack_module_template_argument_0__) { /** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * */ 'use strict'; var _prodInvariant = __webpack_require__(__webpack_module_template_argument_0__); var invariant = __webpack_require__(8); /** * Static poolers. 
Several custom versions for each potential number of * arguments. A completely generic pooler is easy to implement, but would * require accessing the `arguments` object. In each of these, `this` refers to * the Class itself, not an instance. If any others are needed, simply add them * here, or in their own files. */ var oneArgumentPooler = function (copyFieldsFrom) { var Klass = this; if (Klass.instancePool.length) { var instance = Klass.instancePool.pop(); Klass.call(instance, copyFieldsFrom); return instance; } else { return new Klass(copyFieldsFrom); } }; var twoArgumentPooler = function (a1, a2) { var Klass = this; if (Klass.instancePool.length) { var instance = Klass.instancePool.pop(); Klass.call(instance, a1, a2); return instance; } else { return new Klass(a1, a2); } }; var threeArgumentPooler = function (a1, a2, a3) { var Klass = this; if (Klass.instancePool.length) { var instance = Klass.instancePool.pop(); Klass.call(instance, a1, a2, a3); return instance; } else { return new Klass(a1, a2, a3); } }; var fourArgumentPooler = function (a1, a2, a3, a4) { var Klass = this; if (Klass.instancePool.length) { var instance = Klass.instancePool.pop(); Klass.call(instance, a1, a2, a3, a4); return instance; } else { return new Klass(a1, a2, a3, a4); } }; var standardReleaser = function (instance) { var Klass = this; !(instance instanceof Klass) ? false ? invariant(false, 'Trying to release an instance into a pool of a different type.') : _prodInvariant('25') : void 0; instance.destructor(); if (Klass.instancePool.length < Klass.poolSize) { Klass.instancePool.push(instance); } }; var DEFAULT_POOL_SIZE = 10; var DEFAULT_POOLER = oneArgumentPooler; /** * Augments `CopyConstructor` to be a poolable class, augmenting only the class * itself (statically) not adding any prototypical fields. Any CopyConstructor * you give this may have a `poolSize` property, and will look for a * prototypical `destructor` on instances. * * @param {Function} CopyConstructor Constructor that can be used to reset. * @param {Function} pooler Customizable pooler. */ var addPoolingTo = function (CopyConstructor, pooler) { // Casting as any so that flow ignores the actual implementation and trusts // it to match the type we declared var NewKlass = CopyConstructor; NewKlass.instancePool = []; NewKlass.getPooled = pooler || DEFAULT_POOLER; if (!NewKlass.poolSize) { NewKlass.poolSize = DEFAULT_POOL_SIZE; } NewKlass.release = standardReleaser; return NewKlass; }; var PooledClass = { addPoolingTo: addPoolingTo, oneArgumentPooler: oneArgumentPooler, twoArgumentPooler: twoArgumentPooler, threeArgumentPooler: threeArgumentPooler, fourArgumentPooler: fourArgumentPooler }; module.exports = PooledClass; /***/ } /******/ ])));
idx: 1
id: 6,033
msg: Please avoid committing build artifacts, as they produce unnecessary conflicts and noise.
proj: Hacker0x01-react-datepicker
lang: js
patch:
@@ -101,7 +101,7 @@ class RetryWrapper:
         if name in self.__unwrap_methods:
             return partial(x.__func__, self)
-        elif hasattr(x, "__call__"):
+        elif callable(x):
             return self.__wrap_method_with_call_and_maybe_retry(x)
         else:
             return x
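For context on this one-line patch: Python's built-in callable() decides whether an object can be invoked by inspecting its type, so for module functions and bound methods it gives the same answer as the hasattr(x, "__call__") test it replaces, just more directly. A minimal sketch of the one behavioral corner case, assuming nothing beyond the standard library (the Probe class below is illustrative, not from the dataset):

# callable() consults the type, hasattr() uses ordinary attribute lookup;
# they agree for functions and bound methods, and differ only when a
# __call__ attribute is stashed on an instance rather than its class.
class Probe:
    pass

p = Probe()
p.__call__ = lambda: "not actually invokable via p()"

print(callable(len))                 # True  - builtin function
print(hasattr(len, "__call__"))      # True
print(callable(p))                   # False - instance attribute is ignored
print(hasattr(p, "__call__"))        # True

For the attributes a wrapper like the one above typically sees (bound methods and plain data), the two spellings behave identically, so the patch reads as a readability change rather than a behavioral one.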
y: 1
oldf:
# This file duplicated from the Yelp MRJob project:
#
#   https://github.com/Yelp/mrjob
#
#
# Copyright 2009-2013 Yelp, David Marin
# Copyright 2015 Yelp
# Copyright 2017 Yelp
# Copyright 2018 Contributors
# Copyright 2019 Yelp and Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrappers for gracefully retrying on error."""
import logging
import time
from functools import partial

log = logging.getLogger(__name__)

_DEFAULT_BACKOFF = 15
_DEFAULT_MULTIPLIER = 1.5
_DEFAULT_MAX_TRIES = 10
_DEFAULT_MAX_BACKOFF = 1200  # 20 minutes


class RetryWrapper:
    """Handle transient errors, with configurable backoff.

    This class can wrap any object. The wrapped object will behave like the
    original one, except that if you call a function and it raises a
    retriable exception, we'll back off for a certain number of seconds and
    call the function again, until it succeeds or we get a non-retriable
    exception.
    """

    def __init__(
        self,
        wrapped,
        retry_if,
        backoff=_DEFAULT_BACKOFF,
        multiplier=_DEFAULT_MULTIPLIER,
        max_tries=_DEFAULT_MAX_TRIES,
        max_backoff=_DEFAULT_MAX_BACKOFF,
        unwrap_methods=(),
    ):
        """
        Wrap the given object

        :param wrapped: the object to wrap
        :param retry_if: a method that takes an exception, and returns
                         whether we should retry
        :type backoff: float
        :param backoff: the number of seconds to wait the first time we get
                        a retriable error
        :type multiplier: float
        :param multiplier: if we retry multiple times, the amount to multiply
                           the backoff time by every time we get an error
        :type max_tries: int
        :param max_tries: how many tries we get. ``0`` means to keep trying
                          forever
        :type max_backoff: float
        :param max_backoff: cap the backoff at this number of seconds
        :type unwrap_methods: sequence
        :param unwrap_methods: names of methods to call with this object as
                               *self* rather than retrying on transient
                               errors (e.g. methods that return a paginator)
        """
        self.__wrapped = wrapped
        self.__retry_if = retry_if

        self.__backoff = backoff
        if self.__backoff <= 0:
            raise ValueError("backoff must be positive")

        self.__multiplier = multiplier
        if self.__multiplier < 1:
            raise ValueError("multiplier must be at least one!")

        self.__max_tries = max_tries
        self.__max_backoff = max_backoff

        self.__unwrap_methods = set(unwrap_methods)

    def __getattr__(self, name):
        """The glue that makes functions retriable, and returns other
        attributes from the wrapped object as-is."""
        x = getattr(self.__wrapped, name)

        if name in self.__unwrap_methods:
            return partial(x.__func__, self)
        elif hasattr(x, "__call__"):
            return self.__wrap_method_with_call_and_maybe_retry(x)
        else:
            return x

    def __wrap_method_with_call_and_maybe_retry(self, f):
        """Wrap method f in a retry loop."""

        def call_and_maybe_retry(*args, **kwargs):
            backoff = self.__backoff
            tries = 0

            while not self.__max_tries or tries < self.__max_tries:
                try:
                    return f(*args, **kwargs)
                except Exception as ex:  # pylint: disable=broad-except
                    if self.__retry_if(ex) and (
                        tries < self.__max_tries - 1 or not self.__max_tries
                    ):
                        log.info("Got retriable error: %r" % ex)
                        log.info("Backing off for %.1f seconds" % backoff)
                        time.sleep(backoff)
                        tries += 1
                        backoff *= self.__multiplier
                        backoff = min(backoff, self.__max_backoff)
                    else:
                        raise

        # pretend to be the original function
        call_and_maybe_retry.__doc__ = f.__doc__
        if hasattr(f, "__name__"):
            call_and_maybe_retry.__name__ = f.__name__

        return call_and_maybe_retry
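To make the documented contract concrete, a small usage sketch based only on the constructor and __getattr__ behavior shown above; FlakyService, TransientError, and the backoff numbers are illustrative stand-ins, not part of the vendored module, and RetryWrapper is assumed to be importable from it:

import random

class TransientError(Exception):
    """Stand-in for a retriable error (e.g. a throttling response)."""

class FlakyService:
    """Toy client whose calls sometimes fail with a retriable error."""
    def fetch(self, key):
        if random.random() < 0.3:
            raise TransientError("temporarily unavailable")
        return {"key": key}

service = RetryWrapper(
    FlakyService(),
    retry_if=lambda ex: isinstance(ex, TransientError),
    backoff=0.1,      # first wait, in seconds
    multiplier=2.0,   # double the wait after each failure
    max_tries=5,      # once the fifth try fails, the error propagates
)

# Attribute access goes through __getattr__, so fetch() comes back wrapped
# in the retry loop and transient failures are retried with backoff.
print(service.fetch("example-key"))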
idx: 1
id: 14,149
msg: this is vendored code so i would hesitate to make random changes to it
proj: dagster-io-dagster
lang: py