prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>fixedheader.html.py<|end_file_name|><|fim▁begin|>XXXXXXXXX XXXXX XXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXX X XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXX XXXXX X XXXXXXXXXXXXXXXXXXXXXXXX X XXXX XXXXXXXXX X XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX X XX XXXXXXXXX XXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XX X XXXXXXXXXXXX XXXXXX XXXXXXX XXX XXXXXXXXXXX XXXXXXXX X XXXXX XXXXXX XX XXXXX XX XXX XXX XX X XXXXXXXXX XXXXXXX XXXXXXXXXX XXXXX XXXX XXXX XXXXXXXXXXXX 
XXXXXXXX XXX XX XXXXXXX XXXXXXX XXXX XXXXX XXX XXXXXXXX XXXXXXX XX XXXXX XX XXX XXXXXXX XXXXXXXXXX XXXXXX XXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXXX XXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXXX XXXX XXXXXXXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXX XXXXXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX 
XXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXXX XXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXX XXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXXX XXXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX<|fim▁hole|> XXXXX XXXX XXXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX 
XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXX XXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX 
XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXXXX XXX XXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXX XXXX XXXXXXXXXXXXX 
XXXX XXXXXXXXXXX XXXXXX XXXXXXXXXX XXXXX XXXXX XX XXXX XX XXXXXXXXXX XXX XXXXX XXXXX XX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXX XXXXX X XXXXXXXXXXXXXXXXXXXXXXXX X XXXX XXXXXXXXX X XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX X XXXXXXXXX XXXXX XXXXXXXX XX XXX XXXXX XXXXX XXX XXXXXXXXX XXXXXXXXXX XXXXXXX XXXXX XXX XXXXXX XXX XXX XX XXXX XXXXXXXXXXXX XXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXX XXXX XXXXXXXXXXXXXX XXXXXX XXXX XXXXX XXXXX XX XXX XXX XXXX XXXXX XXXXXXXX XXXXXX XX XXX XXXX XXXXXXXX XX XXXXXXXXXXXXXXX XXXXXX XXXX XXXXXXXXXXXX XXXXX XXXXXXX XXXXXXX XXXX X XXXXXX XXX XX XXXXXXXXXX XXX XXXXXX XXXX XX XXXXXX XXXX XXX XXXXXXX XXXXX XXXXXXXX XX XXXXX XX XXXXXXXXX XXXXXXX XXX XXXXXX XXX XXXXXXXXXX XXX XXXX XX XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX XXXXXXXXX XXX XXXXXXX XXXXX XXX XXXXXX XXX XXX XX XXXX XXXXXXX XX XXXXXXX XXX XXXXXXX XX XXX XXXXXXXXXX XXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXX XXXX XXXXXXXXXXXXX XXXXXXX XXXXX XXXXX XXXX XX XXXXX XXX XXXXXX XXXX XXXX XXX XXXX XXXXXX XX XXXXX XXXXXX XXXX XXXX XXXX XXXXXX XXXXXXXXXXXXX XX XXX XXXXXXXXXX XXXX XX XXXXXXXXXXX XXXXXX XXXX XXXXXXXXXXXX XXXXXX XXXXXX XXXX XX XXXXXXX XXX XXXXXXXXXXX XXXXXXXXXX XXX XXXX XXXXX XX XXXXX XXXXXX XXXXXX XXXX XXXX XXXX XX XXXX XX XXXXXXX XXXXXX XXXXX XXXX XXXXXXXXXXX 
XXXXXXXXXX XXXXXXX XXX XX XXXXXXX XX XXX XXXXXXXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXX XX XXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX XXXXXXXXXX XXXXXX XXXXXXXXX XXXX XXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXX XXXX XXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXX XXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXX XXXXXX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXX XXXXX XX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXX XXXX XXXXXXXXXXX XXXXX XXX XXX XXXXXXXXXX XXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXX X XXXX XXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXX XXX XXXXXXXXXXXX XX XXXXXXXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXX XXXXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXXX XXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXX XXXXXX XXXXXXXXXX XXXXXXX XXXXXXX<|fim▁end|>
XXXXXXXXXXXXXXXX
<|file_name|>feature_stream.go<|end_file_name|><|fim▁begin|>package jsoniter import ( "io" ) // Stream is a io.Writer like object, with JSON specific write functions. // Error is not returned as return value, but stored as Error member on this stream instance. type Stream struct { cfg *frozenConfig out io.Writer buf []byte n int Error error indention int } // NewStream create new stream instance. // cfg can be jsoniter.ConfigDefault. // out can be nil if write to internal buffer. // bufSize is the initial size for the internal buffer in bytes. func NewStream(cfg API, out io.Writer, bufSize int) *Stream { return &Stream{ cfg: cfg.(*frozenConfig), out: out, buf: make([]byte, bufSize), n: 0, Error: nil, indention: 0, } } // Pool returns a pool can provide more stream with same configuration func (stream *Stream) Pool() StreamPool { return stream.cfg } // Reset reuse this stream instance by assign a new writer func (stream *Stream) Reset(out io.Writer) { stream.out = out stream.n = 0 } // Available returns how many bytes are unused in the buffer. func (stream *Stream) Available() int { return len(stream.buf) - stream.n } // Buffered returns the number of bytes that have been written into the current buffer. func (stream *Stream) Buffered() int { return stream.n } // Buffer if writer is nil, use this method to take the result func (stream *Stream) Buffer() []byte { return stream.buf[:stream.n] } // Write writes the contents of p into the buffer. // It returns the number of bytes written. // If nn < len(p), it also returns an error explaining // why the write is short. func (stream *Stream) Write(p []byte) (nn int, err error) { for len(p) > stream.Available() && stream.Error == nil { if stream.out == nil { stream.growAtLeast(len(p)) } else { var n int if stream.Buffered() == 0 { // Large write, empty buffer. // Write directly from p to avoid copy. 
n, stream.Error = stream.out.Write(p) } else { n = copy(stream.buf[stream.n:], p) stream.n += n stream.Flush() } nn += n p = p[n:] } } if stream.Error != nil { return nn, stream.Error } n := copy(stream.buf[stream.n:], p) stream.n += n nn += n return nn, nil } // WriteByte writes a single byte. func (stream *Stream) writeByte(c byte) {<|fim▁hole|> stream.growAtLeast(1) } stream.buf[stream.n] = c stream.n++ } func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) { if stream.Error != nil { return } if stream.Available() < 2 { stream.growAtLeast(2) } stream.buf[stream.n] = c1 stream.buf[stream.n+1] = c2 stream.n += 2 } func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) { if stream.Error != nil { return } if stream.Available() < 3 { stream.growAtLeast(3) } stream.buf[stream.n] = c1 stream.buf[stream.n+1] = c2 stream.buf[stream.n+2] = c3 stream.n += 3 } func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) { if stream.Error != nil { return } if stream.Available() < 4 { stream.growAtLeast(4) } stream.buf[stream.n] = c1 stream.buf[stream.n+1] = c2 stream.buf[stream.n+2] = c3 stream.buf[stream.n+3] = c4 stream.n += 4 } func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) { if stream.Error != nil { return } if stream.Available() < 5 { stream.growAtLeast(5) } stream.buf[stream.n] = c1 stream.buf[stream.n+1] = c2 stream.buf[stream.n+2] = c3 stream.buf[stream.n+3] = c4 stream.buf[stream.n+4] = c5 stream.n += 5 } // Flush writes any buffered data to the underlying io.Writer. 
func (stream *Stream) Flush() error { if stream.out == nil { return nil } if stream.Error != nil { return stream.Error } if stream.n == 0 { return nil } n, err := stream.out.Write(stream.buf[0:stream.n]) if n < stream.n && err == nil { err = io.ErrShortWrite } if err != nil { if n > 0 && n < stream.n { copy(stream.buf[0:stream.n-n], stream.buf[n:stream.n]) } stream.n -= n stream.Error = err return err } stream.n = 0 return nil } func (stream *Stream) ensure(minimal int) { available := stream.Available() if available < minimal { stream.growAtLeast(minimal) } } func (stream *Stream) growAtLeast(minimal int) { if stream.out != nil { stream.Flush() } toGrow := len(stream.buf) if toGrow < minimal { toGrow = minimal } newBuf := make([]byte, len(stream.buf)+toGrow) copy(newBuf, stream.Buffer()) stream.buf = newBuf } // WriteRaw write string out without quotes, just like []byte func (stream *Stream) WriteRaw(s string) { stream.ensure(len(s)) if stream.Error != nil { return } n := copy(stream.buf[stream.n:], s) stream.n += n } // WriteNil write null to stream func (stream *Stream) WriteNil() { stream.writeFourBytes('n', 'u', 'l', 'l') } // WriteTrue write true to stream func (stream *Stream) WriteTrue() { stream.writeFourBytes('t', 'r', 'u', 'e') } // WriteFalse write false to stream func (stream *Stream) WriteFalse() { stream.writeFiveBytes('f', 'a', 'l', 's', 'e') } // WriteBool write true or false into stream func (stream *Stream) WriteBool(val bool) { if val { stream.WriteTrue() } else { stream.WriteFalse() } } // WriteObjectStart write { with possible indention func (stream *Stream) WriteObjectStart() { stream.indention += stream.cfg.indentionStep stream.writeByte('{') stream.writeIndention(0) } // WriteObjectField write "field": with possible indention func (stream *Stream) WriteObjectField(field string) { stream.WriteString(field) if stream.indention > 0 { stream.writeTwoBytes(':', ' ') } else { stream.writeByte(':') } } // WriteObjectEnd write } with possible 
indention func (stream *Stream) WriteObjectEnd() { stream.writeIndention(stream.cfg.indentionStep) stream.indention -= stream.cfg.indentionStep stream.writeByte('}') } // WriteEmptyObject write {} func (stream *Stream) WriteEmptyObject() { stream.writeByte('{') stream.writeByte('}') } // WriteMore write , with possible indention func (stream *Stream) WriteMore() { stream.writeByte(',') stream.writeIndention(0) } // WriteArrayStart write [ with possible indention func (stream *Stream) WriteArrayStart() { stream.indention += stream.cfg.indentionStep stream.writeByte('[') stream.writeIndention(0) } // WriteEmptyArray write [] func (stream *Stream) WriteEmptyArray() { stream.writeByte('[') stream.writeByte(']') } // WriteArrayEnd write ] with possible indention func (stream *Stream) WriteArrayEnd() { stream.writeIndention(stream.cfg.indentionStep) stream.indention -= stream.cfg.indentionStep stream.writeByte(']') } func (stream *Stream) writeIndention(delta int) { if stream.indention == 0 { return } stream.writeByte('\n') toWrite := stream.indention - delta stream.ensure(toWrite) for i := 0; i < toWrite && stream.n < len(stream.buf); i++ { stream.buf[stream.n] = ' ' stream.n++ } }<|fim▁end|>
if stream.Error != nil { return } if stream.Available() < 1 {
<|file_name|>TaskLogo.js<|end_file_name|><|fim▁begin|>/* ************************************************************************ Copyright: 2009 OETIKER+PARTNER AG License: GPLv3 or later Authors: Tobi Oetiker <[email protected]> Utf8Check: äöü ************************************************************************ */ /** * A container that looks a bit like a PostIt */ qx.Class.define("remocular.ui.TaskLogo", { extend : qx.ui.container.Composite, /** * @param title {String} title of the Logo * @param byline {String} byline * @param about {String} text about the plugin * @param url {String} url to link to */ construct : function(title, byline, about, link) { this.base(arguments); this.setLayout(new qx.ui.layout.VBox(3)); this.set({ margin : 4, padding : 15, maxWidth : 300, allowGrowX : true, alignX : 'center', alignY : 'middle', shadow : new qx.ui.decoration.Grid("decoration/shadow/shadow.png", [ 2, 4, 4, 2 ]), decorator : new qx.ui.decoration.Single(1, 'solid', '#ddd'), backgroundColor : '#fff', opacity : 1<|fim▁hole|> }); if (link) { this.setCursor('pointer'); this.addListener('click', function(e) { qx.bom.Window.open(link, '_blank'); }); } var t = new qx.ui.container.Composite(new qx.ui.layout.VBox(3)).set({ opacity : 0.5 }); t.addListener('mouseover', function(e) { this.setOpacity(1); }, t); t.addListener('mouseout', function(e) { this.setOpacity(0.5); }, t); t.add(new qx.ui.basic.Label(title).set({ font : 'smallTitle' })); t.add(new qx.ui.basic.Label(byline).set({ font : 'bold' })); t.add(new qx.ui.basic.Label(about).set({ rich : true, paddingTop : 4 })); this.add(t); } });<|fim▁end|>
<|file_name|>imagemagnifieridevice.py<|end_file_name|><|fim▁begin|># =========================================================================== # eXe # Copyright 2004-2006, University of Auckland # Copyright 2004-2008 eXe Project, http://eXeLearning.org/ # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # =========================================================================== """ A ImageMagnifier Idevice is one built up from an image and free text. """ import logging from exe.engine.idevice import Idevice from exe.engine.field import TextAreaField, MagnifierField from exe.engine.translate import lateTranslate log = logging.getLogger(__name__) # =========================================================================== class ImageMagnifierIdevice(Idevice): """ A ImageMagnifier Idevice is one built up from an image and free text. """ persistenceVersion = 4 def __init__(self, defaultImage = None): Idevice.__init__(self, x_(u"Image Magnifier"), x_(u"University of Auckland"), x_(u"""The image magnifier is a magnifying tool enabling learners to magnify the view of the image they have been given. 
Moving the magnifying glass over the image allows larger detail to be studied."""), u"", u"") self.emphasis = Idevice.NoEmphasis self.short_desc = x_("Allow learners to magnify a given view of an image") self.imageMagnifier = MagnifierField( x_(u"Choose an Image"), x_(u"""Click on the picture below or the "Add Image" button to select an image file to be magnified.""")) self.imageMagnifier.idevice = self self.imageMagnifier.defaultImage = defaultImage self.text = TextAreaField(x_(u"Text"), x_("""Enter the text you wish to associate with the file.""")) self.text.idevice = self self.float = u"left" self.caption = u"" self._captionInstruc = x_(u"""Provide a caption for the image to be magnified.""") self._dimensionInstruc = x_(u"""Choose the size you want your image to display at. The measurements are in pixels. Generally, 100 pixels equals approximately 3cm. Leave both fields blank if you want the image to display at its original size.""") self._alignInstruc = x_(u"""Alignment allows you to choose where on the screen the image will be positioned.""") self._initialZoomInstruc = x_(u"""Set the initial level of zoom when the IDevice loads, as a percentage of the original image size""") self._maxZoomInstruc = x_(u"""Set the maximum level of zoom, as a percentage of the original image size""")<|fim▁hole|> captionInstruc = lateTranslate('captionInstruc') dimensionInstruc = lateTranslate('dimensionInstruc') alignInstruc = lateTranslate('alignInstruc') initialZoomInstruc = lateTranslate('initialZoomInstruc') maxZoomInstruc = lateTranslate('maxZoomInstruc') glassSizeInstruc = lateTranslate('glassSizeInstruc') def getResourcesField(self, this_resource): """ implement the specific resource finding mechanism for this iDevice: """ # be warned that before upgrading, this iDevice field could not exist: if hasattr(self, 'imageMagnifier')\ and hasattr(self.imageMagnifier, 'imageResource'): if this_resource == self.imageMagnifier.imageResource: return self.imageMagnifier # be warned 
that before upgrading, this iDevice field could not exist: if hasattr(self, 'text') and hasattr(self.text, 'images'): for this_image in self.text.images: if hasattr(this_image, '_imageResource') \ and this_resource == this_image._imageResource: return self.text return None def getRichTextFields(self): """ Like getResourcesField(), a general helper to allow nodes to search through all of their fields without having to know the specifics of each iDevice type. """ fields_list = [] if hasattr(self, 'text'): fields_list.append(self.text) return fields_list def burstHTML(self, i): """ takes a BeautifulSoup fragment (i) and bursts its contents to import this idevice from a CommonCartridge export """ # ImageMagnifier Idevice: #======> WARNING - NOT YET BURSTING!!!!!!!! #title = i.find(name='span', attrs={'class' : 'iDeviceTitle' }) #idevice.title = title.renderContents().decode('utf-8') # no title for this idevice # WARNING: not yet loading the image or its parameters: # Could be in the following tag: # <param name="FlashVars" \ # value="glassSize=2&height=189&width=267 \ # &initialZoomSize=100&file=sunflowers.jpg \ # &maxZoomSize=150&targetColor=#FF0000&borderWidth=12 #inner = i.find(name='div', attrs={'class' : 'iDevice_inner' }) #idevice.fields[0].content = inner.renderContents().decode('utf-8') #idevice.fields[0].content_w_resourcePaths = inner.renderContents().decode('utf-8') #idevice.fields[0].content_wo_resourcePaths = inner.renderContents().decode('utf-8') def upgradeToVersion1(self): """ Upgrades to v0.14 """ self._alignInstruc = x_(u"""Alignment allows you to choose where on the screen the image will be positioned.""") self._initialZoomInstruc = x_(u"""Set the initial level of zoom when the IDevice loads, as a percentage of the original image size""") self._maxZoomInstruc = x_(u"""Set the maximum level of zoom, as a percentage of the original image size""") self._glassSizeInstruc = x_(u"""This chooses the initial size of the magnifying glass""") def 
upgradeToVersion2(self): """ Upgrades to v0.24 """ self.imageMagnifier.isDefaultImage = False def upgradeToVersion3(self): if 'magnifier.swf' in self.systemResources: self.systemResources.remove('magnifier.swf') if 'mojomagnify.js' not in self.systemResources: self.systemResources.append('mojomagnify.js') def upgradeToVersion4(self): """ Delete icon from system resources """ self._upgradeIdeviceToVersion3() # ===========================================================================<|fim▁end|>
self._glassSizeInstruc = x_(u"""Select the size of the magnifying glass""") self.systemResources += ['mojomagnify.js'] # Properties
<|file_name|>stars.rs<|end_file_name|><|fim▁begin|>// +--------------------------------------------------------------------------+ // | Copyright 2016 Matthew D. Steele <[email protected]> | // | | // | This file is part of System Syzygy. | // | | // | System Syzygy is free software: you can redistribute it and/or modify it | // | under the terms of the GNU General Public License as published by the | // | Free Software Foundation, either version 3 of the License, or (at your | // | option) any later version. | // | | // | System Syzygy is distributed in the hope that it will be useful, but | // | WITHOUT ANY WARRANTY; without even the implied warranty of | // | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | // | General Public License for details. | // | | // | You should have received a copy of the GNU General Public License along | // | with System Syzygy. If not, see <http://www.gnu.org/licenses/>. | // +--------------------------------------------------------------------------+ use crate::gui::{Canvas, Rect}; // ========================================================================= // pub struct MovingStars { rect: Rect, anim: i32, visible: bool, } impl MovingStars { pub fn new(left: i32, top: i32, width: u32, height: u32) -> MovingStars { MovingStars { rect: Rect::new(left, top, width, height), anim: 0, visible: false, } } pub fn set_visible(&mut self, visible: bool) { self.visible = visible; } fn rand(range: u32, seed: &mut (u32, u32)) -> i32 { seed.0 = 36969 * (seed.0 & 0xffff) + (seed.0 >> 16); seed.1 = 18000 * (seed.1 & 0xffff) + (seed.1 >> 16); let next = (seed.0 << 16) | (seed.1 & 0xffff); (next % range) as i32 } fn draw_star( &self, x: i32, y: i32, width: u32, gray: u8, canvas: &mut Canvas, ) { canvas.fill_rect((gray, gray, gray), Rect::new(x, y, width, 1)); } fn draw_layer( &self, spacing: u32, speed: i32, gray: u8, canvas: &mut Canvas, ) { let mut seed = (123456789, 987654321); let star_width = (speed / 2) as u32; let modulus = 
(self.rect.width() + spacing) as i32; let scroll = (self.anim * speed) % modulus; let mut yoff = 0; while yoff < modulus { let mut xoff = 0; while xoff < modulus { let x = ((xoff + scroll) % modulus) - spacing as i32 + MovingStars::rand(spacing, &mut seed); let y = yoff + MovingStars::rand(spacing, &mut seed); self.draw_star(x, y, star_width, gray, canvas); xoff += spacing as i32; }<|fim▁hole|> } } pub fn draw(&self, canvas: &mut Canvas) { if self.visible { let mut canvas = canvas.subcanvas(self.rect); canvas.clear((0, 0, 0)); self.draw_layer(16, 8, 63, &mut canvas); self.draw_layer(32, 16, 127, &mut canvas); } } pub fn tick_animation(&mut self) -> bool { if self.visible { self.anim += 1; } self.visible } } // ========================================================================= //<|fim▁end|>
yoff += spacing as i32;
<|file_name|>event_ring.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use data_model::DataInit; use remain::sorted; use std::mem::size_of; use std::sync::atomic::{fence, Ordering}; use thiserror::Error; use vm_memory::{GuestAddress, GuestMemory, GuestMemoryError}; use super::xhci_abi::*; #[sorted] #[derive(Error, Debug)] pub enum Error { #[error("event ring has a bad enqueue pointer: {0}")] BadEnqueuePointer(GuestAddress), #[error("event ring has a bad seg table addr: {0}")] BadSegTableAddress(GuestAddress), #[error("event ring has a bad seg table index: {0}")] BadSegTableIndex(u16), #[error("event ring is full")] EventRingFull, #[error("event ring cannot read from guest memory: {0}")] MemoryRead(GuestMemoryError), #[error("event ring cannot write to guest memory: {0}")] MemoryWrite(GuestMemoryError), #[error("event ring is uninitialized")] Uninitialized, } type Result<T> = std::result::Result<T, Error>; /// Event rings are segmented circular buffers used to pass event TRBs from the xHCI device back to /// the guest. Each event ring is associated with a single interrupter. See section 4.9.4 of the /// xHCI specification for more details. /// This implementation is only for primary interrupter. Please review xhci spec before using it /// for secondary. pub struct EventRing { mem: GuestMemory, segment_table_size: u16,<|fim▁hole|> enqueue_pointer: GuestAddress, dequeue_pointer: GuestAddress, producer_cycle_state: bool, } impl EventRing { /// Create an empty, uninitialized event ring. pub fn new(mem: GuestMemory) -> Self { EventRing { mem, segment_table_size: 0, segment_table_base_address: GuestAddress(0), current_segment_index: 0, enqueue_pointer: GuestAddress(0), dequeue_pointer: GuestAddress(0), trb_count: 0, // As specified in xHCI spec 4.9.4, cycle state should be initialized to 1. 
producer_cycle_state: true, } } /// This function implements left side of xHCI spec, Figure 4-12. pub fn add_event(&mut self, mut trb: Trb) -> Result<()> { self.check_inited()?; if self.is_full()? { return Err(Error::EventRingFull); } // Event is write twice to avoid race condition. // Guest kernel use cycle bit to check ownership, thus we should write cycle last. trb.set_cycle(!self.producer_cycle_state); self.mem .write_obj_at_addr(trb, self.enqueue_pointer) .map_err(Error::MemoryWrite)?; // Updating the cycle state bit should always happen after updating other parts. fence(Ordering::SeqCst); trb.set_cycle(self.producer_cycle_state); // Offset of cycle state byte. const CYCLE_STATE_OFFSET: usize = 12usize; let data = trb.as_slice(); // Trb contains 4 dwords, the last one contains cycle bit. let cycle_bit_dword = &data[CYCLE_STATE_OFFSET..]; let address = self.enqueue_pointer; let address = address .checked_add(CYCLE_STATE_OFFSET as u64) .ok_or(Error::BadEnqueuePointer(self.enqueue_pointer))?; self.mem .write_all_at_addr(cycle_bit_dword, address) .map_err(Error::MemoryWrite)?; usb_debug!( "event write to pointer {:#x}, trb_count {}, {}", self.enqueue_pointer.0, self.trb_count, trb ); self.enqueue_pointer = match self.enqueue_pointer.checked_add(size_of::<Trb>() as u64) { Some(addr) => addr, None => return Err(Error::BadEnqueuePointer(self.enqueue_pointer)), }; self.trb_count -= 1; if self.trb_count == 0 { self.current_segment_index += 1; if self.current_segment_index == self.segment_table_size { self.producer_cycle_state ^= true; self.current_segment_index = 0; } self.load_current_seg_table_entry()?; } Ok(()) } /// Set segment table size. pub fn set_seg_table_size(&mut self, size: u16) -> Result<()> { usb_debug!("event ring seg table size is set to {}", size); self.segment_table_size = size; self.try_reconfigure_event_ring() } /// Set segment table base addr. 
pub fn set_seg_table_base_addr(&mut self, addr: GuestAddress) -> Result<()> { usb_debug!("event ring seg table base addr is set to {:#x}", addr.0); self.segment_table_base_address = addr; self.try_reconfigure_event_ring() } /// Set dequeue pointer. pub fn set_dequeue_pointer(&mut self, addr: GuestAddress) { usb_debug!("event ring dequeue pointer set to {:#x}", addr.0); self.dequeue_pointer = addr; } /// Check if event ring is empty. pub fn is_empty(&self) -> bool { self.enqueue_pointer == self.dequeue_pointer } /// Event ring is considered full when there is only space for one last TRB. In this case, xHC /// should write an error Trb and do a bunch of handlings. See spec, figure 4-12 for more /// details. /// For now, we just check event ring full and fail (as it's unlikely to happen). pub fn is_full(&self) -> Result<bool> { if self.trb_count == 1 { // erst == event ring segment table let next_erst_idx = (self.current_segment_index + 1) % self.segment_table_size; let erst_entry = self.read_seg_table_entry(next_erst_idx)?; Ok(self.dequeue_pointer.0 == erst_entry.get_ring_segment_base_address()) } else { Ok(self.dequeue_pointer.0 == self.enqueue_pointer.0 + size_of::<Trb>() as u64) } } /// Try to init event ring. Will fail if seg table size/address are invalid. fn try_reconfigure_event_ring(&mut self) -> Result<()> { if self.segment_table_size == 0 || self.segment_table_base_address.0 == 0 { return Ok(()); } self.load_current_seg_table_entry() } // Check if this event ring is inited. fn check_inited(&self) -> Result<()> { if self.segment_table_size == 0 || self.segment_table_base_address == GuestAddress(0) || self.enqueue_pointer == GuestAddress(0) { return Err(Error::Uninitialized); } Ok(()) } // Load entry of current seg table. 
fn load_current_seg_table_entry(&mut self) -> Result<()> { let entry = self.read_seg_table_entry(self.current_segment_index)?; self.enqueue_pointer = GuestAddress(entry.get_ring_segment_base_address()); self.trb_count = entry.get_ring_segment_size(); Ok(()) } // Get seg table entry at index. fn read_seg_table_entry(&self, index: u16) -> Result<EventRingSegmentTableEntry> { let seg_table_addr = self.get_seg_table_addr(index)?; // TODO(jkwang) We can refactor GuestMemory to allow in-place memory operation. self.mem .read_obj_from_addr(seg_table_addr) .map_err(Error::MemoryRead) } // Get seg table addr at index. fn get_seg_table_addr(&self, index: u16) -> Result<GuestAddress> { if index > self.segment_table_size { return Err(Error::BadSegTableIndex(index)); } self.segment_table_base_address .checked_add(((size_of::<EventRingSegmentTableEntry>() as u16) * index) as u64) .ok_or(Error::BadSegTableAddress(self.segment_table_base_address)) } } #[cfg(test)] mod test { use super::*; use std::mem::size_of; #[test] fn test_uninited() { let gm = GuestMemory::new(&[(GuestAddress(0), 0x1000)]).unwrap(); let mut er = EventRing::new(gm); let trb = Trb::new(); match er.add_event(trb).err().unwrap() { Error::Uninitialized => {} _ => panic!("unexpected error"), } assert_eq!(er.is_empty(), true); assert_eq!(er.is_full().unwrap(), false); } #[test] fn test_event_ring() { let trb_size = size_of::<Trb>() as u64; let gm = GuestMemory::new(&[(GuestAddress(0), 0x1000)]).unwrap(); let mut er = EventRing::new(gm.clone()); let mut st_entries = [EventRingSegmentTableEntry::new(); 3]; st_entries[0].set_ring_segment_base_address(0x100); st_entries[0].set_ring_segment_size(3); st_entries[1].set_ring_segment_base_address(0x200); st_entries[1].set_ring_segment_size(3); st_entries[2].set_ring_segment_base_address(0x300); st_entries[2].set_ring_segment_size(3); gm.write_obj_at_addr(st_entries[0], GuestAddress(0x8)) .unwrap(); gm.write_obj_at_addr( st_entries[1], GuestAddress(0x8 + 
size_of::<EventRingSegmentTableEntry>() as u64), ) .unwrap(); gm.write_obj_at_addr( st_entries[2], GuestAddress(0x8 + 2 * size_of::<EventRingSegmentTableEntry>() as u64), ) .unwrap(); // Init event ring. Must init after segment tables writting. er.set_seg_table_size(3).unwrap(); er.set_seg_table_base_addr(GuestAddress(0x8)).unwrap(); er.set_dequeue_pointer(GuestAddress(0x100)); let mut trb = Trb::new(); // Fill first table. trb.set_control(1); assert_eq!(er.is_empty(), true); assert_eq!(er.is_full().unwrap(), false); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm.read_obj_from_addr(GuestAddress(0x100)).unwrap(); assert_eq!(t.get_control(), 1); assert_eq!(t.get_cycle(), true); trb.set_control(2); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x100 + trb_size)) .unwrap(); assert_eq!(t.get_control(), 2); assert_eq!(t.get_cycle(), true); trb.set_control(3); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x100 + 2 * trb_size)) .unwrap(); assert_eq!(t.get_control(), 3); assert_eq!(t.get_cycle(), true); // Fill second table. 
trb.set_control(4); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm.read_obj_from_addr(GuestAddress(0x200)).unwrap(); assert_eq!(t.get_control(), 4); assert_eq!(t.get_cycle(), true); trb.set_control(5); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x200 + trb_size)) .unwrap(); assert_eq!(t.get_control(), 5); assert_eq!(t.get_cycle(), true); trb.set_control(6); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x200 + 2 * trb_size as u64)) .unwrap(); assert_eq!(t.get_control(), 6); assert_eq!(t.get_cycle(), true); // Fill third table. trb.set_control(7); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm.read_obj_from_addr(GuestAddress(0x300)).unwrap(); assert_eq!(t.get_control(), 7); assert_eq!(t.get_cycle(), true); trb.set_control(8); assert!(er.add_event(trb).is_ok()); // There is only one last trb. Considered full. assert_eq!(er.is_full().unwrap(), true); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x300 + trb_size)) .unwrap(); assert_eq!(t.get_control(), 8); assert_eq!(t.get_cycle(), true); // Add the last trb will result in error. match er.add_event(trb) { Err(Error::EventRingFull) => {} _ => panic!("er should be full"), }; // Dequeue one trb. er.set_dequeue_pointer(GuestAddress(0x100 + trb_size)); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); // Fill the last trb of the third table. trb.set_control(9); assert!(er.add_event(trb).is_ok()); // There is only one last trb. Considered full. 
assert_eq!(er.is_full().unwrap(), true); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x300 + trb_size)) .unwrap(); assert_eq!(t.get_control(), 8); assert_eq!(t.get_cycle(), true); // Add the last trb will result in error. match er.add_event(trb) { Err(Error::EventRingFull) => {} _ => panic!("er should be full"), }; // Dequeue until empty. er.set_dequeue_pointer(GuestAddress(0x100)); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), true); // Fill first table again. trb.set_control(10); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm.read_obj_from_addr(GuestAddress(0x100)).unwrap(); assert_eq!(t.get_control(), 10); // cycle bit should be reversed. assert_eq!(t.get_cycle(), false); trb.set_control(11); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x100 + trb_size)) .unwrap(); assert_eq!(t.get_control(), 11); assert_eq!(t.get_cycle(), false); trb.set_control(12); assert!(er.add_event(trb).is_ok()); assert_eq!(er.is_full().unwrap(), false); assert_eq!(er.is_empty(), false); let t: Trb = gm .read_obj_from_addr(GuestAddress(0x100 + 2 * trb_size)) .unwrap(); assert_eq!(t.get_control(), 12); assert_eq!(t.get_cycle(), false); } }<|fim▁end|>
segment_table_base_address: GuestAddress, current_segment_index: u16, trb_count: u16,
<|file_name|>core_connector.ts<|end_file_name|><|fim▁begin|>/** * core_connector.ts * * Handles all connection and communication with the uProxy core. */ /// <reference path='../../../../third_party/typings/es6-promise/es6-promise.d.ts' /> import uproxy_core_api = require('../../interfaces/uproxy_core_api'); import browser_connector = require('../../interfaces/browser_connector'); import social = require('../../interfaces/social'); import net = require('../../../../third_party/uproxy-lib/net/net.types'); interface FullfillAndReject { fulfill :Function; reject :Function; }; /** * This class hides all cross backend-ui communication wiring so that the * uProxy UI may speak through this connector as if talking directly to Core. * * Propagates these messages: * Core --[ UPDATES ]--> UI * UI --[ COMMANDS ]--> Core */ class CoreConnector implements uproxy_core_api.CoreApi { // Global unique promise ID. private promiseId_ :number = 1; private mapPromiseIdToFulfillAndReject_ :{[id :number] : FullfillAndReject} = {}; // If non-null, the ID of the instance from which we are presently // disconnected. public disconnectedWhileProxying :string = null; constructor(private browserConnector_ :browser_connector.CoreBrowserConnector) { this.browserConnector_.onUpdate(uproxy_core_api.Update.COMMAND_FULFILLED, this.handleRequestFulfilled_); this.browserConnector_.onUpdate(uproxy_core_api.Update.COMMAND_REJECTED, this.handleRequestRejected_); this.connect(); } public on = (name :string, callback :Function) => { this.browserConnector_.on(name, callback); } public connect = () :Promise<void> => { return this.browserConnector_.connect(); } public connected = () => { return this.browserConnector_.status.connected; } public onUpdate = (update :uproxy_core_api.Update, handler :Function) => { this.browserConnector_.onUpdate(update, handler); } /** * Send a Command from the UI to the Core, as a result of some user * interaction. 
*/ public sendCommand = (command :uproxy_core_api.Command, data ?:any) => { var payload :browser_connector.Payload = { cmd: 'emit', type: command, data: data, promiseId: 0 } console.log('UI sending Command ' + //uproxy_core_api.Command[command], JSON.stringify(payload)); this.browserConnector_.send(payload); } /** * Send a Command from the UI to the Core, as a result of some user * interaction. Command returns a promise that fulfills/rejects upon * an ack/reject from the backend. */ public promiseCommand = (command :uproxy_core_api.Command, data ?:any) : Promise<any> => { var promiseId :number = ++(this.promiseId_); var payload :browser_connector.Payload = { cmd: 'emit', type: command, data: data, promiseId: promiseId } console.log('UI sending Promise Command ' + uproxy_core_api.Command[command], JSON.stringify(payload)); // Create a new promise and store its fulfill and reject functions. var fulfillFunc :Function; var rejectFunc :Function; var promise :Promise<any> = new Promise<any>((F, R) => { fulfillFunc = F; rejectFunc = R; }); // TODO: we may want to periodically remove garbage from this table // e.g. if the app restarts, all promises should be removed or reject.<|fim▁hole|> }; // Send request to backend. 
this.browserConnector_.send(payload); return promise; } private handleRequestFulfilled_ = (data :any) => { var promiseId = data.promiseId; console.log('promise command fulfilled ' + promiseId); if (this.mapPromiseIdToFulfillAndReject_[promiseId]) { this.mapPromiseIdToFulfillAndReject_[promiseId] .fulfill(data.argsForCallback); delete this.mapPromiseIdToFulfillAndReject_[promiseId]; } else { console.warn('fulfill not found ' + promiseId); } } private handleRequestRejected_ = (data :any) => { var promiseId = data.promiseId; console.log('promise command rejected ' + promiseId); if (this.mapPromiseIdToFulfillAndReject_[promiseId]) { this.mapPromiseIdToFulfillAndReject_[promiseId] .reject(data.errorForCallback); delete this.mapPromiseIdToFulfillAndReject_[promiseId]; } else { console.warn('reject not found ' + promiseId); } } // --- CoreApi interface requirements (sending COMMANDS) --- public getFullState = () :Promise<uproxy_core_api.InitialState> => { return this.promiseCommand(uproxy_core_api.Command.GET_FULL_STATE); } // TODO: Reconnect this hook, which while we're testing, sends a new instance // message anytime we click on the user in the UI. 
sendInstance = (clientId :string) => { this.sendCommand(uproxy_core_api.Command.SEND_INSTANCE_HANDSHAKE_MESSAGE, clientId); } modifyConsent = (command:uproxy_core_api.ConsentCommand) => { console.log('Modifying consent.', command); this.sendCommand(uproxy_core_api.Command.MODIFY_CONSENT, command); } startCopyPasteGet = () : Promise<net.Endpoint> => { console.log('Starting to proxy for CopyPaste'); return this.promiseCommand(uproxy_core_api.Command.START_PROXYING_COPYPASTE_GET); } stopCopyPasteGet = () :Promise<void> => { return this.promiseCommand(uproxy_core_api.Command.STOP_PROXYING_COPYPASTE_GET); } startCopyPasteShare = () => { this.sendCommand(uproxy_core_api.Command.START_PROXYING_COPYPASTE_SHARE); } stopCopyPasteShare = () :Promise<void> => { return this.promiseCommand(uproxy_core_api.Command.STOP_PROXYING_COPYPASTE_SHARE); } sendCopyPasteSignal = (signal:string) => { this.sendCommand(uproxy_core_api.Command.COPYPASTE_SIGNALLING_MESSAGE, signal); } start = (path :social.InstancePath) : Promise<net.Endpoint> => { console.log('Starting to proxy through ' + path); return this.promiseCommand(uproxy_core_api.Command.START_PROXYING, path); } stop = (path :social.InstancePath) => { console.log('Stopping proxy session.'); this.sendCommand(uproxy_core_api.Command.STOP_PROXYING, path); } updateGlobalSettings = (newSettings :uproxy_core_api.GlobalSettings) => { console.log('Updating global settings to ' + JSON.stringify(newSettings)); this.sendCommand(uproxy_core_api.Command.UPDATE_GLOBAL_SETTINGS, newSettings); } // TODO: Implement this or remove it. 
// changeOption = (option) => { // console.log('Changing option ' + option); // this.sendCommand(uproxy_core_api.Command.CHANGE_OPTION, option); // } login = (loginArgs :uproxy_core_api.LoginArgs) : Promise<void> => { return this.promiseCommand(uproxy_core_api.Command.LOGIN, loginArgs); } logout = (networkInfo :social.SocialNetworkInfo) : Promise<void> => { return this.promiseCommand(uproxy_core_api.Command.LOGOUT, networkInfo); } addUser = (inviteUrl: string): Promise<void> => { return this.promiseCommand(uproxy_core_api.Command.ADD_USER, inviteUrl); } // TODO: this should probably take the network path, including userId getInviteUrl = (networkInfo :social.SocialNetworkInfo): Promise<string> => { return this.promiseCommand(uproxy_core_api.Command.GET_INVITE_URL, networkInfo); } // TODO: this should probably take the network path, including userId sendEmail = (emailData :uproxy_core_api.EmailData): void => { this.sendCommand(uproxy_core_api.Command.SEND_EMAIL, emailData); } restart = () => { this.browserConnector_.restart(); } getLogs = () : Promise<string> => { return this.promiseCommand(uproxy_core_api.Command.GET_LOGS); } getNatType = () : Promise<string> => { return this.promiseCommand(uproxy_core_api.Command.GET_NAT_TYPE); } refreshPortControlSupport = () : Promise<void> => { return this.promiseCommand(uproxy_core_api.Command.REFRESH_PORT_CONTROL); } pingUntilOnline = (pingUrl :string) : Promise<void> => { return this.promiseCommand( uproxy_core_api.Command.PING_UNTIL_ONLINE, pingUrl); } getVersion = () :Promise<{ version :string }> => { return this.promiseCommand(uproxy_core_api.Command.GET_VERSION); } } // class CoreConnector export = CoreConnector;<|fim▁end|>
// Also we may want to reject promises after some timeout. this.mapPromiseIdToFulfillAndReject_[promiseId] = { fulfill: fulfillFunc, reject: rejectFunc
<|file_name|>styles.ts<|end_file_name|><|fim▁begin|>/* eslint-disable import/named */ import styled, {DefaultTheme, StyledComponent} from 'styled-components' import {Card} from '@sanity/ui' import {fileTarget} from '../../common/fileTarget' import {withFocusRing} from '../../../components/withFocusRing' export type {FileInfo} from '../../common/fileTarget' // Note: FileTarget needs its own focusRing because we need show it on click, not only when :focus-visible export const FileTarget = fileTarget(withFocusRing(Card)) export const Overlay: StyledComponent<'div', DefaultTheme> = styled.div` position: absolute; display: flex; flex-direction: column; align-items: center; justify-content: center; top: 2px; left: 2px; right: 2px; bottom: 2px;<|fim▁hole|> background-color: var(--card-bg-color); z-index: 3; pointer-events: none; `<|fim▁end|>
<|file_name|>gae_sample.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software<|fim▁hole|> # [START gae_detected_tag] # [END gae_detected_tag] """ [START gae_block_comment_tag] [END gae_block_comment_tag] """<|fim▁end|>
# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
<|file_name|>loader.py<|end_file_name|><|fim▁begin|># vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2018-2020 Florian Bruhin (The Compiler) <[email protected]> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Loader for qutebrowser extensions.""" import importlib.abc import pkgutil import types import typing import sys import pathlib import attr from PyQt5.QtCore import pyqtSlot from qutebrowser import components from qutebrowser.config import config from qutebrowser.utils import log, standarddir from qutebrowser.misc import objects if typing.TYPE_CHECKING: import argparse # ModuleInfo objects for all loaded plugins _module_infos = [] @attr.s class InitContext: """Context an extension gets in its init hook.""" data_dir = attr.ib() # type: pathlib.Path config_dir = attr.ib() # type: pathlib.Path args = attr.ib() # type: argparse.Namespace @attr.s class ModuleInfo: """Information attached to an extension module. This gets used by qutebrowser.api.hook. 
""" _ConfigChangedHooksType = typing.List[typing.Tuple[typing.Optional[str], typing.Callable]] skip_hooks = attr.ib(False) # type: bool init_hook = attr.ib(None) # type: typing.Optional[typing.Callable] config_changed_hooks = attr.ib( attr.Factory(list)) # type: _ConfigChangedHooksType @attr.s class ExtensionInfo: """Information about a qutebrowser extension.""" name = attr.ib() # type: str def add_module_info(module: types.ModuleType) -> ModuleInfo: """Add ModuleInfo to a module (if not added yet).""" # pylint: disable=protected-access if not hasattr(module, '__qute_module_info'): module.__qute_module_info = ModuleInfo() # type: ignore return module.__qute_module_info # type: ignore def load_components(*, skip_hooks: bool = False) -> None: """Load everything from qutebrowser.components.""" for info in walk_components(): _load_component(info, skip_hooks=skip_hooks) def walk_components() -> typing.Iterator[ExtensionInfo]: """Yield ExtensionInfo objects for all modules.""" if hasattr(sys, 'frozen'): yield from _walk_pyinstaller() else: yield from _walk_normal() def _on_walk_error(name: str) -> None: raise ImportError("Failed to import {}".format(name)) def _walk_normal() -> typing.Iterator[ExtensionInfo]: """Walk extensions when not using PyInstaller.""" for _finder, name, ispkg in pkgutil.walk_packages( # Only packages have a __path__ attribute, # but we're sure this is one. path=components.__path__, # type: ignore prefix=components.__name__ + '.', onerror=_on_walk_error): if ispkg: continue yield ExtensionInfo(name=name) def _walk_pyinstaller() -> typing.Iterator[ExtensionInfo]: """Walk extensions when using PyInstaller. 
See https://github.com/pyinstaller/pyinstaller/issues/1905 Inspired by: https://github.com/webcomics/dosage/blob/master/dosagelib/loader.py<|fim▁hole|> toc = set() # type: typing.Set[str] for importer in pkgutil.iter_importers('qutebrowser'): if hasattr(importer, 'toc'): toc |= importer.toc for name in toc: if name.startswith(components.__name__ + '.'): yield ExtensionInfo(name=name) def _get_init_context() -> InitContext: """Get an InitContext object.""" return InitContext(data_dir=pathlib.Path(standarddir.data()), config_dir=pathlib.Path(standarddir.config()), args=objects.args) def _load_component(info: ExtensionInfo, *, skip_hooks: bool = False) -> types.ModuleType: """Load the given extension and run its init hook (if any). Args: skip_hooks: Whether to skip all hooks for this module. This is used to only run @cmdutils.register decorators. """ log.extensions.debug("Importing {}".format(info.name)) mod = importlib.import_module(info.name) mod_info = add_module_info(mod) if skip_hooks: mod_info.skip_hooks = True if mod_info.init_hook is not None and not skip_hooks: log.extensions.debug("Running init hook {!r}" .format(mod_info.init_hook.__name__)) mod_info.init_hook(_get_init_context()) _module_infos.append(mod_info) return mod @pyqtSlot(str) def _on_config_changed(changed_name: str) -> None: """Call config_changed hooks if the config changed.""" for mod_info in _module_infos: if mod_info.skip_hooks: continue for option, hook in mod_info.config_changed_hooks: if option is None: hook() else: cfilter = config.change_filter(option) cfilter.validate() if cfilter.check_match(changed_name): hook() def init() -> None: config.instance.changed.connect(_on_config_changed)<|fim▁end|>
"""
<|file_name|>bitcoin_hr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="hr" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About YardSaleCoin</source> <translation>O YardSaleCoin-u</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;YardSaleCoin&lt;/b&gt; version</source> <translation>&lt;b&gt;YardSaleCoin&lt;/b&gt; verzija</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The YardSaleCoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Adresar</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Dvostruki klik za uređivanje adrese ili oznake</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Dodajte novu adresu</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopiraj trenutno odabranu adresu u međuspremnik</translation> </message> <message> <location 
line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nova adresa</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your YardSaleCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Ovo su vaše YardSaleCoin adrese za primanje isplate. Možda želite dati drukčiju adresu svakom primatelju tako da možete pratiti tko je platio.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Kopirati adresu</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Prikaži &amp;QR Kôd</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a YardSaleCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Izvoz podataka iz trenutnog taba u datoteku</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified YardSaleCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Brisanje</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your 
YardSaleCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Kopirati &amp;oznaku</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Izmjeniti</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Izvoz podataka adresara</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Datoteka vrijednosti odvojenih zarezom (*. csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Pogreška kod izvoza</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Ne mogu pisati u datoteku %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Oznaka</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(bez oznake)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Unesite lozinku</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nova lozinka</translation> </message> <message> <location line="+14"/> <source>Repeat new 
passphrase</source> <translation>Ponovite novu lozinku</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Unesite novi lozinku za novčanik. &lt;br/&gt; Molimo Vas da koristite zaporku od &lt;b&gt;10 ili više slučajnih znakova,&lt;/b&gt; ili &lt;b&gt;osam ili više riječi.&lt;/b&gt;</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Šifriranje novčanika</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ova operacija treba lozinku vašeg novčanika kako bi se novčanik otključao.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Otključaj novčanik</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ova operacija treba lozinku vašeg novčanika kako bi se novčanik dešifrirao.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Dešifriranje novčanika.</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Promjena lozinke</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Unesite staru i novu lozinku za novčanik.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Potvrdi šifriranje novčanika</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> 
<translation>Upozorenje: Ako šifrirate vaš novčanik i izgubite lozinku, &lt;b&gt;IZGUBIT ĆETE SVE SVOJE LITECOINSE!&lt;/b&gt;</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Jeste li sigurni da želite šifrirati svoj novčanik?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Novčanik šifriran</translation> </message> <message> <location line="-56"/> <source>YardSaleCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your yardsalecoins from being stolen by malware infecting your computer.</source> <translation>YardSaleCoin će se sada zatvoriti kako bi dovršio postupak šifriranja. Zapamtite da šifriranje vašeg novčanika ne može u potpunosti zaštititi vaše yardsalecoine od krađe preko zloćudnog softvera koji bi bio na vašem računalu.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Šifriranje novčanika nije uspjelo</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Šifriranje novčanika nije uspjelo zbog interne pogreške. 
Vaš novčanik nije šifriran.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Priložene lozinke se ne podudaraju.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Otključavanje novčanika nije uspjelo</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Lozinka za dešifriranje novčanika nije točna.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Dešifriranje novčanika nije uspjelo</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Lozinka novčanika je uspješno promijenjena.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>&amp;Potpišite poruku...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Usklađivanje s mrežom ...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Pregled</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Prikaži opći pregled novčanika</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transakcije</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Pretraži povijest transakcija</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Uređivanje popisa pohranjenih adresa i 
oznaka</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Prikaži popis adresa za primanje isplate</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Izlaz</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Izlazak iz programa</translation> </message> <message> <location line="+4"/> <source>Show information about YardSaleCoin</source> <translation>Prikaži informacije o YardSaleCoinu</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Više o &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Prikaži informacije o Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Postavke</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Šifriraj novčanik...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Backup novčanika...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Promijena lozinke...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Importiranje blokova sa diska...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Re-indeksiranje blokova na disku...</translation> </message> <message> <location line="-347"/> <source>Send coins to a YardSaleCoin address</source> <translation>Slanje novca na yardsalecoin adresu</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for YardSaleCoin</source> <translation>Promijeni postavke konfiguracije za 
yardsalecoin</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Napravite sigurnosnu kopiju novčanika na drugoj lokaciji</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Promijenite lozinku za šifriranje novčanika</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation type="unfinished"/> </message> <message> <location line="-165"/> <location line="+530"/> <source>YardSaleCoin</source> <translation>YardSaleCoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Novčanik</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About YardSaleCoin</source> <translation>&amp;O YardSaleCoinu</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your YardSaleCoin addresses to prove you own them</source> <translation type="unfinished"/> </message> <message> 
<location line="+2"/> <source>Verify messages to ensure they were signed with specified YardSaleCoin addresses</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Datoteka</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Konfiguracija</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Pomoć</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Traka kartica</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>YardSaleCoin client</source> <translation>YardSaleCoin klijent</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to YardSaleCoin network</source> <translation><numerusform>%n aktivna veza na YardSaleCoin mrežu</numerusform><numerusform>%n aktivne veze na YardSaleCoin mrežu</numerusform><numerusform>%n aktivnih veza na YardSaleCoin mrežu</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Obrađeno %1 blokova povijesti transakcije.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation 
type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation>Greška</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Ažurno</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Ažuriranje...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Poslana transakcija</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Dolazna transakcija</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datum:%1 Iznos:%2 Tip:%3 Adresa:%4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid YardSaleCoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Novčanik je &lt;b&gt;šifriran&lt;/b&gt; i trenutno &lt;b&gt;otključan&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Novčanik je &lt;b&gt;šifriran&lt;/b&gt; i trenutno &lt;b&gt;zaključan&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. 
YardSaleCoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Izmjeni adresu</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Oznaka</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Oznaka ovog upisa u adresar</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Adresa</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adresa ovog upisa u adresar. 
Može se mijenjati samo kod adresa za slanje.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nova adresa za primanje</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova adresa za slanje</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Uredi adresu za primanje</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Uredi adresu za slanje</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Upisana adresa &quot;%1&quot; je već u adresaru.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid YardSaleCoin address.</source> <translation>Upisana adresa &quot;%1&quot; nije valjana yardsalecoin adresa.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Ne mogu otključati novčanik.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Stvaranje novog ključa nije uspjelo.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>YardSaleCoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation>verzija</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Upotreba:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation>UI postavke</translation> 
</message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Pokreni minimiziran</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Postavke</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Glavno</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Plati &amp;naknadu za transakciju</translation> </message> <message> <location line="+31"/> <source>Automatically start YardSaleCoin after logging in to the system.</source> <translation>Automatski pokreni YardSaleCoin kad se uključi računalo</translation> </message> <message> <location line="+3"/> <source>&amp;Start YardSaleCoin on system login</source> <translation>&amp;Pokreni YardSaleCoin kod pokretanja sustava</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Automatically open the YardSaleCoin client port on the router. 
This only works when your router supports UPnP and it is enabled.</source> <translation>Automatski otvori port YardSaleCoin klijenta na ruteru. To radi samo ako ruter podržava UPnP i ako je omogućen.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Mapiraj port koristeći &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the YardSaleCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Spojite se na YardSaleCoin mrežu putem SOCKS proxy-a (npr. kod povezivanja kroz Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Povezivanje putem SOCKS proxy-a:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP adresa proxy-a (npr. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port od proxy-a (npr. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Prikaži samo ikonu u sistemskoj traci nakon minimiziranja prozora</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimiziraj u sistemsku traku umjesto u traku programa</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimizirati umjesto izaći iz aplikacije kada je prozor zatvoren. Kada je ova opcija omogućena, aplikacija će biti zatvorena tek nakon odabira Izlaz u izborniku.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimiziraj kod zatvaranja</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Prikaz</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting YardSaleCoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Jedinica za prikazivanje iznosa:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Izaberite željeni najmanji dio yardsalecoina koji će biti prikazan u sučelju i koji će se koristiti za plaćanje.</translation> </message> <message> <location line="+9"/> <source>Whether to show YardSaleCoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Prikaži adrese u popisu transakcija</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation type="unfinished"/> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Upozorenje</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting YardSaleCoin.</source> <translation 
type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"/> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Oblik</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the YardSaleCoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Stanje:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Nepotvrđene:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Novčanik</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Nedavne transakcije&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Vaše trenutno stanje računa</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Ukupni iznos transakcija koje tek trebaju biti potvrđene, i još uvijek nisu uračunate u trenutni saldo</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> 
<name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start yardsalecoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QR Code Dijalog</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Zatraži plaćanje</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Iznos:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Oznaka</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Poruka:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Spremi kao...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Rezultirajući URI je predug, probajte umanjiti tekst za naslov / poruku.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG slike (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location 
line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation type="unfinished"/> </message> <message> <location line="-217"/> <source>Client version</source> <translation type="unfinished"/> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation type="unfinished"/> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Lanac blokova</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Trenutni broj blokova</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Procjenjeni ukupni broj blokova</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Posljednje vrijeme bloka</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the YardSaleCoin-Qt help message to get a list with possible YardSaleCoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> 
<source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location line="-260"/> <source>Build date</source> <translation type="unfinished"/> </message> <message> <location line="-104"/> <source>YardSaleCoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>YardSaleCoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the YardSaleCoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the YardSaleCoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Slanje novca</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Pošalji k nekoliko primatelja 
odjednom</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Dodaj primatelja</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Obriši sva polja transakcija</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Obriši &amp;sve</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Stanje:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123,456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Potvrdi akciju slanja</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Pošalji</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; do %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Potvrdi slanje novca</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Jeste li sigurni da želite poslati %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> i </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Adresa primatelja je nevaljala, molimo provjerite je ponovo.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Iznos mora biti veći od 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Iznos je veći od stanja računa.</translation> </message> <message> <location line="+5"/> 
<source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Iznos je veći od stanja računa kad se doda naknada za transakcije od %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Pronašli smo adresu koja se ponavlja. U svakom plaćanju program može svaku adresu koristiti samo jedanput.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Greška: Transakcija je odbijena. To se može dogoditi ako su neki novčići u vašem novčaniku već bili potrošeni, na primjer ako ste koristili kopiju datoteke wallet.dat pa su novčići u kopiji bili potrošeni, a ovdje nisu označeni kao potrošeni.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Oblik</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Iznos:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Primatelj plaćanja:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Unesite oznaku za ovu adresu kako bi ju dodali u vaš adresar</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Oznaka:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Odaberite adresu iz adresara</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Zalijepi adresu iz međuspremnika</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Ukloni ovog primatelja</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a YardSaleCoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Unesite YardSaleCoin adresu (npr. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Potpišite poruku</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>Možete potpisati poruke sa svojom adresom kako bi dokazali da ih posjedujete. Budite oprezni da ne potpisujete ništa mutno, jer bi vas phishing napadi mogli na prevaru natjerati da prepišete svoj identitet njima. Potpisujte samo detaljno objašnjene izjave sa kojima se slažete.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Unesite YardSaleCoin adresu (npr. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Odaberite adresu iz adresara</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Zalijepi adresu iz međuspremnika</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Upišite poruku koju želite potpisati ovdje</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this YardSaleCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location 
line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Obriši &amp;sve</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Unesite YardSaleCoin adresu (npr. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified YardSaleCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a YardSaleCoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Unesite YardSaleCoin adresu (npr. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter YardSaleCoin signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> 
</context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The YardSaleCoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Otvoren do %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1 nije dostupan</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepotvrđeno</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 potvrda</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Status</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generiran</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Od</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Za</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation>oznaka</translation> </message> <message> <location 
line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Uplaćeno</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>Nije prihvaćeno</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Zaduženje</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Naknada za transakciju</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Neto iznos</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Poruka</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Komentar</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Generirani novčići moraju pričekati nastanak 120 blokova prije nego što ih je moguće potrošiti. Kad ste generirali taj blok, on je bio emitiran u mrežu kako bi bio dodan postojećim lancima blokova. Ako ne uspije biti dodan, njegov status bit će promijenjen u &quot;nije prihvaćen&quot; i on neće biti potrošiv. 
S vremena na vrijeme tako nešto se može desiti ako neki drugi nod generira blok u približno isto vrijeme.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Iznos</translation> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, još nije bio uspješno emitiran</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>nepoznato</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detalji transakcije</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Ova panela prikazuje detaljni opis transakcije</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tip</translation> </message> <message> <location line="+0"/> 
<source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Iznos</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Otvoren do %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Nije na mreži (%1 potvrda)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Nepotvrđen (%1 od %2 potvrda)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Potvrđen (%1 potvrda)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Generirano - Upozorenje: ovaj blok nije bio primljen od strane bilo kojeg drugog noda i vjerojatno neće biti prihvaćen!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generirano, ali nije prihvaćeno</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Primljeno s</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Primljeno od</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Poslano za</translation> </message> <message> 
<location line="+2"/> <source>Payment to yourself</source> <translation>Plaćanje samom sebi</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Rudareno</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/d)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Status transakcije. Prijeđite mišem preko ovog polja za prikaz broja potvrda.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Datum i vrijeme kad je transakcija primljena.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Vrsta transakcije.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Odredište transakcije.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Iznos odbijen od ili dodan k saldu.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Sve</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Danas</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Ovaj tjedan</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Ovaj mjesec</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Prošli mjesec</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Ove godine</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Raspon...</translation> </message> <message> <location line="+11"/> 
<source>Received with</source> <translation>Primljeno s</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Poslano za</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Tebi</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Rudareno</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Ostalo</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Unesite adresu ili oznaku za pretraživanje</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Min iznos</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopirati adresu</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopirati oznaku</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Kopiraj iznos</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Izmjeniti oznaku</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Izvoz podataka transakcija</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Datoteka podataka odvojenih zarezima (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Potvrđeno</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+1"/> 
<source>Type</source> <translation>Tip</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Oznaka</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Iznos</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Izvoz pogreške</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Ne mogu pisati u datoteku %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Raspon:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>za</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Slanje novca</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Izvoz podataka iz trenutnog taba u datoteku</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> 
<source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>YardSaleCoin version</source> <translation>YardSaleCoin verzija</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Upotreba:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or yardsalecoind</source> <translation>Pošalji komandu usluzi -server ili yardsalecoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Prikaži komande</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Potraži pomoć za komandu</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Postavke:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: yardsalecoin.conf)</source> <translation>Odredi konfiguracijsku datoteku (ugrađeni izbor: yardsalecoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: yardsalecoind.pid)</source> <translation>Odredi proces ID datoteku (ugrađeni izbor: yardsalecoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Odredi direktorij za datoteke</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Postavi cache za bazu podataka u MB (zadano: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9555 or testnet: 19333)</source> <translation>Slušaj na &lt;port&gt;u (default: 9555 ili 
testnet: 19333)</translation> </message><|fim▁hole|> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Održavaj najviše &lt;n&gt; veza sa članovima (default: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Prag za odspajanje članova koji se čudno ponašaju (default: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Broj sekundi koliko se članovima koji se čudno ponašaju neće dopustiti da se opet spoje (default: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9554 or testnet: 19332)</source> <translation>Prihvaćaj JSON-RPC povezivanje na portu broj &lt;port&gt; (ugrađeni izbor: 9554 or testnet: 19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Prihvati komande iz tekst moda i JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Izvršavaj u pozadini kao uslužnik i prihvaćaj komande</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Koristi test mrežu</translation> </message> <message> <location line="-112"/> <source>Accept connections from 
outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=yardsalecoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;YardSaleCoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. YardSaleCoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Upozorenje: -paytxfee je podešen na preveliki iznos. To je iznos koji ćete platiti za obradu transakcije.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong YardSaleCoin will not work properly.</source> <translation>Upozorenje: Molimo provjerite jesu li datum i vrijeme na vašem računalu točni. 
Ako vaš sat ide krivo, YardSaleCoin neće raditi ispravno.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Opcije za kreiranje bloka:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Poveži se samo sa određenim nodom</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> 
<location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> 
<message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importiraj blokove sa vanjskog blk000??.dat fajla</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Nevaljala -tor adresa: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full 
transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Prihvati samo lance blokova koji se podudaraju sa ugrađenim checkpoint-ovima (default: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Dodaj izlaz debuga na početak sa vremenskom oznakom</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the YardSaleCoin Wiki for SSL setup instructions)</source> <translation>SSL postavke: (za detalje o podešavanju SSL opcija vidi YardSaleCoin Wiki)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Šalji trace/debug informacije na konzolu umjesto u debug.log datoteku</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Pošalji 
trace/debug informacije u debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Podesite maksimalnu veličinu bloka u bajtovima (default: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Podesite minimalnu veličinu bloka u bajtovima (default: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Odredi vremenski prozor za spajanje na mrežu u milisekundama (ugrađeni izbor: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Pokušaj koristiti UPnP da otvoriš port za uslugu (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Pokušaj koristiti UPnP da otvoriš port za uslugu (default: 1 when listening)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation 
type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Korisničko ime za JSON-RPC veze</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Lozinka za JSON-RPC veze</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Dozvoli JSON-RPC povezivanje s određene IP adrese</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Pošalji komande nodu na adresi &lt;ip&gt; (ugrađeni izbor: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Izvršite naredbu kada se najbolji blok promjeni (%s u cmd je zamjenjen sa block hash)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Nadogradite novčanik u posljednji format.</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Podesi memorijski prostor za ključeve na &lt;n&gt; (ugrađeni izbor: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet 
transactions</source> <translation>Ponovno pretraži lanac blokova za transakcije koje nedostaju</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Koristi OpenSSL (https) za JSON-RPC povezivanje</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Uslužnikov SSL certifikat (ugrađeni izbor: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Uslužnikov privatni ključ (ugrađeni izbor: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Prihvaljivi načini šifriranja (ugrađeni izbor: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Ova poruka za pomoć</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Program ne može koristiti %s na ovom računalu (bind returned error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Poveži se kroz socks proxy</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Dozvoli DNS upite za dodavanje nodova i povezivanje</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Učitavanje adresa...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Greška kod učitavanja wallet.dat: Novčanik pokvaren</translation> </message> <message> <location line="+1"/> <source>Error 
loading wallet.dat: Wallet requires newer version of YardSaleCoin</source> <translation>Greška kod učitavanja wallet.dat: Novčanik zahtjeva noviju verziju YardSaleCoina</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart YardSaleCoin to complete</source> <translation>Novčanik je trebao prepravak: ponovo pokrenite YardSaleCoin</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Greška kod učitavanja wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Nevaljala -proxy adresa: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Nevaljali iznos za opciju -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Nevaljali iznos za opciju</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Nedovoljna sredstva</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Učitavanje indeksa blokova...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection 
open</source> <translation>Unesite nod s kojim se želite spojiti i pokušaj zadržati vezu otvorenom</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. YardSaleCoin is probably already running.</source> <translation>Program ne može koristiti %s na ovom računalu. YardSaleCoin program je vjerojatno već pokrenut.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Naknada posredniku po KB-u koja će biti dodana svakoj transakciji koju pošalješ</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Učitavanje novčanika...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Nije moguće novčanik vratiti na prijašnju verziju.</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Nije moguće upisati zadanu adresu.</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Rescaniranje...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Učitavanje gotovo</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation>Greška</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS><|fim▁end|>
<message> <location line="+5"/>
import matplotlib.pyplot as plt
import numpy as np
import os
import time
import yaml

from sklearn.learning_curve import learning_curve

from keras.layers.core import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM
from keras.models import Sequential, model_from_yaml
from keras.wrappers.scikit_learn import KerasRegressor

from django.conf import settings
# BUGFIX: ImproperlyConfigured was raised below but never imported (NameError).
from django.core.exceptions import ImproperlyConfigured

from crimeprediction.vectorize import vectorize


# Fail fast at import time if the output/model directories are not configured;
# create them when configured but missing on disk.
if not hasattr(settings, 'OUTPUTS_DIR'):
    raise ImproperlyConfigured(
        'The directory to save output files is missing from your settings')
elif not os.path.exists(settings.OUTPUTS_DIR):
    os.makedirs(settings.OUTPUTS_DIR)

if not hasattr(settings, 'MODEL_DIR'):
    raise ImproperlyConfigured(
        'The directory to save the model is missing from your settings')
elif not os.path.exists(settings.MODEL_DIR):
    os.makedirs(settings.MODEL_DIR)


def run_network(grid_size, period, crime_type=None, seasonal=False):
    '''Build, train and evaluate an LSTM network on vectorized crime data.

    Results (accuracy / F1) and the train/test/prediction matrices are written
    under ``settings.OUTPUTS_DIR`` and the trained model is persisted via
    :func:`save_trained_model`.

    :param grid_size: size of the cell dimension for the grid
    :param period: timestep of crime data
    :param crime_type: type of crime to be trained, None value will train all
    :param seasonal: implement seasonality or not
    '''
    vectors = vectorize(
        grid_size, period, crime_type=crime_type, seasonal=seasonal)

    global_start_time = time.time()

    print('Loading Data...')
    dim = len(vectors[0])
    result = np.array(vectors)
    print('Data : %s' % (result.shape,))

    # Chronological 70/30 split; the target of each timestep is simply the
    # following timestep's vector (one-step-ahead prediction).
    row = int(round(0.7 * result.shape[0]))
    train = result[:row]
    X_train = train[:-1]
    y_train = train[1:]
    test = result[row:]
    X_test = test[:-1]
    y_test = test[1:]

    # The LSTM layer expects input shaped (samples, timesteps, features=1).
    X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1))
    X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))

    print('\nData Loaded. Compiling...\n')

    model = Sequential()
    model.add(LSTM(dim, input_shape=X_train.shape[1:]))
    model.compile(loss='mse', optimizer='rmsprop',)

    print('Train...')
    try:
        # shuffle=False preserves the chronological ordering of the sequence.
        model.fit(X_train, y_train, nb_epoch=1000, shuffle=False)
    except KeyboardInterrupt:
        # Allow the user to cut training short and still evaluate/save.
        pass
    print('Training duration (s) : %s' % (time.time() - global_start_time))

    predicted = model.predict(X_test)
    # NOTE: this is an alias, not a copy -- thresholding below mutates
    # ``predicted`` in place, which is what the original code relied on.
    norm_predicted = predicted

    accuracy = []
    f1scr = []
    for x, data in enumerate(y_test):
        correct = 0
        total = 0
        truepos = 0
        falsepos = 0
        trueneg = 0
        falseneg = 0
        # Threshold each cell to the +1/-1 hotspot encoding and tally the
        # confusion-matrix counts against the ground truth.
        for y, node in enumerate(data):
            total += 1
            if predicted[x][y] > 0:
                norm_predicted[x][y] = 1
                if node == 1:
                    correct += 1
                    truepos += 1
                else:
                    falsepos += 1
            else:
                norm_predicted[x][y] = -1
                if node == -1:
                    correct += 1
                    trueneg += 1
                else:
                    falseneg += 1
        accuracy.append(float(correct) / total)
        # BUGFIX: guard the zero denominators (a timestep with no positive
        # predictions, no actual positives, or both scores zero) which
        # previously raised ZeroDivisionError; score 0.0 instead.
        denom_p = truepos + falsepos
        denom_r = truepos + falseneg
        precision = truepos / float(denom_p) if denom_p else 0.0
        recall = truepos / float(denom_r) if denom_r else 0.0
        if precision + recall:
            f1 = (precision * recall * 2) / float(precision + recall)
        else:
            f1 = 0.0
        f1scr.append(f1)
    print(accuracy)
    # BUGFIX: the original printed the leaked loop variable ``f1`` (last
    # timestep only); print the whole list of per-timestep scores.
    print(f1scr)

    crime_verbose = crime_type if crime_type is not None else "ALL"
    output_folder = settings.OUTPUTS_DIR + \
        'Results_{0}_{1}_{2}_{3}/'.format(
            grid_size, crime_verbose, period, seasonal)
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    results_file = output_folder + 'results.txt'
    predicted_file = output_folder + 'predicted.txt'
    X_train_file = output_folder + 'X_train.txt'
    y_train_file = output_folder + 'y_train.txt'
    X_test_file = output_folder + 'X_test.txt'
    y_test_file = output_folder + 'y_test.txt'

    # BUGFIX: X_train/X_test were reshaped to 3-D above, and np.savetxt only
    # accepts 1-D/2-D arrays; flatten the trailing singleton axis back out.
    np.savetxt(X_train_file, X_train.reshape(X_train.shape[0], -1), fmt='%d')
    np.savetxt(y_train_file, y_train, fmt='%d')
    np.savetxt(X_test_file, X_test.reshape(X_test.shape[0], -1), fmt='%d')
    np.savetxt(y_test_file, y_test, fmt='%d')
    np.savetxt(predicted_file, norm_predicted, fmt='%d')

    results = "Average Accuracy:" + str(np.average(accuracy)) + '\n'
    results += "Average F1 Score:" + str(np.average(f1scr))
    with open(results_file, "w") as output_file:
        output_file.write(results)

    params = {
        'grid_size': grid_size,
        'period': period,
        'crime_type': crime_type if crime_type is not None else 'all',
        'seasonal': seasonal,
    }
    save_trained_model(model, yaml.dump(params))


def save_trained_model(model, params_string):
    '''
    saves trained model to directory and files depending on settings variables

    :param model: model to be saved
    :param params_string: a yaml string of parameters used for the model:
        crime_type, period, grid_size and seasonality
    '''
    folder = settings.MODEL_DIR
    archi = folder + settings.MODEL_ARCHITECTURE
    weights = folder + settings.MODEL_WEIGHTS
    params = folder + settings.MODEL_PARAMS

    # BUGFIX: use context managers instead of open(...).write(...) so the
    # file handles are deterministically closed.
    with open(archi, 'w') as f:
        f.write(model.to_yaml())
    with open(params, 'w') as f:
        f.write(params_string)
    model.save_weights(weights, overwrite=True)


def get_trained_model():
    '''
    reconstruct trained model from saved files

    :rtype: a tuple of the model constructed and the parsed parameters that
        were used to train it
    '''
    folder = settings.MODEL_DIR
    archi = folder + settings.MODEL_ARCHITECTURE
    weights = folder + settings.MODEL_WEIGHTS
    params_file = folder + settings.MODEL_PARAMS

    with open(params_file) as f:
        params = yaml.safe_load(f.read())
    with open(archi) as f:
        model = model_from_yaml(f.read())
    model.load_weights(weights)
    model.compile(loss='mse', optimizer='rmsprop',)
    return model, params


def predict_next(model, **params):
    '''
    predicts next crime hotspots

    :param model: the model to be used for prediction
    :param **params: the vectorization parameters used by the model
        (grid_size, period, crime_type, seasonal)
    '''
    vectors = vectorize(**params)
    print('Loading Data...')
    result = np.array(vectors)
    result = np.reshape(result, (result.shape[0], result.shape[1], 1))
    predicted = model.predict(result)
    return predicted[-1]
/**
Copyright (c) 2007-2013 Alysson Bessani, Eduardo Alchieri, Paulo Sousa, and the authors indicated in the @author tags

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package bftsmart.demo.listvalue;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Serializable container that maps table names to ordered lists of string
 * entries, used by the BFT-SMaRt list/value demo.
 *
 * @author sweta
 */
public class BFTMapList implements Serializable {

    private static final long serialVersionUID = -8898539992606449057L;

    // Backing store: table name -> ordered list of entries.
    private Map<String, List<String>> tableList = null;

    /** Creates an empty container. */
    public BFTMapList() {
        tableList = new HashMap<String, List<String>>();
    }

    /** Returns the whole name-to-list mapping. */
    public Map<String, List<String>> getLists() {
        return tableList;
    }

    /**
     * Registers (or replaces) a list under the given key.
     *
     * @return the list previously stored under that key, or null
     */
    public List<String> addList(String key, List<String> list) {
        return tableList.put(key, list);
    }

    /**
     * Appends a value to the list of an existing table.
     *
     * @return the result of List.add (true on success)
     */
    public boolean addData(String tableName, String value) {
        return tableList.get(tableName).add(value);
    }

    /** Returns the list registered under the given table name. */
    public List<String> getName(String tableName) {
        return tableList.get(tableName);
    }

    /** Returns one entry of a table, logging the lookup to stdout. */
    public String getEntry(String tableName, int index) {
        System.out.println("Table name: "+tableName);
        System.out.println("Entry index: "+ index);
        List<String> entries = tableList.get(tableName);
        System.out.println("Table: "+entries);
        return entries.get(index);
    }

    /** Returns the number of tables currently registered. */
    public int getSizeofList() {
        return tableList.size();
    }

    /** Returns the number of entries stored in one table. */
    public int getSize(String tableName) {
        return tableList.get(tableName).size();
    }

    /** Removes a whole table and returns its list. */
    public List<String> removeList(String tableName) {
        return tableList.remove(tableName);
    }

    /** Removes one entry from a table and returns it. */
    public String removeEntry(String tableName, int index) {
        return tableList.get(tableName).remove(index);
    }
}
<|file_name|>ui.py<|end_file_name|><|fim▁begin|>from enigma import ePicLoad, eTimer, getDesktop, gMainDC, eSize from Screens.Screen import Screen from Tools.Directories import resolveFilename, pathExists, SCOPE_MEDIA, SCOPE_CURRENT_SKIN from Components.Pixmap import Pixmap, MovingPixmap from Components.ActionMap import ActionMap, NumberActionMap from Components.Sources.StaticText import StaticText from Components.FileList import FileList from Components.AVSwitch import AVSwitch from Components.Sources.List import List from Components.ConfigList import ConfigList, ConfigListScreen from Components.config import config, ConfigSubsection, ConfigInteger, ConfigSelection, ConfigText, ConfigYesNo, KEY_LEFT, KEY_RIGHT, KEY_0, getConfigListEntry import skin def getScale(): return AVSwitch().getFramebufferScale() config.pic = ConfigSubsection() config.pic.framesize = ConfigInteger(default=30, limits=(5, 99)) config.pic.slidetime = ConfigInteger(default=10, limits=(1, 60)) config.pic.resize = ConfigSelection(default="1", choices = [("0", _("simple")), ("1", _("better"))]) config.pic.cache = ConfigYesNo(default=True) config.pic.lastDir = ConfigText(default=resolveFilename(SCOPE_MEDIA)) config.pic.infoline = ConfigYesNo(default=True) config.pic.loop = ConfigYesNo(default=True) config.pic.bgcolor = ConfigSelection(default="#00000000", choices = [("#00000000", _("black")),("#009eb9ff", _("blue")),("#00ff5a51", _("red")), ("#00ffe875", _("yellow")), ("#0038FF48", _("green"))]) config.pic.autoOrientation = ConfigYesNo(default=False) config.pic.textcolor = ConfigSelection(default="#0038FF48", choices = [("#00000000", _("black")),("#009eb9ff", _("blue")),("#00ff5a51", _("red")), ("#00ffe875", _("yellow")), ("#0038FF48", _("green"))]) class picshow(Screen): skin = """ <screen name="picshow" position="center,center" size="560,440" title="Picture player" > <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" /> <ePixmap 
pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" /> <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" /> <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" /> <widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" /> <widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" /> <widget source="label" render="Label" position="5,55" size="350,140" font="Regular;19" backgroundColor="#25062748" transparent="1" /> <widget name="thn" position="360,40" size="180,160" alphatest="on" /> <widget name="filelist" position="5,205" zPosition="2" size="550,230" scrollbarMode="showOnDemand" /> </screen>""" def __init__(self, session): Screen.__init__(self, session) self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "MenuActions"], { "cancel": self.KeyExit, "red": self.KeyExit, "green": self.KeyGreen, "yellow": self.KeyYellow, "menu": self.KeyMenu, "ok": self.KeyOk }, -1) self["key_red"] = StaticText(_("Close")) self["key_green"] = StaticText(_("Thumbnails")) self["key_yellow"] = StaticText("") self["label"] = StaticText("") self["thn"] = Pixmap() currDir = config.pic.lastDir.value if not pathExists(currDir): currDir = "/" self.oldService = self.session.nav.getCurrentlyPlayingServiceReference() self.session.nav.stopService() # Show Background MVI import os try: os.system("/usr/bin/showiframe /usr/share/enigma2/black.mvi &") except: pass self.filelist = FileList(currDir, matchingPattern = "(?i)^.*\.(jpeg|jpg|jpe|png|bmp|gif)") self["filelist"] = self.filelist 
self["filelist"].onSelectionChanged.append(self.selectionChanged) self.ThumbTimer = eTimer() self.ThumbTimer.callback.append(self.showThumb) self.picload = ePicLoad() self.picload.PictureData.get().append(self.showPic) self.onLayoutFinish.append(self.setConf) def showPic(self, picInfo=""): ptr = self.picload.getData() if ptr != None: self["thn"].instance.setPixmap(ptr.__deref__()) self["thn"].show() text = picInfo.split('\n',1) self["label"].setText(text[1]) self["key_yellow"].setText(_("Exif")) def showThumb(self): if not self.filelist.canDescent(): if self.filelist.getCurrentDirectory() and self.filelist.getFilename(): if self.picload.getThumbnail(self.filelist.getCurrentDirectory() + self.filelist.getFilename()) == 1: self.ThumbTimer.start(500, True) def selectionChanged(self): if not self.filelist.canDescent(): self.ThumbTimer.start(500, True) else: self["label"].setText("") self["thn"].hide() self["key_yellow"].setText("") def KeyGreen(self): #if not self.filelist.canDescent(): self.session.openWithCallback(self.callbackView, Pic_Thumb, self.filelist.getFileList(), self.filelist.getSelectionIndex(), self.filelist.getCurrentDirectory()) def KeyYellow(self): if not self.filelist.canDescent(): self.session.open(Pic_Exif, self.picload.getInfo(self.filelist.getCurrentDirectory() + self.filelist.getFilename())) def KeyMenu(self): self.session.openWithCallback(self.setConf, Pic_Setup) def KeyOk(self): if self.filelist.canDescent(): self.filelist.descent() else: self.session.openWithCallback(self.callbackView, Pic_Full_View, self.filelist.getFileList(), self.filelist.getSelectionIndex(), self.filelist.getCurrentDirectory()) def setConf(self, retval=None): self.setTitle(_("Picture player")) sc = getScale() #0=Width 1=Height 2=Aspect 3=use_cache 4=resize_type 5=Background(#AARRGGBB) self.picload.setPara((self["thn"].instance.size().width(), self["thn"].instance.size().height(), sc[0], sc[1], config.pic.cache.value, int(config.pic.resize.value), "#00000000", 
config.pic.autoOrientation.value)) def callbackView(self, val=0): if val > 0: self.filelist.moveToIndex(val) def KeyExit(self): del self.picload if self.filelist.getCurrentDirectory() is None: config.pic.lastDir.value = "/" else: config.pic.lastDir.value = self.filelist.getCurrentDirectory() config.pic.save() self.session.nav.playService(self.oldService) self.close() #------------------------------------------------------------------------------------------ class Pic_Setup(Screen, ConfigListScreen): def __init__(self, session): Screen.__init__(self, session) # for the skin: first try MediaPlayerSettings, then Setup, this allows individual skinning self.skinName = ["PicturePlayerSetup", "Setup"] self.setup_title = _("Settings") self.onChangedEntry = [] self.session = session ConfigListScreen.__init__(self, [], session = session, on_change = self.changedEntry) self["actions"] = ActionMap(["SetupActions", "MenuActions"], { "cancel": self.keyCancel, "save": self.keySave, "ok": self.keySave, "menu": self.closeRecursive, }, -2) self["key_red"] = StaticText(_("Cancel")) self["key_green"] = StaticText(_("OK")) self.createSetup() self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.setTitle(self.setup_title) def createSetup(self): setup_list = [ getConfigListEntry(_("Slide show interval (sec.)"), config.pic.slidetime), getConfigListEntry(_("Scaling mode"), config.pic.resize), getConfigListEntry(_("Cache thumbnails"), config.pic.cache), getConfigListEntry(_("Show info line"), config.pic.infoline), getConfigListEntry(_("Frame size in full view"), config.pic.framesize), getConfigListEntry(_("Slide picture in loop"), config.pic.loop), getConfigListEntry(_("Background color"), config.pic.bgcolor), getConfigListEntry(_("Text color"), config.pic.textcolor), getConfigListEntry(_("Fulview resulution"), config.usage.pic_resolution), getConfigListEntry(_("Auto EXIF Orientation rotation/flipping"), config.pic.autoOrientation), ] self["config"].list = 
setup_list self["config"].l.setList(setup_list) def keyLeft(self): ConfigListScreen.keyLeft(self) def keyRight(self): ConfigListScreen.keyRight(self) # for summary: def changedEntry(self): for x in self.onChangedEntry: x() def getCurrentEntry(self): return self["config"].getCurrent()[0] def getCurrentValue(self): return str(self["config"].getCurrent()[1].getText()) def createSummary(self): from Screens.Setup import SetupSummary return SetupSummary #--------------------------------------------------------------------------- class Pic_Exif(Screen): skin = """ <screen name="Pic_Exif" position="center,center" size="560,360" title="Info" > <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" /> <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" /> <widget source="menu" render="Listbox" position="5,50" size="550,310" scrollbarMode="showOnDemand" selectionDisabled="1" > <convert type="TemplatedMultiContent"> { "template": [ MultiContentEntryText(pos = (5, 5), size = (250, 30), flags = RT_HALIGN_LEFT, text = 0), MultiContentEntryText(pos = (260, 5), size = (290, 30), flags = RT_HALIGN_LEFT, text = 1)], "fonts": [gFont("Regular", 20)], "itemHeight": 30 } </convert> </widget> </screen>""" def __init__(self, session, exiflist): Screen.__init__(self, session) self["actions"] = ActionMap(["SetupActions", "ColorActions"], { "cancel": self.close }, -1) self["key_red"] = StaticText(_("Close")) exifdesc = [_("filename")+':', "EXIF-Version:", "Make:", "Camera:", "Date/Time:", "Width / Height:", "Flash used:", "Orientation:", "User Comments:", "Metering Mode:", "Exposure Program:", "Light Source:", "CompressedBitsPerPixel:", "ISO Speed Rating:", "X-Resolution:", "Y-Resolution:", "Resolution Unit:", "Brightness:", "Exposure Time:", "Exposure Bias:", "Distance:", "CCD-Width:", "ApertureFNumber:"] list = [] for x in 
range(len(exiflist)): if x>0: list.append((exifdesc[x], exiflist[x])) else: name = exiflist[x].split('/')[-1] list.append((exifdesc[x], name)) self["menu"] = List(list) self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.setTitle(_("Info")) #---------------------------------------------------------------------------------------- T_INDEX = 0 T_FRAME_POS = 1 T_PAGE = 2 T_NAME = 3 T_FULL = 4 class Pic_Thumb(Screen): def __init__(self, session, piclist, lastindex, path): self.textcolor = config.pic.textcolor.value self.color = config.pic.bgcolor.value self.spaceX, self.picX, self.spaceY, self.picY, textsize, thumtxt = skin.parameters.get("PicturePlayerThumb",(35, 190, 30, 200, 20, 14)) pic_frame = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/pic_frame.png") size_w = getDesktop(0).size().width() size_h = getDesktop(0).size().height() self.thumbsX = size_w / (self.spaceX + self.picX) # thumbnails in X self.thumbsY = size_h / (self.spaceY + self.picY) # thumbnails in Y self.thumbsC = self.thumbsX * self.thumbsY # all thumbnails self.positionlist = [] skincontent = "" posX = -1 for x in range(self.thumbsC): posY = x / self.thumbsX posX += 1 if posX >= self.thumbsX: posX = 0 absX = self.spaceX + (posX*(self.spaceX + self.picX)) absY = self.spaceY + (posY*(self.spaceY + self.picY)) self.positionlist.append((absX, absY)) skincontent += "<widget source=\"label" + str(x) + "\" render=\"Label\" position=\"" + str(absX+5) + "," + str(absY+self.picY-textsize) + "\" size=\"" + str(self.picX - 10) + "," + str(textsize) \ + "\" font=\"Regular;" + str(thumtxt) + "\" zPosition=\"2\" transparent=\"1\" noWrap=\"1\" foregroundColor=\"" + self.textcolor + "\" />" skincontent += "<widget name=\"thumb" + str(x) + "\" position=\"" + str(absX+5)+ "," + str(absY+5) + "\" size=\"" + str(self.picX -10) + "," + str(self.picY - (textsize*2)) + "\" zPosition=\"2\" transparent=\"1\" alphatest=\"on\" />" # Screen, backgroundlabel and MovingPixmap self.skin = 
"<screen position=\"0,0\" size=\"" + str(size_w) + "," + str(size_h) + "\" flags=\"wfNoBorder\" > \ <eLabel position=\"0,0\" zPosition=\"0\" size=\""+ str(size_w) + "," + str(size_h) + "\" backgroundColor=\"" + self.color + "\" />" \ + "<widget name=\"frame\" position=\"" + str(self.spaceX)+ "," + str(self.spaceY)+ "\" size=\"" + str(self.picX) + "," + str(self.picY) + "\" pixmap=\"" + pic_frame + "\" zPosition=\"1\" alphatest=\"on\" />" \ + skincontent + "</screen>" Screen.__init__(self, session) self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "MovieSelectionActions"], { "cancel": self.Exit, "ok": self.KeyOk, "left": self.key_left, "right": self.key_right, "up": self.key_up, "down": self.key_down, "showEventInfo": self.StartExif, }, -1) self["frame"] = MovingPixmap() for x in range(self.thumbsC): self["label"+str(x)] = StaticText() self["thumb"+str(x)] = Pixmap() self.Thumbnaillist = [] self.filelist = [] self.currPage = -1 self.dirlistcount = 0 self.path = path index = 0 framePos = 0 Page = 0 for x in piclist: if x[0][1] == False: self.filelist.append((index, framePos, Page, x[0][0], path + x[0][0])) index += 1 framePos += 1 if framePos > (self.thumbsC -1): framePos = 0 Page += 1 else: self.dirlistcount += 1 self.maxentry = len(self.filelist)-1 self.index = lastindex - self.dirlistcount if self.index < 0: self.index = 0 self.picload = ePicLoad() self.picload.PictureData.get().append(self.showPic) self.onLayoutFinish.append(self.setPicloadConf) self.ThumbTimer = eTimer() self.ThumbTimer.callback.append(self.showPic) def setPicloadConf(self): sc = getScale() self.picload.setPara([self["thumb0"].instance.size().width(), self["thumb0"].instance.size().height(), sc[0], sc[1], config.pic.cache.value, int(config.pic.resize.value), self.color, config.pic.autoOrientation.value]) self.paintFrame() def paintFrame(self): #print "index=" + str(self.index) if self.maxentry < self.index or self.index < 0: return pos = 
self.positionlist[self.filelist[self.index][T_FRAME_POS]] self["frame"].moveTo( pos[0], pos[1], 1) self["frame"].startMoving() if self.currPage != self.filelist[self.index][T_PAGE]: self.currPage = self.filelist[self.index][T_PAGE] self.newPage() def newPage(self): self.Thumbnaillist = [] #clear Labels and Thumbnail for x in range(self.thumbsC): self["label"+str(x)].setText("") self["thumb"+str(x)].hide() #paint Labels and fill Thumbnail-List for x in self.filelist: if x[T_PAGE] == self.currPage: self["label"+str(x[T_FRAME_POS])].setText("(" + str(x[T_INDEX]+1) + ") " + x[T_NAME]) self.Thumbnaillist.append([0, x[T_FRAME_POS], x[T_FULL]]) #paint Thumbnail start self.showPic() def showPic(self, picInfo=""): for x in range(len(self.Thumbnaillist)): if self.Thumbnaillist[x][0] == 0: if self.picload.getThumbnail(self.Thumbnaillist[x][2]) == 1: #zu tun probier noch mal self.ThumbTimer.start(500, True) else: self.Thumbnaillist[x][0] = 1 break elif self.Thumbnaillist[x][0] == 1: self.Thumbnaillist[x][0] = 2 ptr = self.picload.getData() if ptr != None: self["thumb" + str(self.Thumbnaillist[x][1])].instance.setPixmap(ptr.__deref__()) self["thumb" + str(self.Thumbnaillist[x][1])].show() def key_left(self): self.index -= 1 if self.index < 0: self.index = self.maxentry self.paintFrame() def key_right(self): self.index += 1 if self.index > self.maxentry: self.index = 0 self.paintFrame() def key_up(self): self.index -= self.thumbsX if self.index < 0: self.index =self.maxentry self.paintFrame() def key_down(self): self.index += self.thumbsX if self.index > self.maxentry: self.index = 0 self.paintFrame() def StartExif(self): if self.maxentry < 0: return self.session.open(Pic_Exif, self.picload.getInfo(self.filelist[self.index][T_FULL])) def KeyOk(self): if self.maxentry < 0: return self.old_index = self.index self.session.openWithCallback(self.callbackView, Pic_Full_View, self.filelist, self.index, self.path) def callbackView(self, val=0): self.index = val if self.old_index != 
self.index: self.paintFrame() def Exit(self): del self.picload self.close(self.index + self.dirlistcount) #--------------------------------------------------------------------------- class Pic_Full_View(Screen): def __init__(self, session, filelist, index, path): self.textcolor = config.pic.textcolor.value self.bgcolor = config.pic.bgcolor.value space = config.pic.framesize.value self.size_w = size_w = getDesktop(0).size().width() self.size_h = size_h = getDesktop(0).size().height() if config.usage.pic_resolution.value and (size_w, size_h) != eval(config.usage.pic_resolution.value): (size_w, size_h) = eval(config.usage.pic_resolution.value) gMainDC.getInstance().setResolution(size_w, size_h) getDesktop(0).resize(eSize(size_w, size_h)) self.skin = "<screen position=\"0,0\" size=\"" + str(size_w) + "," + str(size_h) + "\" flags=\"wfNoBorder\" > \ <eLabel position=\"0,0\" zPosition=\"0\" size=\""+ str(size_w) + "," + str(size_h) + "\" backgroundColor=\""+ self.bgcolor +"\" /><widget name=\"pic\" position=\"" + str(space) + "," + str(space) + "\" size=\"" + str(size_w-(space*2)) + "," + str(size_h-(space*2)) + "\" zPosition=\"1\" alphatest=\"on\" /> \ <widget name=\"point\" position=\""+ str(space+5) + "," + str(space+2) + "\" size=\"20,20\" zPosition=\"2\" pixmap=\"skin_default/icons/record.png\" alphatest=\"on\" /> \ <widget name=\"play_icon\" position=\""+ str(space+25) + "," + str(space+2) + "\" size=\"20,20\" zPosition=\"2\" pixmap=\"skin_default/icons/ico_mp_play.png\" alphatest=\"on\" /> \ <widget source=\"file\" render=\"Label\" position=\""+ str(space+45) + "," + str(space) + "\" size=\""+ str(size_w-(space*2)-50) + ",25\" font=\"Regular;20\" borderWidth=\"1\" borderColor=\"#000000\" halign=\"left\" foregroundColor=\"" + self.textcolor + "\" zPosition=\"2\" noWrap=\"1\" transparent=\"1\" /></screen>" Screen.__init__(self, session) self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "MovieSelectionActions"], { "cancel": 
self.Exit, "green": self.PlayPause, "yellow": self.PlayPause, "blue": self.nextPic, "red": self.prevPic,<|fim▁hole|> "right": self.nextPic, "showEventInfo": self.StartExif, "contextMenu": self.KeyMenu, }, -1) self["point"] = Pixmap() self["pic"] = Pixmap() self["play_icon"] = Pixmap() self["file"] = StaticText(_("please wait, loading picture...")) self.old_index = 0 self.filelist = [] self.lastindex = index self.currPic = [] self.shownow = True self.dirlistcount = 0 for x in filelist: if len(filelist[0]) == 3: #orig. filelist if x[0][1] == False: self.filelist.append(path + x[0][0]) else: self.dirlistcount += 1 elif len(filelist[0]) == 2: #scanlist if x[0][1] == False: self.filelist.append(x[0][0]) else: self.dirlistcount += 1 else: # thumbnaillist self.filelist.append(x[T_FULL]) self.maxentry = len(self.filelist)-1 self.index = index - self.dirlistcount if self.index < 0: self.index = 0 self.picload = ePicLoad() self.picload.PictureData.get().append(self.finish_decode) self.slideTimer = eTimer() self.slideTimer.callback.append(self.slidePic) if self.maxentry >= 0: self.onLayoutFinish.append(self.setPicloadConf) def setPicloadConf(self): self.setConf() self["play_icon"].hide() if config.pic.infoline.value == False: self["file"].setText("") self.start_decode() def setConf(self, retval=None): sc = getScale() #0=Width 1=Height 2=Aspect 3=use_cache 4=resize_type 5=Background(#AARRGGBB) self.picload.setPara([self["pic"].instance.size().width(), self["pic"].instance.size().height(), sc[0], sc[1], 0, int(config.pic.resize.value), self.bgcolor, config.pic.autoOrientation.value]) def ShowPicture(self): if self.shownow and len(self.currPic): self.shownow = False if config.pic.infoline.value: self["file"].setText(self.currPic[0]) else: self["file"].setText("") self.lastindex = self.currPic[1] self["pic"].instance.setPixmap(self.currPic[2].__deref__()) self.currPic = [] self.next() self.start_decode() def finish_decode(self, picInfo=""): self["point"].hide() ptr = 
self.picload.getData() if ptr != None: text = "" try: text = picInfo.split('\n',1) text = "(" + str(self.index+1) + "/" + str(self.maxentry+1) + ") " + text[0].split('/')[-1] except: pass self.currPic = [] self.currPic.append(text) self.currPic.append(self.index) self.currPic.append(ptr) self.ShowPicture() def start_decode(self): self.picload.startDecode(self.filelist[self.index]) self["point"].show() def next(self): self.index += 1 if self.index > self.maxentry: self.index = 0 def prev(self): self.index -= 1 if self.index < 0: self.index = self.maxentry def slidePic(self): print "slide to next Picture index=" + str(self.lastindex) if config.pic.loop.value==False and self.lastindex == self.maxentry: self.PlayPause() self.shownow = True self.ShowPicture() def PlayPause(self): if self.slideTimer.isActive(): self.slideTimer.stop() self["play_icon"].hide() else: self.slideTimer.start(config.pic.slidetime.value*1000) self["play_icon"].show() self.nextPic() def prevPic(self): self.currPic = [] self.index = self.lastindex self.prev() self.start_decode() self.shownow = True def nextPic(self): self.shownow = True self.ShowPicture() def StartExif(self): if self.maxentry < 0: return self.session.open(Pic_Exif, self.picload.getInfo(self.filelist[self.lastindex])) def KeyMenu(self): self.session.openWithCallback(self.setConf, Pic_Setup) def Exit(self): del self.picload if config.usage.pic_resolution.value and (self.size_w, self.size_h) != eval(config.usage.pic_resolution.value): gMainDC.getInstance().setResolution(self.size_w, self.size_h) getDesktop(0).resize(eSize(self.size_w, self.size_h)) self.close(self.lastindex + self.dirlistcount)<|fim▁end|>
"left": self.prevPic,
<|file_name|>ptr_offset.rs<|end_file_name|><|fim▁begin|>extern crate crucible; use std::ptr; use crucible::*;<|fim▁hole|>use crucible::method_spec::{MethodSpec, MethodSpecBuilder, clobber_globals}; fn f(ptr: *mut u8) { unsafe { ptr::swap(ptr, ptr.add(1)) }; } #[crux_test] fn f_test() { clobber_globals(); let mut x = <[u8; 2]>::symbolic("x"); crucible_assume!(x[0] > 0); f(&mut x[0]); crucible_assert!(x[1] > 0); } fn f_spec() -> MethodSpec { let mut x = <[u8; 2]>::symbolic("x"); crucible_assume!(x[0] > 0); crucible_assume!(x[1] == 0); let mut msb = MethodSpecBuilder::new(f); msb.add_arg(& &mut x[0]); msb.gather_assumes(); // Call happens here crucible_assert!(x[1] > 0); msb.set_return(&()); msb.gather_asserts(); msb.finish() } // Spec: // - Pre state: // - Fresh vars: x0, x1 // - Fresh allocs: ptr0 // - Args: ptr0 // - PointsTos: ptr0 -> [x0, x1] // - Preconditions: x0 > 0 // - Post state: // - Fresh vars: y0, y1 // - Fresh allocs: none // - Return: () // - PointsTos: ptr0 -> [y0, y1] // - Postconditions: y1 > 0 #[crux_test] fn use_f() { f_spec().enable(); let a = u8::symbolic("a"); let b = u8::symbolic("b"); let x = u8::symbolic("x"); let y = u8::symbolic("y"); crucible_assume!(0 < a && a < 10); crucible_assume!(b == 0); let mut arr = [x, a, b, y]; f(&mut arr[1]); let [x2, a2, b2, y2] = arr; crucible_assert!(0 < b2); crucible_assert!(b2 < 10); crucible_assert!(x2 == x); crucible_assert!(y2 == y); }<|fim▁end|>
<|file_name|>previewAdapterSpec.js<|end_file_name|><|fim▁begin|>/* jshint unused: false */ /* global beforeEach, afterEach */ /* global describe, it, expect, jasmine */ /* global runs, spyOn, waitsFor, waits */ /* global window, eb, loadFixtures, document */ /* global $, _, d3*/ /* global describeInterface, describeIntegeration*/ /* global PreviewAdapter*/ // ////////////////////////////////////////////////////////////////////////////// // / @brief Graph functionality // / // / @file // / // / DISCLAIMER // / // / Copyright 2010-2012 triagens GmbH, Cologne, Germany // / // / Licensed under the Apache License, Version 2.0 (the "License") // / you may not use this file except in compliance with the License. // / You may obtain a copy of the License at // / // / http://www.apache.org/licenses/LICENSE-2.0 // / // / Unless required by applicable law or agreed to in writing, software // / distributed under the License is distributed on an "AS IS" BASIS, // / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // / See the License for the specific language governing permissions and // / limitations under the License. 
// / // / Copyright holder is triAGENS GmbH, Cologne, Germany // / // / @author Michael Hackstein // / @author Copyright 2011-2013, triAGENS GmbH, Cologne, Germany // ////////////////////////////////////////////////////////////////////////////// (function () { 'use strict'; describe('Preview Adapter', function () { describeInterface(new PreviewAdapter([], [], {})); /* describeIntegeration(function() { spyOn($, "ajax").andCallFake(function(req) { var node1 = {_id: 1}, node2 = {_id: 2}, edge = {_id: "1-2", _from: 1, _to: 2} switch(req.type) { case "DELETE": req.success() break case "POST": if (req.url.match(/nodes$/)) { req.success({_id: 1}) } else if (req.url.match(/edges/)) { req.success({_id: "1-2"}) } break case "GET": req.success({ first: {_id: 1}, nodes: { "1": {_id: 1}, "2": {_id: 2} }, edges: { "1-2": {_id: "1-2", _from: 1, _to: 2} } }) break default: req.success() } }) return new FoxxAdapter([], [], "foxx/route") }) */ var adapter, nodes, edges; beforeEach(function () { nodes = []; edges = []; }); it('should throw an error if no nodes are given', function () { expect( function () { var t = new PreviewAdapter(); } ).toThrow('The nodes have to be given.'); }); it('should throw an error if no edges are given', function () { expect( function () { var t = new PreviewAdapter([]); } ).toThrow('The edges have to be given.'); }); it('should not throw an error if necessary info is given', function () { expect( function () { var t = new PreviewAdapter([], []); } ).toThrow('A reference to the graph viewer has to be given.'); }); it('should not throw an error if necessary info is given', function () { expect( function () { var t = new PreviewAdapter([], [], {}); } ).not.toThrow(); }); it('should create a nodeReducer instance', function () { spyOn(window, 'NodeReducer'); var adapter = new PreviewAdapter( nodes, edges, {} ); expect(window.NodeReducer).wasCalledWith(); }); describe('setup correctly', function () { var viewer; beforeEach(function () { var self = this; viewer 
= { cleanUp: function () {} }; self.fakeReducerRequest = function () {}; self.fakeReducerBucketRequest = function () {}; spyOn(window, 'NodeReducer').andCallFake(function () { return { getCommunity: function (limit, focus) { if (focus !== undefined) { return self.fakeReducerRequest(limit, focus); } return self.fakeReducerRequest(limit); }, bucketNodes: function (toSort, numBuckets) { return self.fakeReducerBucketRequest(toSort, numBuckets); } }; }); adapter = new PreviewAdapter( nodes, edges, viewer ); }); it('should be load a graph of 5 nodes and 5 edges', function () { var called, id; runs(function () { called = false; id = 1; var callback = function () { called = true; }; adapter.loadNode(id, callback); }); waitsFor(function () { return called; }, 1000); runs(function () { expect(nodes.length).toEqual(5); expect(edges.length).toEqual(5); }); }); it('nodes should offer a label', function () { var called, id; runs(function () { called = false; id = 1; var callback = function () { called = true; }; adapter.loadNode(id, callback); }); waitsFor(function () { return called; }, 1000); runs(function () { _.each(nodes, function (n) { expect(n._data.label).toBeDefined(); }); }); }); it('node one should offer an image', function () { var called, id; runs(function () { called = false; id = 1; var callback = function () { called = true; }; adapter.loadNode(id, callback); }); waitsFor(function () { return called; }, 1000); runs(function () { _.each(nodes, function (n) { if (n._id === 1) { expect(n._data.image).toBeDefined(); expect(n._data.image).toEqual('img/stored.png'); } else { expect(n._data.image).toBeUndefined(); } }); }); }); it('edges should offer a label', function () { var called, id; runs(function () { called = false; id = 1; var callback = function () { called = true; }; adapter.loadNode(id, callback); }); waitsFor(function () { return called; }, 1000); runs(function () { _.each(edges, function (e) { expect(e._data.label).toBeDefined(); }); }); }); it('should 
alert insertion of a node', function () { spyOn(window, 'alert'); var node = {_id: 1}; adapter.createNode(node); expect(window.alert).wasCalledWith('Server-side: createNode was triggered.'); }); it('should alert change of a node', function () { spyOn(window, 'alert');<|fim▁hole|> data = {name: 'Alice'}; adapter.patchNode(toPatch, data); expect(window.alert).wasCalledWith('Server-side: patchNode was triggered.'); }); it('should alert deletion of a node', function () { spyOn(window, 'alert'); var node = {_id: 1}; adapter.deleteNode(node); expect(window.alert).wasCalledWith('Server-side: deleteNode was triggered.'); expect(window.alert).wasCalledWith('Server-side: onNodeDelete was triggered.'); }); it('should be able to insert an edge', function () { spyOn(window, 'alert'); var source = {_id: 1}, target = {_id: 2}, edge = { source: source, target: target, label: 'Foxx' }; adapter.createEdge(edge); expect(window.alert).wasCalledWith('Server-side: createEdge was triggered.'); }); it('should be able to change an edge', function () { spyOn(window, 'alert'); var source = {_id: 1}, target = {_id: 2}, edge = { _id: '1-2', source: source, target: target }, patch = { label: 'Foxx' }; adapter.patchEdge(edge, patch); expect(window.alert).wasCalledWith('Server-side: patchEdge was triggered.'); }); it('should be able to delete an edge', function () { spyOn(window, 'alert'); var source = {_id: 1}, target = {_id: 2}, edge = { _id: '1-2', source: source, target: target }; adapter.deleteEdge(edge); expect(window.alert).wasCalledWith('Server-side: deleteEdge was triggered.'); }); }); }); }());<|fim▁end|>
var toPatch = {_id: 1},
<|file_name|>send_second_level_links.js<|end_file_name|><|fim▁begin|>// Copyright OpenLogic, Inc. // See LICENSE file for license information. // var totalRequests = 0; // First check the MIME type of the URL. If it is the desired type, then make // the AJAX request to get the content (DOM) and extract the relevant links // in the content.<|fim▁hole|> xhr.onreadystatechange = function() { if (this.readyState == this.DONE && this.getResponseHeader('content-type').indexOf("text/html") != -1) { totalRequests += 1; chrome.runtime.sendMessage({ total: totalRequests }); requestDOM(url); } } xhr.send(); } function requestDOM(url) { var domRequest = new XMLHttpRequest(); domRequest.open('GET', url, true); domRequest.onreadystatechange = function() { if (this.readyState == this.DONE && this.status == 200) { var dom = $.parseHTML(this.responseText); extractLinks(dom); } } domRequest.send(); } function extractLinks(doc) { try { var domain = window.parent.location.origin; var aTag = 'a'; if (domain == 'http://sourceforge.net') aTag = 'a.name' var links = $(aTag, doc).toArray(); links = links.map(function(element) { // Proceed only if the link is in the same domain. if (element.href.indexOf(domain) == 0) { // Return an anchor's href attribute, stripping any URL fragment (hash '#'). // If the html specifies a relative path, chrome converts it to an absolute // URL. var href = element.href; var hashIndex = href.indexOf('#'); if (hashIndex > -1) href = href.substr(0, hashIndex); return href; } }); // Remove undefined from the links array. for (var n = links.length - 1; n >= 0; --n) { if (links[n] == undefined) links.splice(n, 1); } links.sort(); totalRequests -= 1; chrome.runtime.sendMessage({ remainder: totalRequests }); chrome.extension.sendRequest(links); } catch (error) { // Do nothing. 
totalRequests -= 1; chrome.runtime.sendMessage({ remainder: totalRequests }); } } window.sendSecondLevelLinks = function() { var firstLevelLinks = window.getLinks(); for (var index in firstLevelLinks) { var url = firstLevelLinks[index]; var current_location = window.location.href; var domain = window.parent.location.origin; // - skip urls that look like "parents" of the current one if (url.indexOf(current_location) != -1 && url.indexOf(domain) == 0) follow_html_mime_type(url); } } window.sendSecondLevelLinks();<|fim▁end|>
function follow_html_mime_type(url) { var xhr = new XMLHttpRequest(); xhr.open('HEAD', url);
<|file_name|>selecting_stuff.py<|end_file_name|><|fim▁begin|># Copyright 2009-2014 Ram Rachum. # This program is distributed under the MIT license. ''' This module defines scripts for selecting stuff. See their documentation for more information. ''' from __future__ import with_statement import bisect import re import _ast import os.path, sys sys.path += [ os.path.dirname(__file__), os.path.join(os.path.dirname(__file__), 'third_party.zip'), ] import wingapi import shared SAFETY_LIMIT = 60 '''The maximum number of times we'll do `select-more` before giving up.''' def _ast_parse(string): return compile(string, '<unknown>', 'exec', _ast.PyCF_ONLY_AST) def _is_expression(string): '''Is `string` a Python expression?''' # Throwing out '\r' characters because `ast` can't process them for some # reason: string = string.replace('\r', '') try: nodes = _ast_parse(string).body except SyntaxError: return False else: if len(nodes) != 1: return False else: (node,) = nodes return type(node) == _ast.Expr variable_name_pattern_text = r'[a-zA-Z_][0-9a-zA-Z_]*' dotted_name_pattern = re.compile( r'\.?^%s(\.%s)*$' % (variable_name_pattern_text, variable_name_pattern_text) ) def _is_dotted_name(string): '''Is `string` a dotted name?''' assert isinstance(string, str) return bool(dotted_name_pattern.match(string.strip())) whitespace_characters = ' \n\r\t\f\v'<|fim▁hole|> return not any((whitespace_character in string for whitespace_character in whitespace_characters)) def _select_more_until_biggest_match(condition, editor=wingapi.kArgEditor): '''`select-more` until reaching biggest text that satisfies `condition`.''' assert isinstance(editor, wingapi.CAPIEditor) document = editor.GetDocument() select_more = lambda: wingapi.gApplication.ExecuteCommand('select-more') is_selection_good = lambda: condition( document.GetCharRange(*editor.GetSelection()).strip() ) last_success_n_iterations = None last_start, last_end = original_selection = editor.GetSelection() with 
shared.ScrollRestorer(editor): with shared.SelectionRestorer(editor): for i in range(SAFETY_LIMIT): select_more() current_start, current_end = editor.GetSelection() if (current_start == last_start) and (current_end == last_end): break if is_selection_good(): last_success_n_iterations = i last_start, last_end = current_start, current_end if last_success_n_iterations is not None: for i in range(last_success_n_iterations+1): select_more() def select_expression(editor=wingapi.kArgEditor): ''' Select the Python expression that the cursor is currently on. This does `select-more` until the biggest possible legal Python expression is selected. Suggested key combination: `Ctrl-Alt-Plus` ''' _select_more_until_biggest_match(_is_expression, editor) def select_dotted_name(editor=wingapi.kArgEditor): ''' Select the dotted name that the cursor is currently on, like `foo.bar.baz`. This does `select-more` until the biggest possible dotted name is selected. Suggested key combination: `Alt-Plus` ''' _select_more_until_biggest_match(_is_dotted_name, editor) def select_whitespaceless_name(editor=wingapi.kArgEditor): ''' Select the whitespace-less name that the cursor is currently on. Example: `foo.bar.baz(e=3)`. This does `select-more` until the biggest possible whitespace-less name is selected. Suggested key combination: `Ctrl-Alt-Equal` ''' _select_more_until_biggest_match(_is_whitespaceless_name, editor) _scope_name_pattern = re.compile( r'''(?:^|[ \t\r\n])(?:def|class) +([a-zA-Z_][0-9a-zA-Z_]*)''' r'''[ \t\r\n]*[(:]''', flags=re.DOTALL ) def _get_scope_name_positions(document): document_text = shared.get_text(document) matches = _scope_name_pattern.finditer(document_text) return tuple(match.span(1) for match in matches) def select_next_scope_name(editor=wingapi.kArgEditor, app=wingapi.kArgApplication): ''' Select the next scope name like `def thing():` or `class Thing():`. (Selects just the name.) 
Suggested key combination: `Alt-Semicolon` ''' assert isinstance(editor, wingapi.CAPIEditor) _, position = editor.GetSelection() position += 1 scope_name_positions = _get_scope_name_positions(editor.GetDocument()) scope_name_ends = tuple(scope_name_position[1] for scope_name_position in scope_name_positions) scope_name_index = bisect.bisect_left(scope_name_ends, position) if 0 <= scope_name_index < len(scope_name_ends): app.ExecuteCommand('set-visit-history-anchor') editor.SetSelection(*scope_name_positions[scope_name_index]) def select_prev_scope_name(editor=wingapi.kArgEditor, app=wingapi.kArgApplication): ''' Select the previous scope name like `def thing():` or `class Thing():`. (Selects just the name.) Suggested key combination: `Alt-Colon` ''' assert isinstance(editor, wingapi.CAPIEditor) position, _ = editor.GetSelection() position -= 1 scope_name_positions = _get_scope_name_positions(editor.GetDocument()) scope_name_starts = tuple(scope_name_position[0] for scope_name_position in scope_name_positions) scope_name_index = bisect.bisect_left(scope_name_starts, position) - 1 if 0 <= scope_name_index < len(scope_name_starts): app.ExecuteCommand('set-visit-history-anchor') editor.SetSelection(*scope_name_positions[scope_name_index])<|fim▁end|>
def _is_whitespaceless_name(string): '''Is `string` a whitespace-less name?''' assert isinstance(string, str)
<|file_name|>file_tests.py<|end_file_name|><|fim▁begin|>""" SoftLayer.tests.CLI.modules.file_tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :license: MIT, see LICENSE for more details. """ from SoftLayer import exceptions from SoftLayer import testing import json import mock class FileTests(testing.TestCase): def test_access_list(self): result = self.run_command(['file', 'access-list', '1234']) self.assert_no_fail(result) def test_authorize_host_to_volume(self): result = self.run_command(['file', 'access-authorize', '12345678', '--hardware-id=100', '--virtual-id=10', '--ip-address-id=192', '--ip-address=192.3.2.1', '--subnet-id=200']) self.assert_no_fail(result) def test_deauthorize_host_to_volume(self): result = self.run_command(['file', 'access-revoke', '12345678', '--hardware-id=100', '--virtual-id=10', '--ip-address-id=192', '--ip-address=192.3.2.1', '--subnet-id=200']) self.assert_no_fail(result) def test_volume_list(self): result = self.run_command(['file', 'volume-list']) self.assert_no_fail(result) self.assertEqual([ { 'bytes_used': None, 'capacity_gb': 10, 'datacenter': 'Dallas', 'id': 1, 'ip_addr': '127.0.0.1', 'storage_type': 'ENDURANCE', 'username': 'user', 'active_transactions': None, 'mount_addr': '127.0.0.1:/TEST', 'rep_partner_count': None }], json.loads(result.output)) @mock.patch('SoftLayer.FileStorageManager.list_file_volumes') def test_volume_count(self, list_mock): list_mock.return_value = [ {'serviceResource': {'datacenter': {'name': 'dal09'}}}, {'serviceResource': {'datacenter': {'name': 'ams01'}}}, {'serviceResource': {'datacenter': {'name': 'ams01'}}} ] result = self.run_command(['file', 'volume-count']) self.assert_no_fail(result) self.assertEqual( { 'ams01': 2, 'dal09': 1 }, json.loads(result.output)) def test_snapshot_list(self): result = self.run_command(['file', 'snapshot-list', '1234']) self.assert_no_fail(result) self.assertEqual([ { 'id': 470, 'name': 'unit_testing_note', 'created': '2016-07-06T07:41:19-05:00', 'size_bytes': '42', 
}], json.loads(result.output)) def test_volume_cancel(self): result = self.run_command([ '--really', 'file', 'volume-cancel', '1234']) self.assert_no_fail(result) self.assertEqual('File volume with id 1234 has been marked' ' for cancellation\n', result.output) self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem', args=(False, True, None)) def test_volume_cancel_with_billing_item(self): result = self.run_command([ '--really', 'file', 'volume-cancel', '1234']) self.assert_no_fail(result) self.assertEqual('File volume with id 1234 has been marked' ' for cancellation\n', result.output) self.assert_called_with('SoftLayer_Network_Storage', 'getObject') def test_volume_cancel_without_billing_item(self): p_mock = self.set_mock('SoftLayer_Network_Storage', 'getObject') p_mock.return_value = { "accountId": 1234, "capacityGb": 20, "createDate": "2015-04-29T06:55:55-07:00", "id": 11111, "nasType": "NAS", "username": "SL01SEV307608_1" } result = self.run_command([ '--really', 'file', 'volume-cancel', '1234']) self.assertIsInstance(result.exception, exceptions.SoftLayerError) def test_volume_detail(self): result = self.run_command(['file', 'volume-detail', '1234']) self.assert_no_fail(result) self.assertEqual({ 'Username': 'username', 'Used Space': '0B', 'Endurance Tier': 'READHEAVY_TIER', 'IOPs': 1000, 'Mount Address': '127.0.0.1:/TEST', 'Snapshot Capacity (GB)': '10', 'Snapshot Used (Bytes)': 1024, 'Capacity (GB)': '20GB', 'Target IP': '10.1.2.3', 'Data Center': 'dal05', 'Type': 'ENDURANCE', 'ID': 100, '# of Active Transactions': '1', 'Ongoing Transaction': 'This is a buffer time in which the customer may cancel the server', 'Replicant Count': '1', 'Replication Status': 'Replicant Volume Provisioning ' 'has completed.', 'Replicant Volumes': [[ {'Replicant ID': 'Volume Name', '1784': 'TEST_REP_1'}, {'Replicant ID': 'Target IP', '1784': '10.3.174.79'}, {'Replicant ID': 'Data Center', '1784': 'wdc01'}, {'Replicant ID': 'Schedule', '1784': 'REPLICATION_HOURLY'}, ], [ 
{'Replicant ID': 'Volume Name', '1785': 'TEST_REP_2'}, {'Replicant ID': 'Target IP', '1785': '10.3.177.84'}, {'Replicant ID': 'Data Center', '1785': 'dal01'}, {'Replicant ID': 'Schedule', '1785': 'REPLICATION_DAILY'}, ]], 'Original Volume Properties': [ {'Property': 'Original Volume Size', 'Value': '20'}, {'Property': 'Original Volume Name', 'Value': 'test-original-volume-name'}, {'Property': 'Original Snapshot Name', 'Value': 'test-original-snapshot-name'} ] }, json.loads(result.output)) def test_volume_order_performance_iops_not_given(self): result = self.run_command(['file', 'volume-order', '--storage-type=performance', '--size=20', '--location=dal05']) self.assertEqual(2, result.exit_code) def test_volume_order_performance_snapshot_error(self): result = self.run_command(['file', 'volume-order', '--storage-type=performance', '--size=20', '--iops=100', '--location=dal05', '--snapshot-size=10', '--service-offering=performance']) self.assertEqual(2, result.exit_code) @mock.patch('SoftLayer.FileStorageManager.order_file_volume') def test_volume_order_performance(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 478, 'items': [ {'description': 'Performance Storage'}, {'description': 'File Storage'}, {'description': '0.25 IOPS per GB'}, {'description': '20 GB Storage Space'}, {'description': '10 GB Storage Space (Snapshot Space)'}] } } result = self.run_command(['file', 'volume-order', '--storage-type=performance', '--size=20', '--iops=100', '--location=dal05', '--snapshot-size=10']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #478 placed successfully!\n' ' > Performance Storage\n > File Storage\n' ' > 0.25 IOPS per GB\n > 20 GB Storage Space\n' ' > 10 GB Storage Space (Snapshot Space)\n') def test_volume_order_endurance_tier_not_given(self): result = self.run_command(['file', 'volume-order', '--storage-type=endurance', '--size=20', '--location=dal05']) self.assertEqual(2, result.exit_code) 
@mock.patch('SoftLayer.FileStorageManager.order_file_volume') def test_volume_order_endurance(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 478, 'items': [ {'description': 'Endurance Storage'}, {'description': 'File Storage'}, {'description': '0.25 IOPS per GB'}, {'description': '20 GB Storage Space'}, {'description': '10 GB Storage Space (Snapshot Space)'}] } } result = self.run_command(['file', 'volume-order', '--storage-type=endurance', '--size=20', '--tier=0.25', '--location=dal05', '--snapshot-size=10']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #478 placed successfully!\n' ' > Endurance Storage\n > File Storage\n' ' > 0.25 IOPS per GB\n > 20 GB Storage Space\n' ' > 10 GB Storage Space (Snapshot Space)\n') @mock.patch('SoftLayer.FileStorageManager.order_file_volume') def test_volume_order_order_not_placed(self, order_mock): order_mock.return_value = {} result = self.run_command(['file', 'volume-order', '--storage-type=endurance', '--size=20', '--tier=0.25', '--location=dal05']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order could not be placed! 
Please verify ' 'your options and try again.\n') def test_volume_order_hourly_billing_not_available(self): result = self.run_command(['file', 'volume-order', '--storage-type=endurance', '--size=20', '--tier=0.25', '--location=dal10', '--billing=hourly', '--service-offering=enterprise']) self.assertEqual(2, result.exit_code) @mock.patch('SoftLayer.FileStorageManager.order_file_volume') def test_volume_order_hourly_billing(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 479, 'items': [ {'description': 'Storage as a Service'}, {'description': 'File Storage'}, {'description': '20 GB Storage Space'}, {'description': '0.25 IOPS per GB'}, {'description': '10 GB Storage Space (Snapshot Space)'}] } } result = self.run_command(['file', 'volume-order', '--storage-type=endurance', '--size=20', '--tier=0.25', '--location=dal05', '--service-offering=storage_as_a_service', '--billing=hourly', '--snapshot-size=10']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #479 placed successfully!\n' ' > Storage as a Service\n' ' > File Storage\n' ' > 20 GB Storage Space\n' ' > 0.25 IOPS per GB\n' ' > 10 GB Storage Space (Snapshot Space)\n') @mock.patch('SoftLayer.FileStorageManager.order_file_volume') def test_volume_order_performance_manager_error(self, order_mock): order_mock.side_effect = ValueError('failure!') result = self.run_command(['file', 'volume-order', '--storage-type=performance', '--size=20', '--iops=100', '--location=dal05']) self.assertEqual(2, result.exit_code) self.assertEqual('Argument Error: failure!', result.exception.message) @mock.patch('SoftLayer.FileStorageManager.order_file_volume') def test_volume_order_endurance_manager_error(self, order_mock): order_mock.side_effect = ValueError('failure!') result = self.run_command(['file', 'volume-order', '--storage-type=endurance', '--size=20', '--tier=0.25', '--location=dal05']) self.assertEqual(2, result.exit_code) self.assertEqual('Argument Error: failure!', 
result.exception.message) def test_enable_snapshots(self): result = self.run_command(['file', 'snapshot-enable', '12345678', '--schedule-type=HOURLY', '--minute=10', '--retention-count=5']) self.assert_no_fail(result) def test_disable_snapshots(self): result = self.run_command(['file', 'snapshot-disable', '12345678', '--schedule-type=HOURLY']) self.assert_no_fail(result) def test_list_volume_schedules(self): result = self.run_command([ 'file', 'snapshot-schedule-list', '12345678']) self.assert_no_fail(result) self.assertEqual([ { "week": None, "maximum_snapshots": None, "hour": None, "day_of_week": None, "day": None, "replication": None, "date_of_month": None, "month_of_year": None, "active": "", "date_created": "", "type": "WEEKLY", "id": 978, "minute": '30' }, { "week": None, "maximum_snapshots": None, "hour": None, "day_of_week": None, "day": None, "replication": '*', "date_of_month": None, "month_of_year": None, "active": "", "date_created": "", "type": "INTERVAL", "id": 988, "minute": '*' } ], json.loads(result.output)) def test_create_snapshot(self): result = self.run_command(['file', 'snapshot-create', '12345678']) self.assert_no_fail(result) self.assertEqual('New snapshot created with id: 449\n', result.output) @mock.patch('SoftLayer.FileStorageManager.create_snapshot') def test_create_snapshot_unsuccessful(self, snapshot_mock): snapshot_mock.return_value = [] result = self.run_command(['file', 'snapshot-create', '8', '-n=note'])<|fim▁hole|> 'state which prevents taking snapshots.\n', result.output) def test_snapshot_restore(self): result = self.run_command(['file', 'snapshot-restore', '12345678', '--snapshot-id=87654321']) self.assert_no_fail(result) self.assertEqual(result.output, 'File volume 12345678 is being' ' restored using snapshot 87654321\n') def test_delete_snapshot(self): result = self.run_command(['file', 'snapshot-delete', '12345678']) self.assert_no_fail(result) @mock.patch('SoftLayer.FileStorageManager.order_snapshot_space') def 
test_snapshot_order_order_not_placed(self, order_mock): order_mock.return_value = {} result = self.run_command(['file', 'snapshot-order', '1234', '--capacity=10', '--tier=0.25']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order could not be placed! Please verify ' 'your options and try again.\n') @mock.patch('SoftLayer.FileStorageManager.order_snapshot_space') def test_snapshot_order_performance_manager_error(self, order_mock): order_mock.side_effect = ValueError('failure!') result = self.run_command(['file', 'snapshot-order', '1234', '--capacity=10', '--tier=0.25']) self.assertEqual(2, result.exit_code) self.assertEqual('Argument Error: failure!', result.exception.message) @mock.patch('SoftLayer.FileStorageManager.order_snapshot_space') def test_snapshot_order(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 8702, 'items': [{'description': '10 GB Storage Space (Snapshot Space)'}], 'status': 'PENDING_APPROVAL', } } result = self.run_command(['file', 'snapshot-order', '1234', '--capacity=10', '--tier=0.25']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #8702 placed successfully!\n' ' > 10 GB Storage Space (Snapshot Space)\n' ' > Order status: PENDING_APPROVAL\n') def test_snapshot_cancel(self): result = self.run_command(['--really', 'file', 'snapshot-cancel', '1234']) self.assert_no_fail(result) self.assertEqual('File volume with id 1234 has been marked' ' for snapshot cancellation\n', result.output) self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem', args=(False, True, None)) def test_replicant_failover(self): result = self.run_command(['file', 'replica-failover', '12345678', '--replicant-id=5678', '--immediate']) self.assert_no_fail(result) self.assertEqual('Failover to replicant is now in progress.\n', result.output) @mock.patch('SoftLayer.FileStorageManager.failover_to_replicant') def test_replicant_failover_unsuccessful(self, failover_mock): failover_mock.return_value = False result = 
self.run_command(['file', 'replica-failover', '12345678', '--replicant-id=5678']) self.assertEqual('Failover operation could not be initiated.\n', result.output) def test_replicant_failback(self): result = self.run_command(['file', 'replica-failback', '12345678', '--replicant-id=5678']) self.assert_no_fail(result) self.assertEqual('Failback from replicant is now in progress.\n', result.output) @mock.patch('SoftLayer.FileStorageManager.failback_from_replicant') def test_replicant_failback_unsuccessful(self, failback_mock): failback_mock.return_value = False result = self.run_command(['file', 'replica-failback', '12345678', '--replicant-id=5678']) self.assertEqual('Failback operation could not be initiated.\n', result.output) @mock.patch('SoftLayer.FileStorageManager.order_replicant_volume') def test_replicant_order_order_not_placed(self, order_mock): order_mock.return_value = {} result = self.run_command(['file', 'replica-order', '100', '--snapshot-schedule=DAILY', '--location=dal05']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order could not be placed! 
Please verify ' 'your options and try again.\n') @mock.patch('SoftLayer.FileStorageManager.order_replicant_volume') def test_replicant_order(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 77309, 'items': [ {'description': 'Endurance Storage'}, {'description': '2 IOPS per GB'}, {'description': 'File Storage'}, {'description': '20 GB Storage Space'}, {'description': '10 GB Storage Space (Snapshot Space)'}, {'description': '20 GB Storage Space Replicant of: TEST'}, ], } } result = self.run_command(['file', 'replica-order', '100', '--snapshot-schedule=DAILY', '--location=dal05', '--tier=2']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #77309 placed successfully!\n' ' > Endurance Storage\n' ' > 2 IOPS per GB\n' ' > File Storage\n' ' > 20 GB Storage Space\n' ' > 10 GB Storage Space (Snapshot Space)\n' ' > 20 GB Storage Space Replicant of: TEST\n') def test_replication_locations(self): result = self.run_command(['file', 'replica-locations', '1234']) self.assert_no_fail(result) self.assertEqual( { '12345': 'Dallas 05', }, json.loads(result.output)) @mock.patch('SoftLayer.FileStorageManager.get_replication_locations') def test_replication_locations_unsuccessful(self, locations_mock): locations_mock.return_value = False result = self.run_command(['file', 'replica-locations', '1234']) self.assert_no_fail(result) self.assertEqual('No data centers compatible for replication.\n', result.output) def test_replication_partners(self): result = self.run_command(['file', 'replica-partners', '1234']) self.assert_no_fail(result) self.assertEqual([ { 'ID': 1784, 'Account ID': 3000, 'Capacity (GB)': 20, 'Host ID': None, 'Guest ID': None, 'Hardware ID': None, 'Username': 'TEST_REP_1', }, { 'ID': 1785, 'Account ID': 3001, 'Host ID': None, 'Guest ID': None, 'Hardware ID': None, 'Capacity (GB)': 20, 'Username': 'TEST_REP_2', }], json.loads(result.output)) @mock.patch('SoftLayer.FileStorageManager.get_replication_partners') def 
test_replication_partners_unsuccessful(self, partners_mock): partners_mock.return_value = False result = self.run_command(['file', 'replica-partners', '1234']) self.assertEqual( 'There are no replication partners for the given volume.\n', result.output) @mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume') def test_duplicate_order_exception_caught(self, order_mock): order_mock.side_effect = ValueError('order attempt failed, oh noooo!') result = self.run_command(['file', 'volume-duplicate', '100']) self.assertEqual(2, result.exit_code) self.assertEqual('Argument Error: order attempt failed, oh noooo!', result.exception.message) @mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume') def test_duplicate_order_order_not_placed(self, order_mock): order_mock.return_value = {} result = self.run_command(['file', 'volume-duplicate', '100', '--duplicate-iops=1400']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order could not be placed! Please verify ' 'your options and try again.\n') @mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume') def test_duplicate_order(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 24602, 'items': [{'description': 'Storage as a Service'}] } } result = self.run_command(['file', 'volume-duplicate', '100', '--origin-snapshot-id=470', '--duplicate-size=250', '--duplicate-tier=2', '--duplicate-snapshot-size=20']) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #24602 placed successfully!\n' ' > Storage as a Service\n') @mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume') def test_duplicate_order_hourly_billing(self, order_mock): order_mock.return_value = { 'placedOrder': { 'id': 24602, 'items': [{'description': 'Storage as a Service'}] } } result = self.run_command(['file', 'volume-duplicate', '100', '--origin-snapshot-id=470', '--duplicate-size=250', '--duplicate-tier=2', '--billing=hourly', '--duplicate-snapshot-size=20']) 
order_mock.assert_called_with('100', origin_snapshot_id=470, duplicate_size=250, duplicate_iops=None, duplicate_tier_level=2, duplicate_snapshot_size=20, hourly_billing_flag=True) self.assert_no_fail(result) self.assertEqual(result.output, 'Order #24602 placed successfully!\n' ' > Storage as a Service\n') @mock.patch('SoftLayer.FileStorageManager.order_modified_volume') def test_modify_order_exception_caught(self, order_mock): order_mock.side_effect = ValueError('order attempt failed, noooo!') result = self.run_command(['file', 'volume-modify', '102', '--new-size=1000']) self.assertEqual(2, result.exit_code) self.assertEqual('Argument Error: order attempt failed, noooo!', result.exception.message) @mock.patch('SoftLayer.FileStorageManager.order_modified_volume') def test_modify_order_order_not_placed(self, order_mock): order_mock.return_value = {} result = self.run_command(['file', 'volume-modify', '102', '--new-iops=1400']) self.assert_no_fail(result) self.assertEqual('Order could not be placed! Please verify your options and try again.\n', result.output) @mock.patch('SoftLayer.FileStorageManager.order_modified_volume') def test_modify_order(self, order_mock): order_mock.return_value = {'placedOrder': {'id': 24602, 'items': [{'description': 'Storage as a Service'}, {'description': '1000 GBs'}, {'description': '4 IOPS per GB'}]}} result = self.run_command(['file', 'volume-modify', '102', '--new-size=1000', '--new-tier=4']) order_mock.assert_called_with('102', new_size=1000, new_iops=None, new_tier_level=4) self.assert_no_fail(result) self.assertEqual('Order #24602 placed successfully!\n > Storage as a Service\n > 1000 GBs\n > 4 IOPS per GB\n', result.output)<|fim▁end|>
self.assertEqual('Error occurred while creating snapshot.\n' 'Ensure volume is not failed over or in another '
<|file_name|>ReplayFileConfigDlg.cpp<|end_file_name|><|fim▁begin|>/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ /** * \file ReplayFileConfigDlg.cpp * \brief Implementation file for CReplayFileConfigDlg class * \author Raja N * \copyright Copyright (c) 2011, Robert Bosch Engineering and Business Solutions. All rights reserved. * * Implementation file for CReplayFileConfigDlg class */ #include "Replay_stdafx.h" // For standard includes #include "ReplayFile.h" // For Replay File class declaration #include "MsgReplayWnd.h" // For message Window class declaration #include "ReplayManager.h" // For Replay Manager class declaration #include "Utility/RadixEdit.h" // For Radix edit control used in Cfg dialog #include "ReplayFileConfigDlg.h" // For Replay File Configuration dialog #include "ReplayProcess.h" // For Replay Process class declaration #include "Filter/Filter_extern.h" #include <locale.h> #include "Utility\MultiLanguageSupport.h" //#include "../Application/GettextBusmaster.h" #define defREPLAY_FILE_IMAGE_INDEX 3 #define defREPLAY_FILE_COL_WIDTH_PROPOTION 0.95 #define defSTR_REPLAY_FILE_COL_NAME "Replay File" #define defSTR_DELETE_CONFORMATION "Do you want to remove selected replay file?" #define defSTR_REPALY_FILTER_DLG_TITLE "Configure Filter for Replay File: %s" /** * Standard default constructor. 
* This will initialise local variables */ CReplayFileConfigDlg::CReplayFileConfigDlg( CReplayManager& rouManager, const SFILTERAPPLIED_CAN* psFilterConfigured, CWnd* pParent /*=NULL*/) : CDialog(CReplayFileConfigDlg::IDD, pParent), m_rouManager( rouManager ), m_psFilterConfigured(psFilterConfigured) { //{{AFX_DATA_INIT(CReplayFileConfigDlg) m_nReplayMode = 0; //}}AFX_DATA_INIT m_bUpdating = FALSE; m_nSelecetedNamedLogIndex = -1; m_omStrMsgType = "Tx Messages"; } /** * \param[in] pDX Pointer to data exchange object * * This will map UI controls will mapped data members. This will * be called during UpdateData funtion call for data update */ void CReplayFileConfigDlg::DoDataExchange(CDataExchange* pDX) { CDialog::DoDataExchange(pDX); //{{AFX_DATA_MAP(CReplayFileConfigDlg) DDX_Control(pDX, IDC_CHK_INTERACTIVE, m_omChkInteractive); DDX_Control(pDX, IDC_EDIT_MSG_DELAY, m_omEditMsgDelay); DDX_Control(pDX, IDC_EDIT_CYCLE_DELAY, m_omEditCycleDelay); DDX_Control(pDX, IDC_CHK_RETAIN_RECORDED_DELAY, m_omChkRetainDelay); DDX_Control(pDX, IDC_EDIT_REPLAY_FILE_NAME, m_omEditReplayFileName); DDX_Control(pDX, IDC_LIST_REPALY_FILES, m_omLstcReplayFiles); DDX_Radio(pDX, IDC_RADIO_REPLAY_MODE_MONO, m_nReplayMode); DDX_CBString(pDX, IDC_COMBO_MSG_TYPE, m_omStrMsgType); //}}AFX_DATA_MAP } BEGIN_MESSAGE_MAP(CReplayFileConfigDlg, CDialog) //{{AFX_MSG_MAP(CReplayFileConfigDlg) ON_NOTIFY(NM_CLICK, IDC_LIST_REPALY_FILES, OnClickListRepalyFiles) ON_NOTIFY(NM_DBLCLK, IDC_LIST_REPALY_FILES, OnDblclkListRepalyFiles) ON_NOTIFY(LVN_ITEMCHANGED, IDC_LIST_REPALY_FILES, OnItemchangedListRepalyFiles) ON_BN_CLICKED(IDC_BTN_BROWSE, OnBtnBrowse) ON_BN_CLICKED(IDC_CHK_RETAIN_RECORDED_DELAY, OnChkRetainRecordedDelay) ON_EN_UPDATE(IDC_EDIT_MSG_DELAY, OnUpdateEditMsgDelay) ON_BN_CLICKED(IDC_RADIO_REPLAY_MODE_MONO, OnRadioReplayModeMono) ON_EN_UPDATE(IDC_EDIT_CYCLE_DELAY, OnUpdateEditCycleDelay) ON_BN_CLICKED(IDC_CHK_INTERACTIVE, OnChkInteractive) ON_BN_CLICKED(IDC_BTN_ADD_FILE, OnBtnAddFile) 
ON_BN_CLICKED(IDC_BTN_DELETE_FILE, OnBtnDeleteFile) ON_BN_CLICKED(IDC_RADIO_REPLAY_MODE_CYCLIC, OnRadioReplayModeMono) ON_BN_CLICKED(IDC_BTN_FILTER, OnBtnFilter) ON_CBN_SELCHANGE(IDC_COMBO_MSG_TYPE, OnComboMsgTypeChanged) //}}AFX_MSG_MAP END_MESSAGE_MAP() /** * \return FALSE - If focus is set to Any UI control explicitly * * Initialises dialog's UI components */ BOOL CReplayFileConfigDlg::OnInitDialog() { CDialog::OnInitDialog(); // Create Image List used in UI List bCreateImageList(); // Create Replay Files UI List vCreateReplayFileList(); // To Create Replay Components vCreateReplayCopms(); // Init Replay Components vInitReplayCopms(); // Init Replay List vInitReplayFileList(); // Update Button Status vEnableDisableButtons(); //Update Replay Msg Type vUpdateReplayMsgType(); return TRUE; // return TRUE unless you set the focus to a control // EXCEPTION: OCX Property Pages should return FALSE } /** * \return TRUE if success, FALSE otherwise * * To create image lists used in the dialog */ BOOL CReplayFileConfigDlg::bCreateImageList() { // Flag to indicate result BOOL bReturn = FALSE; // Create Image List if( m_omImageList.Create( IDB_BMP_PROPERTIES, defICON_SIZE, defICON_GROW, defCOLOR_WHITE ) == TRUE ) { // If successful set the result bReturn = TRUE; } // Return the result return bReturn; } /** * To create replay file list control with required columns */ VOID CReplayFileConfigDlg::vCreateReplayFileList() { // Insert Log File Name Column m_omLstcReplayFiles.InsertColumn(0, _(defSTR_REPLAY_FILE_COL_NAME) ); // Set the width to occupy the whole list CRect omRect; m_omLstcReplayFiles.GetWindowRect( &omRect ); int nWidth = static_cast<int>( omRect.Width() * defREPLAY_FILE_COL_WIDTH_PROPOTION ); // Set Col Width m_omLstcReplayFiles.SetColumnWidth( 0, nWidth ); // Set Image List if( m_omImageList.m_hImageList != NULL ) { m_omLstcReplayFiles.SetImageList( &m_omImageList, LVSIL_SMALL ); } // Set Extended Property m_omLstcReplayFiles.SetExtendedStyle( LVS_EX_CHECKBOXES ); 
} /** * Create replay components. There is no component now which * requires creation. For future usage. */ VOID CReplayFileConfigDlg::vCreateReplayCopms() { // To do any creation } /** * To initialise replay details UI components */ VOID CReplayFileConfigDlg::vInitReplayCopms() { // Update Message Delay Edit box m_omEditMsgDelay.vSetSigned( FALSE ); m_omEditMsgDelay.vSetBase( BASE_DECIMAL ); // Update Cycle Delay Edit Box m_omEditCycleDelay.vSetSigned( FALSE ); m_omEditCycleDelay.vSetBase( BASE_DECIMAL ); } /** * To initialise replay list UI control with the list of replay files */ VOID CReplayFileConfigDlg::vInitReplayFileList() { // Get the list of files and populate the list control // Switch off update m_bUpdating = TRUE; // Delete all items from the list m_omLstcReplayFiles.DeleteAllItems(); // Get the size and iterate through the list int nSize = (int)m_rouManager.m_omReplayFiles.GetSize(); for( int nIndex = 0; nIndex < nSize; nIndex++ ) { const CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( nIndex ); m_omLstcReplayFiles.InsertItem( nIndex, ouFile.m_omStrFileName, defREPLAY_FILE_IMAGE_INDEX ); //Update Check button status m_omLstcReplayFiles.SetCheck( nIndex, ouFile.m_bEnabled ); } // Enable update m_bUpdating = FALSE; // Set the focus to the first item m_omLstcReplayFiles.SetItemState( 0, LVIS_SELECTED | LVIS_FOCUSED, LVIS_SELECTED | LVIS_FOCUSED ); } /** * To update UI controls with respection to selection and other * related conditions */ VOID CReplayFileConfigDlg::vEnableDisableButtons() { // Get the list item count int nSize = m_omLstcReplayFiles.GetItemCount(); // If list is empty then disable replay comps BOOL bEnable = nSize > 0; vEnableReplayComps( bEnable ); // Disable Enable/Delete button CWnd* pWnd = GetDlgItem(IDC_BTN_DELETE_FILE); if( pWnd != NULL ) { pWnd->EnableWindow( bEnable ); } } VOID CReplayFileConfigDlg::vUpdateReplayMsgType() { //switch(m_ouRepl } /** * This function will be called wher user clicks the list * control. 
This will update the selection will set the * selection to the last item if nothing is selected by the user */ void CReplayFileConfigDlg::OnClickListRepalyFiles(NMHDR* /*pNMHDR*/, LRESULT* pResult) { // Create selection mask UINT unItemStateMask = LVNI_SELECTED | LVNI_FOCUSED; // Get current selection int nSel = m_omLstcReplayFiles.GetNextItem( -1, LVNI_SELECTED ); // If nothing got selected restore last selection if(nSel == -1) { m_omLstcReplayFiles.SetItemState( m_nSelecetedNamedLogIndex, unItemStateMask, unItemStateMask ); } if( pResult != NULL ) { *pResult = 0; } } /** * This function will be called wher user double clicks the list * control. This will update the selection will set the * selection to the last item if nothing is selected by the user */ void CReplayFileConfigDlg::OnDblclkListRepalyFiles(NMHDR* /*pNMHDR*/, LRESULT* pResult) { // Create selection mask UINT unItemStateMask = LVNI_SELECTED | LVNI_FOCUSED; // Get current selection int nSel = m_omLstcReplayFiles.GetNextItem( -1, LVNI_SELECTED ); // If nothing got selected restore last selection if(nSel == -1) { m_omLstcReplayFiles.SetItemState( m_nSelecetedNamedLogIndex, unItemStateMask, unItemStateMask ); } if( pResult != NULL ) { *pResult = 0; } } /** * This function will be called wher user changes the selection * in the replay list. This will update UI with the selected * replay file details. This will also be called during check * box press and will update the replay file enable flag. 
*/ void CReplayFileConfigDlg::OnItemchangedListRepalyFiles( NMHDR* pNMHDR, LRESULT* pResult ) { NM_LISTVIEW* pNMListView = (NM_LISTVIEW*)pNMHDR; // Update Log File Components if(pNMListView->uChanged != LVIF_TEXT && m_bUpdating == FALSE ) { // Selected & Focused if( pNMListView->uNewState == (LVIS_FOCUSED | LVIS_SELECTED) ) { // Update selection m_nSelecetedNamedLogIndex = pNMListView->iItem; // Update selected Log file details vUpdateReplayFileDetails( pNMListView->iItem); } if( pNMListView->uNewState & defMASK_CHECK_UNCHECK ) { int nCurrentState = m_omLstcReplayFiles.GetCheck(pNMListView->iItem); // Update Data CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( pNMListView->iItem ); ouFile.m_bEnabled = nCurrentState; } } if( pResult != NULL ) { *pResult = 0; } } /** * \param[in] nSelectedIndex UI Index of the replay file in the list * * Update the replay information in the UI with the replay file * pointed by the index */ VOID CReplayFileConfigDlg::vUpdateReplayFileDetails( int nSelectedIndex ) { // Check for valid index if( nSelectedIndex != -1 && nSelectedIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get the details from the data array const CReplayFile& odLog = m_rouManager.m_omReplayFiles.ElementAt( nSelectedIndex ); // Update Log File Components with this details vUpdateReplayFileComps( odLog ); } } /** * \param[in] rouFile Reference to replay file * * Updates UI with the information given by rouFile. 
*/ VOID CReplayFileConfigDlg::vUpdateReplayFileComps( const CReplayFile& rouFile ) { // File Path m_omEditReplayFileName.SetWindowText( rouFile.m_omStrFileName ); // Message Time Mode switch( rouFile.m_nTimeMode ) { case defREPLAY_RETAIN_DELAY: { // Enable Retain Delay Checkbox and check the item m_omChkRetainDelay.SetCheck( TRUE ); // Disable Specific Delay option m_omEditMsgDelay.EnableWindow( FALSE ); } break; case defREPLAY_SPECIFIC_DELAY: { // Disable Retain Delay Checkbox and check the item m_omChkRetainDelay.SetCheck( FALSE ); // Enable Specific Delay option m_omEditMsgDelay.EnableWindow( TRUE ); // Set the value m_omEditMsgDelay.vSetValue( rouFile.m_unMsgTimeDelay ); } break; default: // Invalid value ASSERT( FALSE ); } // Replay Mode // Assign the mode value m_nReplayMode = rouFile.m_nReplayMode; // Update Cyclic time delay edit box if( rouFile.m_nReplayMode == defREPLAY_MODE_CYCLIC ) { // Enable Cyclic Delay m_omEditCycleDelay.EnableWindow( TRUE ); m_omEditCycleDelay.vSetValue( rouFile.m_unCycleTimeDelay ); } else { // Disable Cyclic Delay Editbox m_omEditCycleDelay.EnableWindow( FALSE ); } // Update Interactive Replay Option m_omChkInteractive.SetCheck( rouFile.m_bInteractive ); //Message Types switch(rouFile.m_ouReplayMsgType) { case DIR_RX: m_omStrMsgType = _(defSTR_MSG_DIR_RX); break; case DIR_TX: m_omStrMsgType = _(defSTR_MSG_DIR_TX); break; case DIR_ALL: default: m_omStrMsgType = _(defSTR_SELECTION_ALL); break; } // Update DDX data with UI UpdateData( FALSE ); } /** * This function will be called when user selects Browse button. * This function will show file selection dialog and will update * replay file data if user selects any file. 
*/ void CReplayFileConfigDlg::OnBtnBrowse() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get Selected Item Details CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( m_nSelecetedNamedLogIndex ); DWORD dwFlags = 0; dwFlags = OFN_HIDEREADONLY | OFN_PATHMUSTEXIST | OFN_EXTENSIONDIFFERENT; // Show File Selection Dialog to select Log File CFileDialog omFileDlg( TRUE, defSTR_LOG_FILE_EXTENSION, ouFile.m_omStrFileName, dwFlags, defLOG_FILTER, NULL ); //Set the caption omFileDlg.m_ofn.lpstrTitle = _(defSTR_REPLAY_FILE_SELECTION_TITLE); // Show File open dialog if( omFileDlg.DoModal() == IDOK ) { // Get Path from File Selection Dialog ouFile.m_omStrFileName = omFileDlg.GetPathName(); // Update List Item Text m_omLstcReplayFiles.SetItemText( m_nSelecetedNamedLogIndex, 0, ouFile.m_omStrFileName ); // Set File Name in the editbox m_omEditReplayFileName.SetWindowText( ouFile.m_omStrFileName ); } } } /** * This function will be called when user check/uncheck the * retain reconded delay option. This will update replay file * details */ void CReplayFileConfigDlg::OnChkRetainRecordedDelay() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get Selected Item Details CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( m_nSelecetedNamedLogIndex ); // Get the check value BOOL bValue = m_omChkRetainDelay.GetCheck(); if( bValue == TRUE ) { // Disable the Msg Delay edit control m_omEditMsgDelay.EnableWindow( FALSE ); } else { // Enable Msg Delay Edit control and assign the value from // data m_omEditMsgDelay.EnableWindow( TRUE ); m_omEditMsgDelay.vSetValue( ouFile.m_unMsgTimeDelay ); } // Update type in the data ouFile.m_nTimeMode = !bValue; } } /** * This function will be called during message delay editbox * change. 
This will update message delay value of replay file */ void CReplayFileConfigDlg::OnUpdateEditMsgDelay() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get Selected Item Details CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( m_nSelecetedNamedLogIndex ); // Get the delay Value UINT unValue = static_cast<UINT>( m_omEditMsgDelay.lGetValue() ); if(unValue > 60000) { AfxMessageBox("Time delay between messages cannot be more than 60000 milliseconds"); CString omstrDelay; omstrDelay.Format("%d", unValue/10); m_omEditMsgDelay.SetWindowTextA(omstrDelay); m_omEditMsgDelay.SetSel(0,omstrDelay.GetLength()); } // Update type in the data ouFile.m_unMsgTimeDelay = unValue; } } /** * This function will be called when user changes the replay * mode radio button. This will update replay file and will * enable cyclic delay editbox appropriatly */ void CReplayFileConfigDlg::OnRadioReplayModeMono() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get Selected Item Details CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( m_nSelecetedNamedLogIndex ); // Get replay Type UpdateData(); if( m_nReplayMode == 0 ) // Monoshot { // Disable Cyclic Time Edit control m_omEditCycleDelay.EnableWindow( FALSE ); } else { m_omEditCycleDelay.EnableWindow( TRUE ); m_omEditCycleDelay.vSetValue( ouFile.m_unCycleTimeDelay ); } // Update type in the data ouFile.m_nReplayMode = m_nReplayMode; } } /** * This function will be called during cycle delay editbox * change. 
This will update cycle delay value of replay file */ void CReplayFileConfigDlg::OnUpdateEditCycleDelay() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get Selected Item Details CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( m_nSelecetedNamedLogIndex ); // Get the delay Value UINT unValue = static_cast<UINT>( m_omEditCycleDelay.lGetValue() ); if(unValue > 60000) { AfxMessageBox("Time delay between cycles cannot be more than 60000 milliseconds"); CString omstrDelay; omstrDelay.Format("%d", unValue/10); m_omEditCycleDelay.SetWindowTextA(omstrDelay); m_omEditCycleDelay.SetSel(0,omstrDelay.GetLength()); } // Update type in the data ouFile.m_unCycleTimeDelay = unValue; } } /** * This function will update interactive option of replay file * with the user selection */ void CReplayFileConfigDlg::OnChkInteractive() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Get Selected Item Details CReplayFile& ouFile = m_rouManager.m_omReplayFiles.ElementAt( m_nSelecetedNamedLogIndex ); // Get the delay Value BOOL bInteractive = m_omChkInteractive.GetCheck(); // Update type in the data ouFile.m_bInteractive = bInteractive; } } /** * \req RS_19_03 It shall be possible to add / remove any log file from the input data source set * * This function will be called when user selects Add button. * This will show file selection dialog to select replay file * and if the selection is valid this will add the selected file * in to the replay file list. 
*/ void CReplayFileConfigDlg::OnBtnAddFile() { // Throw File selection dialog to choose replay log file DWORD dwFlags = OFN_HIDEREADONLY | OFN_PATHMUSTEXIST | OFN_EXTENSIONDIFFERENT; CFileDialog omFileDlg( TRUE, defSTR_LOG_FILE_EXTENSION, NULL, dwFlags, defLOG_FILTER, NULL ); //Set the caption omFileDlg.m_ofn.lpstrTitle = _(defSTR_REPLAY_FILE_SELECTION_TITLE); // Show File open dialog if( omFileDlg.DoModal() == IDOK ) { CReplayFile ouNewReplayFile; // Assign the file name and leave the rest to default ouNewReplayFile.m_omStrFileName = omFileDlg.GetPathName(); // Add the data in to the replay file data list m_rouManager.m_omReplayFiles.Add( ouNewReplayFile ); // Insert this new item in to the list // Avoid UI update m_bUpdating = TRUE; // Get the size of the list int nIndex = m_omLstcReplayFiles.GetItemCount(); // Insert at the end m_omLstcReplayFiles.InsertItem( nIndex, ouNewReplayFile.m_omStrFileName, defREPLAY_FILE_IMAGE_INDEX ); // Update the checkbox status m_omLstcReplayFiles.SetCheck( nIndex, ouNewReplayFile.m_bEnabled ); // This is the first item then enable Replay File components if( nIndex == 0 ) { vEnableDisableButtons(); } // Set the selection to this new item // Enable UI Update m_bUpdating = FALSE; m_omLstcReplayFiles.SetItemState( nIndex, LVIS_SELECTED | LVIS_FOCUSED, LVIS_SELECTED | LVIS_FOCUSED ); } } /** * \req RS_19_03 It shall be possible to add / remove any log file from the input data source set * * This functuion will handle Delete button event. This will get * delete conformation from the user and will remove the * selected replay file from the list if user conforms. 
*/ void CReplayFileConfigDlg::OnBtnDeleteFile() { if( m_nSelecetedNamedLogIndex != -1 && m_nSelecetedNamedLogIndex < m_rouManager.m_omReplayFiles.GetSize() ) { // Ask user about file delete int nResult = AfxMessageBox( _(defSTR_DELETE_CONFORMATION), MB_YESNO|MB_ICONQUESTION ) ; if ( nResult == IDYES ) { // Avoid UI update m_bUpdating = TRUE; // Remove the item from the list m_omLstcReplayFiles.DeleteItem( m_nSelecetedNamedLogIndex ); // Remove the item from data list m_rouManager.m_omReplayFiles.RemoveAt( m_nSelecetedNamedLogIndex ); // Set the focus to the next available item int nSize = m_omLstcReplayFiles.GetItemCount(); if( m_nSelecetedNamedLogIndex > ( nSize - 1 ) ) { m_nSelecetedNamedLogIndex = nSize - 1; } // Enable UI Update m_bUpdating = TRUE; if( m_nSelecetedNamedLogIndex != -1 ) { // Set the selection m_omLstcReplayFiles.SetItemState( m_nSelecetedNamedLogIndex, LVIS_SELECTED | LVIS_FOCUSED, LVIS_SELECTED | LVIS_FOCUSED ); } else { // Update Button Status vEnableDisableButtons(); } } } } /** * \param bEnable TRUE to enable and FALSE to disable * * To enable/disable replay UI components */ VOID CReplayFileConfigDlg::vEnableReplayComps( BOOL bEnable ) { // File Name m_omEditReplayFileName.EnableWindow( bEnable ); // Time Mode m_omChkRetainDelay.EnableWindow( bEnable ); // User Specific Msg Delay // Retain Delay is enabled. 
So Disable this edit control if( m_omChkRetainDelay.GetCheck() == TRUE ) { m_omEditMsgDelay.EnableWindow( FALSE ); } else { m_omEditMsgDelay.EnableWindow( bEnable ); } // Delay Between Cycles if( m_nReplayMode == 0 ) { m_omEditCycleDelay.EnableWindow( FALSE ); } else { m_omEditCycleDelay.EnableWindow( bEnable ); } // Interactive Replay Option m_omChkInteractive.EnableWindow( bEnable ); // Repolay Mode Option buttons CWnd* pWnd = NULL; // Monoshot pWnd = GetDlgItem( IDC_RADIO_REPLAY_MODE_MONO ); if( pWnd != NULL ) { pWnd->EnableWindow( bEnable ); } // Cyclic pWnd = GetDlgItem( IDC_RADIO_REPLAY_MODE_CYCLIC ); if( pWnd != NULL ) { pWnd->EnableWindow( bEnable ); } // Filter Button pWnd = GetDlgItem( IDC_BTN_FILTER ); if( pWnd != NULL ) { pWnd->EnableWindow( bEnable ); } // Msg Button pWnd = GetDlgItem( IDC_COMBO_MSG_TYPE ); if( pWnd != NULL ) { pWnd->EnableWindow( bEnable ); } } static void vPopulateMainSubList(CMainEntryList& DestList, const SFILTERAPPLIED_CAN* psFilterConfigured, const SFILTERAPPLIED_CAN* psFilterApplied) { ASSERT(psFilterConfigured != NULL); DestList.RemoveAll(); SMAINENTRY sMainEntry; sMainEntry.m_omMainEntryName = "CAN"; if (psFilterApplied == NULL) { SMAINENTRY sMainEntry; sMainEntry.m_omMainEntryName = "FILTER_SELECTION_CAN"; for (INT i = 0; i < psFilterConfigured->m_ushTotal; i++) { SSUBENTRY sSubEntry; sSubEntry.m_omSubEntryName.Format("%s", psFilterConfigured->m_psFilters[i].m_sFilterName.m_acFilterName); sMainEntry.m_odUnSelEntryList.AddTail(sSubEntry); } } else { for (INT i = 0; i < psFilterConfigured->m_ushTotal; i++) { SSUBENTRY sSubEntry; sSubEntry.m_omSubEntryName.Format("%s", psFilterConfigured->m_psFilters[i].m_sFilterName.m_acFilterName); if (SFILTERSET::psGetFilterSetPointer(psFilterApplied->m_psFilters, psFilterApplied->m_ushTotal, sSubEntry.m_omSubEntryName.GetBuffer(MAX_PATH)) != NULL) { sMainEntry.m_odSelEntryList.AddTail(sSubEntry); } else { sMainEntry.m_odUnSelEntryList.AddTail(sSubEntry); } } } DestList.AddTail(sMainEntry); } 
static void vPopulateFilterApplied(const SFILTERAPPLIED_CAN* psFilterConfigured, SFILTERAPPLIED_CAN& sFilterApplied, CMainEntryList& SrcList) { const SMAINENTRY& sMainEntry = SrcList.GetHead(); int nCount = (int)sMainEntry.m_odSelEntryList.GetCount(); sFilterApplied.vClear(); sFilterApplied.m_psFilters = new SFILTERSET[nCount]; POSITION pos = sMainEntry.m_odSelEntryList.GetHeadPosition(); while (pos) { SSUBENTRY sSubEntry = sMainEntry.m_odSelEntryList.GetNext(pos); const PSFILTERSET psTemp = SFILTERSET::psGetFilterSetPointer(psFilterConfigured->m_psFilters, psFilterConfigured->m_ushTotal, sSubEntry.m_omSubEntryName.GetBuffer(MAX_PATH)); ASSERT (psTemp != NULL); sFilterApplied.m_psFilters[sFilterApplied.m_ushTotal].bClone(*psTemp); sFilterApplied.m_ushTotal++; } } /** * This is filter button handler. This will invoke filter * configuation dialog and will update filter list if user * modifies the filter list of the replay file. */ void CReplayFileConfigDlg::OnBtnFilter() { CStringArray omStrFilers; // Get List of Filters from Filter Manager //CFilterManager::ouGetFilterManager().vGetFilterNameList( omStrFilers ); if( m_nSelecetedNamedLogIndex != -1 ) { CReplayFile ouReplayFile = m_rouManager.m_omReplayFiles[ m_nSelecetedNamedLogIndex ]; CString omStrTitle; omStrTitle.Format( _(defSTR_REPALY_FILTER_DLG_TITLE), ouReplayFile.m_omStrFileName ); CMainEntryList DestList; vPopulateMainSubList(DestList, m_psFilterConfigured, &(ouReplayFile.m_sFilterApplied) ); //Show dialog if (Filter_ShowSelDlg(this, &DestList) == IDOK) { vPopulateFilterApplied(m_psFilterConfigured, ouReplayFile.m_sFilterApplied, DestList); ouReplayFile.m_sFilterApplied.m_bEnabled = 0; m_rouManager.m_omReplayFiles[ m_nSelecetedNamedLogIndex ] = ouReplayFile; } } } void CReplayFileConfigDlg::OnComboMsgTypeChanged() { if( m_nSelecetedNamedLogIndex != -1 ) { UpdateData(TRUE); CReplayFile& ouReplayFile = m_rouManager.m_omReplayFiles[ m_nSelecetedNamedLogIndex ]; if(m_omStrMsgType == _(defSTR_MSG_DIR_TX)) { 
ouReplayFile.m_ouReplayMsgType = DIR_TX; } else if(m_omStrMsgType == _(defSTR_MSG_DIR_RX)) { ouReplayFile.m_ouReplayMsgType = DIR_RX; <|fim▁hole|> { ouReplayFile.m_ouReplayMsgType = DIR_ALL; } } }<|fim▁end|>
} else
<|file_name|>al_VideoCapture.cpp<|end_file_name|><|fim▁begin|>#include "allocv/al_VideoCapture.hpp" /* From opencv2/highgui/highgui.hpp: class CV_EXPORTS_W VideoCapture { public: CV_WRAP VideoCapture(); CV_WRAP VideoCapture(const string& filename); CV_WRAP VideoCapture(int device); virtual ~VideoCapture(); CV_WRAP virtual bool open(const string& filename); CV_WRAP virtual bool open(int device); CV_WRAP virtual bool isOpened() const; CV_WRAP virtual void release(); // Grabs the next frame from video file or capturing device. CV_WRAP virtual bool grab(); // Decodes and returns the grabbed video frame. CV_WRAP virtual bool retrieve(CV_OUT Mat& image, int channel=0); // Alias of read() virtual VideoCapture& operator >> (CV_OUT Mat& image); // Grabs, decodes and returns the next video frame. CV_WRAP virtual bool read(CV_OUT Mat& image); CV_WRAP virtual bool set(int propId, double value); CV_WRAP virtual double get(int propId); protected: Ptr<CvCapture> cap; }; */ namespace al{ VideoCapture::VideoCapture() : mFPS(1.), mRate(1.), mBadFrame(-1), mIsFile(false), mValid(true) {} VideoCapture::~VideoCapture(){ mValid = false; cvVideoCapture.release(); } bool VideoCapture::open(const std::string& filename){ if(cvVideoCapture.open(filename)){ mIsFile = true; mFPS = get(CV_CAP_PROP_FPS); return true; } return false; } bool VideoCapture::open(int device){ if(cvVideoCapture.open(device)){ mIsFile = false; mFPS = get(CV_CAP_PROP_FPS); if(mFPS == 0.) 
mFPS = 30.; return true; } return false; } void VideoCapture::release(){ cvVideoCapture.release(); } bool VideoCapture::grab(){ bool didGrab = cvVideoCapture.grab(); // Attempt to advance past bad frames in video files if(isFile()){ if(!didGrab){ if(mBadFrame == -1){ // last frame wasn't bad mBadFrame = posFrames()+1; } printf("VideoCapture::grab: bad frame %g\n", mBadFrame); if(mBadFrame < numFrames()){ posFrames(mBadFrame); ++mBadFrame; } } else{ mBadFrame = -1; } } return didGrab; } bool VideoCapture::retrieve(cv::Mat& dst, int chan){ return cvVideoCapture.retrieve(dst, chan); } bool VideoCapture::retrieve(int chan){ return cvVideoCapture.retrieve(cvFrame, chan); } bool VideoCapture::retrieve(Array& dst, int chan, int copyPolicy){ bool res = retrieve(chan); fromCV(dst, cvFrame, copyPolicy); return res; } bool VideoCapture::retrieveFlip(Array& dst, int chan){ return retrieve(dst, chan, -1); } bool VideoCapture::read(Array& dst, int copyPolicy){ bool res = cvVideoCapture.read(cvFrame); fromCV(dst, cvFrame, copyPolicy); return res; } bool VideoCapture::set(int cvCapProp, double val){ return cvVideoCapture.set(cvCapProp,val); } VideoCapture& VideoCapture::width(double pixels){ set(CV_CAP_PROP_FRAME_WIDTH, pixels); return *this; } VideoCapture& VideoCapture::height(double pixels){ set(CV_CAP_PROP_FRAME_HEIGHT, pixels); return *this; } VideoCapture& VideoCapture::resize(double w, double h){ return width(w).height(h); } VideoCapture& VideoCapture::posMsec(double msec){ set(CV_CAP_PROP_POS_MSEC, msec); return *this; } VideoCapture& VideoCapture::posFrames(double frame){ set(CV_CAP_PROP_POS_FRAMES, frame); return *this; } VideoCapture& VideoCapture::posFrac(double frac){ //set(CV_CAP_PROP_POS_AVI_RATIO, frac); // broken for many file types posFrames(frac*numFrames()); return *this; } VideoCapture& VideoCapture::fps(double val){ mFPS = val; return *this; } VideoCapture& VideoCapture::rate(double fpsMul){ mRate = fpsMul; return *this; } bool VideoCapture::isOpened() const 
{ return cvVideoCapture.isOpened(); } double VideoCapture::get(int cvCapProp) const { return cvVideoCapture.get(cvCapProp); } double VideoCapture::fps() const { return mFPS; } double VideoCapture::numFrames() const { return get(CV_CAP_PROP_FRAME_COUNT); } double VideoCapture::rate() const { return mRate; } double VideoCapture::width() const { return get(CV_CAP_PROP_FRAME_WIDTH); } double VideoCapture::height() const { return get(CV_CAP_PROP_FRAME_HEIGHT); } double VideoCapture::aspect() const { double w = width(); double h = height(); return (h!=0. && w!=0.) ? w/h : 1.; } bool VideoCapture::rgb() const { return get(CV_CAP_PROP_CONVERT_RGB); } int VideoCapture::fourcc() const { return int(get(CV_CAP_PROP_FOURCC)); } std::string VideoCapture::fourccString() const { union{ int i; char c[4]; } x = { fourcc() }; return std::string(x.c, 4); } double VideoCapture::posMsec() const { return get(CV_CAP_PROP_POS_MSEC); } double VideoCapture::posFrames() const { return get(CV_CAP_PROP_POS_FRAMES); } double VideoCapture::posFrac() const { //return get(CV_CAP_PROP_POS_AVI_RATIO); // broken for many file types return double(posFrames())/numFrames(); } bool VideoCapture::loop(double minFrame, double maxFrame){ double Nf = numFrames(); if(maxFrame < 0) maxFrame += Nf + 1.; else if(maxFrame > Nf) maxFrame = Nf; double pos = posFrames(); if(pos >= maxFrame){ posFrames(minFrame); return true; } return false; } bool VideoCapture::isFile() const { return mIsFile; } void VideoCapture::print(FILE * fp){ fprintf(fp, "%g x %g %s %s, %g fps", width(), height(), rgb()?"RGB":"BGR", fourccString().c_str(), fps()); if(isFile()){ fprintf(fp, ", %g frames (%g sec)", numFrames(), numFrames()/fps()); } fprintf(fp, "\n"); }<|fim▁hole|> VideoCaptureHandler::VideoThreadFunction::VideoThreadFunction() : videoCapture(NULL), handler(NULL), streamIdx(-1) {} VideoCaptureHandler::VideoThreadFunction::~VideoThreadFunction() { videoCapture = NULL; } void VideoCaptureHandler::VideoThreadFunction::operator()(){ 
//printf("VideoThreadFunc called\n"); if(NULL != videoCapture && videoCapture->mValid && videoCapture->cvVideoCapture.isOpened()){ handler->onPregrab(*videoCapture, streamIdx); if(videoCapture->grab()){ handler->onVideo(*videoCapture, streamIdx); double fps = videoCapture->fps() * videoCapture->rate(); handler->mWorkThreads[streamIdx].thread.period(1./fps); } } } VideoCaptureHandler::WorkThread::~WorkThread(){ stop(); } void VideoCaptureHandler::WorkThread::start(){ //printf("WorkThread::start(): %p %p\n", func.videoCapture, func.handler); thread.start(func); } void VideoCaptureHandler::WorkThread::stop(){ thread.stop(); } VideoCaptureHandler::VideoCaptureHandler(int numStreams){ numVideoStreams(numStreams); } VideoCaptureHandler::~VideoCaptureHandler(){ stopVideo(); } VideoCaptureHandler& VideoCaptureHandler::numVideoStreams(int num){ mWorkThreads.resize(num); return *this; } int VideoCaptureHandler::numVideoStreams() const { return int(mWorkThreads.size()); } VideoCaptureHandler& VideoCaptureHandler::attach(VideoCapture& vid, int streamIdx){ if(streamIdx>=0 && streamIdx<numVideoStreams()){ WorkThread& t = mWorkThreads[streamIdx]; t.func.handler = this; t.func.videoCapture = &vid; t.func.streamIdx = streamIdx; } return *this; } void VideoCaptureHandler::startVideo(){ for( WorkThreads::iterator it = mWorkThreads.begin(); it != mWorkThreads.end(); ++it ){ (*it).start(); } } void VideoCaptureHandler::stopVideo(){ for( WorkThreads::iterator it = mWorkThreads.begin(); it != mWorkThreads.end(); ++it ){ (*it).stop(); } } } // al::<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" PHCpy --- a package for Polynomial Homotopy Continuation ======================================================== PHCpy is a collection of Python modules to compute solutions of polynomial systems using PHCpack. A homotopy defines the deformation of a start system (system with known solutions) into the target system (system that has to be solved). Continuation or path tracking methods apply numerical predictor-corrector techniques to track the solution paths defined by the homotopy, starting at the known solutions of the start system and ending at the solutions of the target system. <|fim▁hole|>----------------- solver exports the blackbox solver of PHCpack, a mixed volume calculator, a path tracker, functions to construct start systems, and deflation to recondition isolated singular solutions. solutions solutions of phcpy.solve are lists of PHCpack solution strings and this module exports operations to convert the solution strings into Python dictionaries, e.g. for evaluation. interface data transfer from string representations of polynomials and solutions as the interface between Python and the C interface of PHCpack. trackers offers functions to track solution paths defined by a homotopy between a given start system with known solutions and a target system. maps module to work with monomial maps, defined as solution of systems that have exactly two monomials in every equation (binomial systems). sets offers tools to work with positive dimensional solution sets. examples defines some interesting examples taken from the research literature, the test() solves all systems, performing a regression test. families polynomial system often occur in families and are defined for any number of equations and variables, e.g.: the cyclic n-roots system. schubert exports the hypersurface and quantum Pieri homotopies to compute isolated solutions to problems in enumerative geometry. 
polytopes functions to work with Newton polytopes, to compute mixed volumes of Newton polytopes, given by tuples of support sets. phcwulf defines a simple client/server interaction to solve random trinomials. Calling the blackbox solver --------------------------- Polynomials and solutions are represented as strings. Below is an illustration of a session with the blackbox solver on a system of two random trinomials, polynomials with three monomials with random coefficients. >>> from phcpy.solver import random_trinomials >>> f = random_trinomials() >>> print f[0] (0.583339727743+0.81222826966115*i)*x^0*y^0\ +(-0.730410130891-0.68300881450520*i)*x^5*y^5\ +(0.547878834338+0.83655769847920*i)*x^5*y^0; >>> print f[1] (0.830635910813+0.55681593338247*i)*x^0*y^4\ +(0.456430547798-0.88975904324518*i)*x^1*y^4\ +(0.034113254002-0.99941797357332*i)*x^2*y^1; >>> from phcpy.solver import solve >>> s = solve(f,silent=True) >>> len(s) 30 >>> print s[2] t : 1.00000000000000E+00 0.00000000000000E+00 m : 1 the solution for t : x : -9.99963006604849E-01 8.60147787997449E-03 y : 0.00000000000000E+00 0.00000000000000E+00 == err : 4.325E-17 = rco : 2.020E-01 = res : 1.665E-16 = >>> The solve command returned a list of 30 strings in s, each string represents a solution that makes the polynomials in f vanish. The module solutions offers function to evaluate the solutions in the polynomials given as strings. """ try: from phcpy.phcpy2c3 import py2c_PHCpack_version_string print(py2c_PHCpack_version_string() + ' works!') except: print('Is the phcpy2c3.so not suited for this platform?') # The version number is defined as a data attribute. __version__ = '0.4.1'<|fim▁end|>
Available modules
<|file_name|>dynamic_factor.py<|end_file_name|><|fim▁begin|>""" Dynamic factor model Author: Chad Fulton License: Simplified-BSD """ from __future__ import division, absolute_import, print_function from warnings import warn from statsmodels.compat.collections import OrderedDict import numpy as np import pandas as pd from .kalman_filter import KalmanFilter, FilterResults from .mlemodel import MLEModel, MLEResults, MLEResultsWrapper from .tools import ( companion_matrix, diff, is_invertible, constrain_stationary_univariate, unconstrain_stationary_univariate, constrain_stationary_multivariate, unconstrain_stationary_multivariate ) from scipy.linalg import solve_discrete_lyapunov from statsmodels.multivariate.pca import PCA from statsmodels.regression.linear_model import OLS from statsmodels.tsa.vector_ar.var_model import VAR from statsmodels.tools.tools import Bunch from statsmodels.tools.data import _is_using_pandas from statsmodels.tsa.tsatools import lagmat from statsmodels.tools.decorators import cache_readonly from statsmodels.tools.sm_exceptions import ValueWarning import statsmodels.base.wrapper as wrap class DynamicFactor(MLEModel): r""" Dynamic factor model Parameters ---------- endog : array_like The observed time-series process :math:`y` exog : array_like, optional Array of exogenous regressors for the observation equation, shaped nobs x k_exog. k_factors : int The number of unobserved factors. factor_order : int The order of the vector autoregression followed by the factors. error_cov_type : {'scalar', 'diagonal', 'unstructured'}, optional The structure of the covariance matrix of the observation error term, where "unstructured" puts no restrictions on the matrix, "diagonal" requires it to be any diagonal matrix (uncorrelated errors), and "scalar" requires it to be a scalar times the identity matrix. Default is "diagonal". error_order : int, optional The order of the vector autoregression followed by the observation error component. 
Default is None, corresponding to white noise errors. error_var : boolean, optional Whether or not to model the errors jointly via a vector autoregression, rather than as individual autoregressions. Has no effect unless `error_order` is set. Default is False. enforce_stationarity : boolean, optional Whether or not to transform the AR parameters to enforce stationarity in the autoregressive component of the model. Default is True. **kwargs Keyword arguments may be used to provide default values for state space matrices or for Kalman filtering options. See `Representation`, and `KalmanFilter` for more details. Attributes ---------- exog : array_like, optional Array of exogenous regressors for the observation equation, shaped nobs x k_exog. k_factors : int The number of unobserved factors. factor_order : int The order of the vector autoregression followed by the factors. error_cov_type : {'diagonal', 'unstructured'} The structure of the covariance matrix of the error term, where "unstructured" puts no restrictions on the matrix and "diagonal" requires it to be a diagonal matrix (uncorrelated errors). error_order : int The order of the vector autoregression followed by the observation error component. error_var : boolean Whether or not to model the errors jointly via a vector autoregression, rather than as individual autoregressions. Has no effect unless `error_order` is set. enforce_stationarity : boolean, optional Whether or not to transform the AR parameters to enforce stationarity in the autoregressive component of the model. Default is True. Notes ----- The dynamic factor model considered here is in the so-called static form, and is specified: .. math:: y_t & = \Lambda f_t + B x_t + u_t \\ f_t & = A_1 f_{t-1} + \dots + A_p f_{t-p} + \eta_t \\ u_t & = C_1 u_{t-1} + \dots + C_1 f_{t-q} + \varepsilon_t where there are `k_endog` observed series and `k_factors` unobserved factors. Thus :math:`y_t` is a `k_endog` x 1 vector and :math:`f_t` is a `k_factors` x 1 vector. 
:math:`x_t` are optional exogenous vectors, shaped `k_exog` x 1. :math:`\eta_t` and :math:`\varepsilon_t` are white noise error terms. In order to identify the factors, :math:`Var(\eta_t) = I`. Denote :math:`Var(\varepsilon_t) \equiv \Sigma`. Options related to the unobserved factors: - `k_factors`: this is the dimension of the vector :math:`f_t`, above. To exclude factors completely, set `k_factors = 0`. - `factor_order`: this is the number of lags to include in the factor evolution equation, and corresponds to :math:`p`, above. To have static factors, set `factor_order = 0`. Options related to the observation error term :math:`u_t`: - `error_order`: the number of lags to include in the error evolution equation; corresponds to :math:`q`, above. To have white noise errors, set `error_order = 0` (this is the default). - `error_cov_type`: this controls the form of the covariance matrix :math:`\Sigma`. If it is "dscalar", then :math:`\Sigma = \sigma^2 I`. If it is "diagonal", then :math:`\Sigma = \text{diag}(\sigma_1^2, \dots, \sigma_n^2)`. If it is "unstructured", then :math:`\Sigma` is any valid variance / covariance matrix (i.e. symmetric and positive definite). - `error_var`: this controls whether or not the errors evolve jointly according to a VAR(q), or individually according to separate AR(q) processes. In terms of the formulation above, if `error_var = False`, then the matrices :math:C_i` are diagonal, otherwise they are general VAR matrices. References ---------- .. [1] Lutkepohl, Helmut. 2007. New Introduction to Multiple Time Series Analysis. Berlin: Springer. 
""" def __init__(self, endog, k_factors, factor_order, exog=None, error_order=0, error_var=False, error_cov_type='diagonal', enforce_stationarity=True, **kwargs): # Model properties self.enforce_stationarity = enforce_stationarity # Factor-related properties self.k_factors = k_factors self.factor_order = factor_order # Error-related properties self.error_order = error_order self.error_var = error_var and error_order > 0 self.error_cov_type = error_cov_type # Exogenous data self.k_exog = 0 if exog is not None: exog_is_using_pandas = _is_using_pandas(exog, None) if not exog_is_using_pandas: exog = np.asarray(exog) # Make sure we have 2-dimensional array if exog.ndim == 1: if not exog_is_using_pandas: exog = exog[:, None] else: exog = pd.DataFrame(exog) self.k_exog = exog.shape[1] # Note: at some point in the future might add state regression, as in # SARIMAX. self.mle_regression = self.k_exog > 0 # We need to have an array or pandas at this point if not _is_using_pandas(endog, None): endog = np.asanyarray(endog, order='C') # Save some useful model orders, internally used k_endog = endog.shape[1] if endog.ndim > 1 else 1 self._factor_order = max(1, self.factor_order) * self.k_factors self._error_order = self.error_order * k_endog # Calculate the number of states k_states = self._factor_order k_posdef = self.k_factors if self.error_order > 0: k_states += self._error_order k_posdef += k_endog if k_states == 0: k_states = 1 k_posdef = 1 # Test for non-multivariate endog if k_endog < 2: raise ValueError('The dynamic factors model is only valid for' ' multivariate time series.') # Test for too many factors if self.k_factors >= k_endog: raise ValueError('Number of factors must be less than the number' ' of endogenous variables.') # Test for invalid error_cov_type if self.error_cov_type not in ['scalar', 'diagonal', 'unstructured']: raise ValueError('Invalid error covariance matrix type' ' specification.') # By default, initialize as stationary 
kwargs.setdefault('initialization', 'stationary') # Initialize the state space model super(DynamicFactor, self).__init__( endog, exog=exog, k_states=k_states, k_posdef=k_posdef, **kwargs ) # Initialize the components self.parameters = OrderedDict() self._initialize_loadings() self._initialize_exog() self._initialize_error_cov() self._initialize_factor_transition() self._initialize_error_transition() self.k_params = sum(self.parameters.values()) # Cache parameter vector slices def _slice(key, offset): length = self.parameters[key] param_slice = np.s_[offset:offset + length] offset += length return param_slice, offset offset = 0 self._params_loadings, offset = _slice('factor_loadings', offset) self._params_exog, offset = _slice('exog', offset) self._params_error_cov, offset = _slice('error_cov', offset) self._params_factor_transition, offset = ( _slice('factor_transition', offset)) self._params_error_transition, offset = ( _slice('error_transition', offset)) def _initialize_loadings(self): # Initialize the parameters self.parameters['factor_loadings'] = self.k_endog * self.k_factors # Setup fixed components of state space matrices if self.error_order > 0: start = self._factor_order end = self._factor_order + self.k_endog self.ssm['design', :, start:end] = np.eye(self.k_endog) # Setup indices of state space matrices self._idx_loadings = np.s_['design', :, :self.k_factors] def _initialize_exog(self): # Initialize the parameters self.parameters['exog'] = self.k_exog * self.k_endog # If we have exog effects, then the obs intercept needs to be # time-varying if self.k_exog > 0: self.ssm['obs_intercept'] = np.zeros((self.k_endog, self.nobs)) # Setup indices of state space matrices self._idx_exog = np.s_['obs_intercept', :self.k_endog, :] def _initialize_error_cov(self): if self.error_cov_type == 'scalar': self._initialize_error_cov_diagonal(scalar=True) elif self.error_cov_type == 'diagonal': self._initialize_error_cov_diagonal(scalar=False) elif self.error_cov_type == 
'unstructured': self._initialize_error_cov_unstructured() def _initialize_error_cov_diagonal(self, scalar=False): # Initialize the parameters self.parameters['error_cov'] = 1 if scalar else self.k_endog # Setup fixed components of state space matrices # Setup indices of state space matrices k_endog = self.k_endog k_factors = self.k_factors idx = np.diag_indices(k_endog) if self.error_order > 0: matrix = 'state_cov' idx = (idx[0] + k_factors, idx[1] + k_factors) else: matrix = 'obs_cov' self._idx_error_cov = (matrix,) + idx def _initialize_error_cov_unstructured(self): # Initialize the parameters k_endog = self.k_endog self.parameters['error_cov'] = int(k_endog * (k_endog + 1) / 2) # Setup fixed components of state space matrices # Setup indices of state space matrices self._idx_lower_error_cov = np.tril_indices(self.k_endog) if self.error_order > 0: start = self.k_factors end = self.k_factors + self.k_endog self._idx_error_cov = ( np.s_['state_cov', start:end, start:end]) else: self._idx_error_cov = np.s_['obs_cov', :, :] def _initialize_factor_transition(self): order = self.factor_order * self.k_factors k_factors = self.k_factors # Initialize the parameters self.parameters['factor_transition'] = ( self.factor_order * self.k_factors**2) # Setup fixed components of state space matrices # VAR(p) for factor transition if self.k_factors > 0: if self.factor_order > 0: self.ssm['transition', k_factors:order, :order - k_factors] = ( np.eye(order - k_factors)) self.ssm['selection', :k_factors, :k_factors] = np.eye(k_factors) # Identification requires constraining the state covariance to an # identity matrix self.ssm['state_cov', :k_factors, :k_factors] = np.eye(k_factors) # Setup indices of state space matrices self._idx_factor_transition = np.s_['transition', :k_factors, :order] def _initialize_error_transition(self): # Initialize the appropriate situation if self.error_order == 0: self._initialize_error_transition_white_noise() else: # Generic setup fixed components of 
state space matrices # VAR(q) for error transition # (in the individual AR case, we still have the VAR(q) companion # matrix structure, but force the coefficient matrices to be # diagonal) k_endog = self.k_endog k_factors = self.k_factors _factor_order = self._factor_order _error_order = self._error_order _slice = np.s_['selection', _factor_order:_factor_order + k_endog, k_factors:k_factors + k_endog] self.ssm[_slice] = np.eye(k_endog) _slice = np.s_[ 'transition', _factor_order + k_endog:_factor_order + _error_order, _factor_order:_factor_order + _error_order - k_endog] self.ssm[_slice] = np.eye(_error_order - k_endog) # Now specialized setups if self.error_var: self._initialize_error_transition_var() else: self._initialize_error_transition_individual() def _initialize_error_transition_white_noise(self): # Initialize the parameters self.parameters['error_transition'] = 0 # No fixed components of state space matrices # Setup indices of state space matrices (just an empty slice) self._idx_error_transition = np.s_['transition', 0:0, 0:0] def _initialize_error_transition_var(self): k_endog = self.k_endog _factor_order = self._factor_order _error_order = self._error_order # Initialize the parameters self.parameters['error_transition'] = _error_order * k_endog # Fixed components already setup above # Setup indices of state space matrices # Here we want to set all of the elements of the coefficient matrices, # the same as in a VAR specification self._idx_error_transition = np.s_[ 'transition', _factor_order:_factor_order + k_endog, _factor_order:_factor_order + _error_order] def _initialize_error_transition_individual(self): k_endog = self.k_endog _factor_order = self._factor_order _error_order = self._error_order # Initialize the parameters self.parameters['error_transition'] = _error_order # Fixed components already setup above # Setup indices of state space matrices # Here we want to set only the diagonal elements of the coefficient # matrices, and we want to set them 
in order by equation, not by # matrix (i.e. set the first element of the first matrix's diagonal, # then set the first element of the second matrix's diagonal, then...) # The basic setup is a tiled list of diagonal indices, one for each # coefficient matrix idx = np.tile(np.diag_indices(k_endog), self.error_order) # Now we need to shift the rows down to the correct location row_shift = self._factor_order # And we need to shift the columns in an increasing way col_inc = self._factor_order + np.repeat( [i * k_endog for i in range(self.error_order)], k_endog) idx[0] += row_shift idx[1] += col_inc # Make a copy (without the row shift) so that we can easily get the # diagonal parameters back out of a generic coefficients matrix array idx_diag = idx.copy() idx_diag[0] -= row_shift idx_diag[1] -= self._factor_order idx_diag = idx_diag[:, np.lexsort((idx_diag[1], idx_diag[0]))] self._idx_error_diag = (idx_diag[0], idx_diag[1]) # Finally, we want to fill the entries in in the correct order, which # is to say we want to fill in lexicographically, first by row then by # column idx = idx[:, np.lexsort((idx[1], idx[0]))] self._idx_error_transition = np.s_['transition', idx[0], idx[1]] def filter(self, params, **kwargs): kwargs.setdefault('results_class', DynamicFactorResults) kwargs.setdefault('results_wrapper_class', DynamicFactorResultsWrapper) return super(DynamicFactor, self).filter(params, **kwargs) def smooth(self, params, **kwargs): kwargs.setdefault('results_class', DynamicFactorResults) kwargs.setdefault('results_wrapper_class', DynamicFactorResultsWrapper) return super(DynamicFactor, self).smooth(params, **kwargs) @property def start_params(self): params = np.zeros(self.k_params, dtype=np.float64) endog = self.endog.copy() # 1. 
Factor loadings (estimated via PCA) if self.k_factors > 0: # Use principal components + OLS as starting values res_pca = PCA(endog, ncomp=self.k_factors) mod_ols = OLS(endog, res_pca.factors) res_ols = mod_ols.fit() # Using OLS params for the loadings tends to gives higher starting # log-likelihood. params[self._params_loadings] = res_ols.params.T.ravel() # params[self._params_loadings] = res_pca.loadings.ravel() # However, using res_ols.resid tends to causes non-invertible # starting VAR coefficients for error VARs # endog = res_ols.resid endog = endog - np.dot(res_pca.factors, res_pca.loadings.T) # 2. Exog (OLS on residuals) if self.k_exog > 0: mod_ols = OLS(endog, exog=self.exog) res_ols = mod_ols.fit() # In the form: beta.x1.y1, beta.x2.y1, beta.x1.y2, ... params[self._params_exog] = res_ols.params.T.ravel() endog = res_ols.resid # 3. Factors (VAR on res_pca.factors) stationary = True if self.k_factors > 1 and self.factor_order > 0: # 3a. VAR transition (OLS on factors estimated via PCA) mod_factors = VAR(res_pca.factors) res_factors = mod_factors.fit(maxlags=self.factor_order, ic=None, trend='nc') # Save the parameters params[self._params_factor_transition] = ( res_factors.params.T.ravel()) # Test for stationarity coefficient_matrices = ( params[self._params_factor_transition].reshape( self.k_factors * self.factor_order, self.k_factors ).T ).reshape(self.k_factors, self.k_factors, self.factor_order).T stationary = is_invertible([1] + list(-coefficient_matrices)) elif self.k_factors > 0 and self.factor_order > 0: # 3b. 
AR transition Y = res_pca.factors[self.factor_order:] X = lagmat(res_pca.factors, self.factor_order, trim='both') params_ar = np.linalg.pinv(X).dot(Y) stationary = is_invertible(np.r_[1, -params_ar.squeeze()]) params[self._params_factor_transition] = params_ar[:, 0] # Check for stationarity if not stationary and self.enforce_stationarity: raise ValueError('Non-stationary starting autoregressive' ' parameters found with `enforce_stationarity`' ' set to True.') # 4. Errors if self.error_order == 0: error_params = [] if self.error_cov_type == 'scalar': params[self._params_error_cov] = endog.var(axis=0).mean() elif self.error_cov_type == 'diagonal': params[self._params_error_cov] = endog.var(axis=0) elif self.error_cov_type == 'unstructured': cov_factor = np.diag(endog.std(axis=0)) params[self._params_error_cov] = ( cov_factor[self._idx_lower_error_cov].ravel()) else: mod_errors = VAR(endog) res_errors = mod_errors.fit(maxlags=self.error_order, ic=None, trend='nc') # Test for stationarity coefficient_matrices = ( np.array(res_errors.params.T).ravel().reshape( self.k_endog * self.error_order, self.k_endog ).T ).reshape(self.k_endog, self.k_endog, self.error_order).T stationary = is_invertible([1] + list(-coefficient_matrices)) if not stationary and self.enforce_stationarity: raise ValueError('Non-stationary starting error autoregressive' ' parameters found with' ' `enforce_stationarity` set to True.') # Get the error autoregressive parameters if self.error_var: params[self._params_error_transition] = ( np.array(res_errors.params.T).ravel()) else: # In the case of individual autoregressions, extract just the # diagonal elements params[self._params_error_transition] = ( res_errors.params.T[self._idx_error_diag]) # Get the error covariance parameters if self.error_cov_type == 'scalar': params[self._params_error_cov] = ( res_errors.sigma_u.diagonal().mean()) elif self.error_cov_type == 'diagonal': params[self._params_error_cov] = res_errors.sigma_u.diagonal() elif 
self.error_cov_type == 'unstructured': try: cov_factor = np.linalg.cholesky(res_errors.sigma_u) except np.linalg.LinAlgError: cov_factor = np.eye(res_errors.sigma_u.shape[0]) * ( res_errors.sigma_u.diagonal().mean()**0.5) cov_factor = np.eye(res_errors.sigma_u.shape[0]) * ( res_errors.sigma_u.diagonal().mean()**0.5) params[self._params_error_cov] = ( cov_factor[self._idx_lower_error_cov].ravel()) return params @property def param_names(self): param_names = [] endog_names = self.endog_names # 1. Factor loadings param_names += [ 'loading.f%d.%s' % (j+1, endog_names[i]) for i in range(self.k_endog) for j in range(self.k_factors) ] # 2. Exog # Recall these are in the form: beta.x1.y1, beta.x2.y1, beta.x1.y2, ... param_names += [ 'beta.%s.%s' % (self.exog_names[j], endog_names[i]) for i in range(self.k_endog) for j in range(self.k_exog) ] # 3. Error covariances if self.error_cov_type == 'scalar': param_names += ['sigma2'] elif self.error_cov_type == 'diagonal': param_names += [ 'sigma2.%s' % endog_names[i] for i in range(self.k_endog) ] elif self.error_cov_type == 'unstructured': param_names += [ ('sqrt.var.%s' % endog_names[i] if i == j else 'sqrt.cov.%s.%s' % (endog_names[j], endog_names[i])) for i in range(self.k_endog) for j in range(i+1) ] # 4. Factor transition VAR param_names += [ 'L%d.f%d.f%d' % (i+1, k+1, j+1) for j in range(self.k_factors) for i in range(self.factor_order) for k in range(self.k_factors) ] # 5. 
Error transition VAR if self.error_var: param_names += [ 'L%d.e(%s).e(%s)' % (i+1, endog_names[k], endog_names[j]) for j in range(self.k_endog) for i in range(self.error_order) for k in range(self.k_endog) ] else: param_names += [ 'L%d.e(%s).e(%s)' % (i+1, endog_names[j], endog_names[j]) for j in range(self.k_endog) for i in range(self.error_order) ] return param_names def transform_params(self, unconstrained): """ Transform unconstrained parameters used by the optimizer to constrained parameters used in likelihood evaluation Parameters ---------- unconstrained : array_like Array of unconstrained parameters used by the optimizer, to be transformed. Returns ------- constrained : array_like Array of constrained parameters which may be used in likelihood evalation. Notes ----- Constrains the factor transition to be stationary and variances to be positive. """ unconstrained = np.array(unconstrained, ndmin=1) dtype = unconstrained.dtype constrained = np.zeros(unconstrained.shape, dtype=dtype) # 1. Factor loadings # The factor loadings do not need to be adjusted constrained[self._params_loadings] = ( unconstrained[self._params_loadings]) # 2. Exog # The regression coefficients do not need to be adjusted constrained[self._params_exog] = ( unconstrained[self._params_exog]) # 3. Error covariances # If we have variances, force them to be positive if self.error_cov_type in ['scalar', 'diagonal']: constrained[self._params_error_cov] = ( unconstrained[self._params_error_cov]**2) # Otherwise, nothing needs to be done elif self.error_cov_type == 'unstructured': constrained[self._params_error_cov] = ( unconstrained[self._params_error_cov]) # 4. 
Factor transition VAR # VAR transition: optionally force to be stationary if self.enforce_stationarity and self.factor_order > 0: # Transform the parameters unconstrained_matrices = ( unconstrained[self._params_factor_transition].reshape( self.k_factors, self._factor_order)) # This is always an identity matrix, but because the transform # done prior to update (where the ssm representation matrices # change), it may be complex cov = self.ssm[ 'state_cov', :self.k_factors, :self.k_factors].real coefficient_matrices, variance = ( constrain_stationary_multivariate(unconstrained_matrices, cov)) constrained[self._params_factor_transition] = ( coefficient_matrices.ravel()) else: constrained[self._params_factor_transition] = ( unconstrained[self._params_factor_transition]) # 5. Error transition VAR # VAR transition: optionally force to be stationary if self.enforce_stationarity and self.error_order > 0: # Joint VAR specification if self.error_var: unconstrained_matrices = ( unconstrained[self._params_error_transition].reshape( self.k_endog, self._error_order)) start = self.k_factors end = self.k_factors + self.k_endog cov = self.ssm['state_cov', start:end, start:end].real coefficient_matrices, variance = ( constrain_stationary_multivariate( unconstrained_matrices, cov)) constrained[self._params_error_transition] = ( coefficient_matrices.ravel()) # Separate AR specifications else: coefficients = ( unconstrained[self._params_error_transition].copy()) for i in range(self.k_endog): start = i * self.error_order end = (i + 1) * self.error_order coefficients[start:end] = constrain_stationary_univariate( coefficients[start:end]) constrained[self._params_error_transition] = coefficients else: constrained[self._params_error_transition] = ( unconstrained[self._params_error_transition]) return constrained def untransform_params(self, constrained): """ Transform constrained parameters used in likelihood evaluation to unconstrained parameters used by the optimizer. 
Parameters ---------- constrained : array_like Array of constrained parameters used in likelihood evalution, to be transformed. Returns ------- unconstrained : array_like Array of unconstrained parameters used by the optimizer. """ constrained = np.array(constrained, ndmin=1) dtype=constrained.dtype unconstrained = np.zeros(constrained.shape, dtype=dtype) # 1. Factor loadings # The factor loadings do not need to be adjusted unconstrained[self._params_loadings] = ( constrained[self._params_loadings]) # 2. Exog # The regression coefficients do not need to be adjusted unconstrained[self._params_exog] = ( constrained[self._params_exog]) # 3. Error covariances # If we have variances, force them to be positive if self.error_cov_type in ['scalar', 'diagonal']: unconstrained[self._params_error_cov] = ( constrained[self._params_error_cov]**0.5) # Otherwise, nothing needs to be done elif self.error_cov_type == 'unstructured': unconstrained[self._params_error_cov] = ( constrained[self._params_error_cov]) # 3. Factor transition VAR # VAR transition: optionally force to be stationary if self.enforce_stationarity and self.factor_order > 0: # Transform the parameters constrained_matrices = ( constrained[self._params_factor_transition].reshape( self.k_factors, self._factor_order)) cov = self.ssm[ 'state_cov', :self.k_factors, :self.k_factors].real coefficient_matrices, variance = ( unconstrain_stationary_multivariate( constrained_matrices, cov)) unconstrained[self._params_factor_transition] = ( coefficient_matrices.ravel()) else: unconstrained[self._params_factor_transition] = ( constrained[self._params_factor_transition]) # 5. 
Error transition VAR # VAR transition: optionally force to be stationary if self.enforce_stationarity and self.error_order > 0: # Joint VAR specification if self.error_var: constrained_matrices = ( constrained[self._params_error_transition].reshape( self.k_endog, self._error_order)) start = self.k_factors end = self.k_factors + self.k_endog cov = self.ssm['state_cov', start:end, start:end].real coefficient_matrices, variance = ( unconstrain_stationary_multivariate( constrained_matrices, cov)) unconstrained[self._params_error_transition] = ( coefficient_matrices.ravel()) # Separate AR specifications else: coefficients = ( constrained[self._params_error_transition].copy()) for i in range(self.k_endog): start = i * self.error_order end = (i + 1) * self.error_order coefficients[start:end] = ( unconstrain_stationary_univariate( coefficients[start:end])) unconstrained[self._params_error_transition] = coefficients else: unconstrained[self._params_error_transition] = ( constrained[self._params_error_transition]) return unconstrained def update(self, params, transformed=True, complex_step=False): """ Update the parameters of the model Updates the representation matrices to fill in the new parameter values. Parameters ---------- params : array_like Array of new parameters. transformed : boolean, optional Whether or not `params` is already transformed. If set to False, `transform_params` is called. Default is True.. Returns ------- params : array_like Array of parameters. Notes ----- Let `n = k_endog`, `m = k_factors`, and `p = factor_order`. Then the `params` vector has length :math:`[n \times m] + [n] + [m^2 \times p]`. It is expanded in the following way: - The first :math:`n \times m` parameters fill out the factor loading matrix, starting from the [0,0] entry and then proceeding along rows. These parameters are not modified in `transform_params`. - The next :math:`n` parameters provide variances for the error_cov errors in the observation equation. 
They fill in the diagonal of the observation covariance matrix, and are constrained to be positive by `transofrm_params`. - The next :math:`m^2 \times p` parameters are used to create the `p` coefficient matrices for the vector autoregression describing the factor transition. They are transformed in `transform_params` to enforce stationarity of the VAR(p). They are placed so as to make the transition matrix a companion matrix for the VAR. In particular, we assume that the first :math:`m^2` parameters fill the first coefficient matrix (starting at [0,0] and filling along rows), the second :math:`m^2` parameters fill the second matrix, etc. """ params = super(DynamicFactor, self).update( params, transformed=transformed, complex_step=complex_step) # 1. Factor loadings # Update the design / factor loading matrix self.ssm[self._idx_loadings] = ( params[self._params_loadings].reshape(self.k_endog, self.k_factors) ) # 2. Exog if self.k_exog > 0: exog_params = params[self._params_exog].reshape( self.k_endog, self.k_exog).T self.ssm[self._idx_exog] = np.dot(self.exog, exog_params).T # 3. Error covariances if self.error_cov_type in ['scalar', 'diagonal']: self.ssm[self._idx_error_cov] = ( params[self._params_error_cov]) elif self.error_cov_type == 'unstructured': error_cov_lower = np.zeros((self.k_endog, self.k_endog), dtype=params.dtype) error_cov_lower[self._idx_lower_error_cov] = ( params[self._params_error_cov]) self.ssm[self._idx_error_cov] = ( np.dot(error_cov_lower, error_cov_lower.T)) # 4. Factor transition VAR self.ssm[self._idx_factor_transition] = ( params[self._params_factor_transition].reshape( self.k_factors, self.factor_order * self.k_factors)) # 5. 
Error transition VAR if self.error_var: self.ssm[self._idx_error_transition] = ( params[self._params_error_transition].reshape( self.k_endog, self._error_order)) else: self.ssm[self._idx_error_transition] = ( params[self._params_error_transition]) class DynamicFactorResults(MLEResults): """ Class to hold results from fitting an DynamicFactor model. Parameters ---------- model : DynamicFactor instance The fitted model instance Attributes ---------- specification : dictionary Dictionary including all attributes from the DynamicFactor model instance. coefficient_matrices_var : array Array containing autoregressive lag polynomial coefficient matrices, ordered from lowest degree to highest. See Also -------- statsmodels.tsa.statespace.kalman_filter.FilterResults statsmodels.tsa.statespace.mlemodel.MLEResults """ def __init__(self, model, params, filter_results, cov_type='opg', **kwargs): super(DynamicFactorResults, self).__init__(model, params, filter_results, cov_type, **kwargs) self.df_resid = np.inf # attribute required for wald tests self.specification = Bunch(**{ # Model properties 'k_endog' : self.model.k_endog, 'enforce_stationarity': self.model.enforce_stationarity, # Factor-related properties 'k_factors': self.model.k_factors, 'factor_order': self.model.factor_order, # Error-related properties 'error_order': self.model.error_order, 'error_var': self.model.error_var, 'error_cov_type': self.model.error_cov_type, # Other properties 'k_exog': self.model.k_exog }) # Polynomials / coefficient matrices self.coefficient_matrices_var = None if self.model.factor_order > 0: ar_params = ( np.array(self.params[self.model._params_factor_transition])) k_factors = self.model.k_factors factor_order = self.model.factor_order self.coefficient_matrices_var = ( ar_params.reshape(k_factors * factor_order, k_factors).T ).reshape(k_factors, k_factors, factor_order).T self.coefficient_matrices_error = None if self.model.error_order > 0: ar_params = ( 
np.array(self.params[self.model._params_error_transition])) k_endog = self.model.k_endog error_order = self.model.error_order if self.model.error_var: self.coefficient_matrices_error = ( ar_params.reshape(k_endog * error_order, k_endog).T ).reshape(k_endog, k_endog, error_order).T else: mat = np.zeros((k_endog, k_endog * error_order)) mat[self.model._idx_error_diag] = ar_params self.coefficient_matrices_error = ( mat.T.reshape(error_order, k_endog, k_endog)) @property def factors(self): """ Estimates of unobserved factors Returns ------- out: Bunch Has the following attributes: - `filtered`: a time series array with the filtered estimate of the component - `filtered_cov`: a time series array with the filtered estimate of the variance/covariance of the component - `smoothed`: a time series array with the smoothed estimate of the component - `smoothed_cov`: a time series array with the smoothed estimate of the variance/covariance of the component - `offset`: an integer giving the offset in the state vector where this component begins """ # If present, level is always the first component of the state vector out = None spec = self.specification if spec.k_factors > 0: offset = 0 end = spec.k_factors res = self.filter_results out = Bunch( filtered=res.filtered_state[offset:end], filtered_cov=res.filtered_state_cov[offset:end, offset:end], smoothed=None, smoothed_cov=None, offset=offset) if self.smoothed_state is not None: out.smoothed = self.smoothed_state[offset:end] if self.smoothed_state_cov is not None: out.smoothed_cov = ( self.smoothed_state_cov[offset:end, offset:end]) return out @cache_readonly def coefficients_of_determination(self): """ Coefficients of determination (:math:`R^2`) from regressions of individual estimated factors on endogenous variables. 
Returns ------- coefficients_of_determination : array A `k_endog` x `k_factors` array, where `coefficients_of_determination[i, j]` represents the :math:`R^2` value from a regression of factor `j` and a constant on endogenous variable `i`. Notes ----- Although it can be difficult to interpret the estimated factor loadings and factors, it is often helpful to use the cofficients of determination from univariate regressions to assess the importance of each factor in explaining the variation in each endogenous variable. In models with many variables and factors, this can sometimes lend interpretation to the factors (for example sometimes one factor will load primarily on real variables and another on nominal variables). See Also -------- plot_coefficients_of_determination """ from statsmodels.tools import add_constant spec = self.specification coefficients = np.zeros((spec.k_endog, spec.k_factors)) which = 'filtered' if self.smoothed_state is None else 'smoothed' for i in range(spec.k_factors): exog = add_constant(self.factors[which][i]) for j in range(spec.k_endog): endog = self.filter_results.endog[j] coefficients[j, i] = OLS(endog, exog).fit().rsquared return coefficients def plot_coefficients_of_determination(self, endog_labels=None, fig=None, figsize=None): """ Plot the coefficients of determination Parameters ---------- endog_labels : boolean, optional Whether or not to label the endogenous variables along the x-axis of the plots. Default is to include labels if there are 5 or fewer endogenous variables. fig : Matplotlib Figure instance, optional If given, subplots are created in this figure instead of in a new figure. Note that the grid will be created in the provided figure using `fig.add_subplot()`. figsize : tuple, optional If a figure is created, this argument allows specifying a size. The tuple is (width, height). Notes ----- Produces a `k_factors` x 1 plot grid. The `i`th plot shows a bar plot of the coefficients of determination associated with factor `i`. 
The endogenous variables are arranged along the x-axis according to their position in the `endog` array. See Also -------- coefficients_of_determination """ from statsmodels.graphics.utils import _import_mpl, create_mpl_fig _import_mpl() fig = create_mpl_fig(fig, figsize) spec = self.specification # Should we label endogenous variables? if endog_labels is None: endog_labels = spec.k_endog <= 5 # Plot the coefficients of determination coefficients_of_determination = self.coefficients_of_determination plot_idx = 1 locations = np.arange(spec.k_endog) for coeffs in coefficients_of_determination.T: # Create the new axis ax = fig.add_subplot(spec.k_factors, 1, plot_idx) ax.set_ylim((0,1)) ax.set(title='Factor %i' % plot_idx, ylabel=r'$R^2$') bars = ax.bar(locations, coeffs) if endog_labels: width = bars[0].get_width() ax.xaxis.set_ticks(locations + width / 2) ax.xaxis.set_ticklabels(self.model.endog_names) else: ax.set(xlabel='Endogenous variables') ax.xaxis.set_ticks([]) plot_idx += 1 return fig def predict(self, start=None, end=None, exog=None, dynamic=False, **kwargs): """ In-sample prediction and out-of-sample forecasting Parameters ---------- start : int, str, or datetime, optional Zero-indexed observation number at which to start forecasting, ie., the first forecast is start. Can also be a date string to parse or a datetime type. Default is the the zeroth observation. end : int, str, or datetime, optional Zero-indexed observation number at which to end forecasting, ie., the first forecast is start. Can also be a date string to parse or a datetime type. However, if the dates index does not have a fixed frequency, end must be an integer index if you want out of sample prediction. Default is the last observation in the sample. exog : array_like, optional If the model includes exogenous regressors, you must provide exactly enough out-of-sample values for the exogenous variables if end is beyond the last observation in the sample. 
dynamic : boolean, int, str, or datetime, optional Integer offset relative to `start` at which to begin dynamic prediction. Can also be an absolute date string to parse or a datetime type (these are not interpreted as offsets). Prior to this observation, true endogenous values will be used for prediction; starting with this observation and continuing through the end of prediction, forecasted endogenous values will be used instead. **kwargs Additional arguments may required for forecasting beyond the end of the sample. See `FilterResults.predict` for more details. Returns ------- forecast : array Array of out of sample forecasts. """ if start is None: start = 0 # Handle end (e.g. date) _start = self.model._get_predict_start(start) _end, _out_of_sample = self.model._get_predict_end(end) # Handle exogenous parameters if _out_of_sample and self.model.k_exog > 0: # Create a new faux VARMAX model for the extended dataset nobs = self.model.data.orig_endog.shape[0] + _out_of_sample endog = np.zeros((nobs, self.model.k_endog)) if self.model.k_exog > 0:<|fim▁hole|> ' `exog` argument.') exog = np.array(exog) required_exog_shape = (_out_of_sample, self.model.k_exog) if not exog.shape == required_exog_shape: raise ValueError('Provided exogenous values are not of the' ' appropriate shape. Required %s, got %s.' 
% (str(required_exog_shape), str(exog.shape))) exog = np.c_[self.model.data.orig_exog.T, exog.T].T # TODO replace with init_kwds or specification or similar model = DynamicFactor( endog, k_factors=self.model.k_factors, factor_order=self.model.factor_order, exog=exog, error_order=self.model.error_order, error_var=self.model.error_var, error_cov_type=self.model.error_cov_type, enforce_stationarity=self.model.enforce_stationarity ) model.update(self.params) # Set the kwargs with the update time-varying state space # representation matrices for name in self.filter_results.shapes.keys(): if name == 'obs': continue mat = getattr(model.ssm, name) if mat.shape[-1] > 1: if len(mat.shape) == 2: kwargs[name] = mat[:, -_out_of_sample:] else: kwargs[name] = mat[:, :, -_out_of_sample:] elif self.model.k_exog == 0 and exog is not None: warn('Exogenous array provided to predict, but additional data not' ' required. `exog` argument ignored.', ValueWarning) return super(DynamicFactorResults, self).predict( start=start, end=end, exog=exog, dynamic=dynamic, **kwargs ) def forecast(self, steps=1, exog=None, **kwargs): """ Out-of-sample forecasts Parameters ---------- steps : int, optional The number of out of sample forecasts from the end of the sample. Default is 1. exog : array_like, optional If the model includes exogenous regressors, you must provide exactly enough out-of-sample values for the exogenous variables for each step forecasted. **kwargs Additional arguments may required for forecasting beyond the end of the sample. See `FilterResults.predict` for more details. Returns ------- forecast : array Array of out of sample forecasts. 
""" return super(DynamicFactorResults, self).forecast(steps, exog=exog, **kwargs) def summary(self, alpha=.05, start=None, separate_params=True): from statsmodels.iolib.summary import summary_params spec = self.specification # Create the model name model_name = [] if spec.k_factors > 0: if spec.factor_order > 0: model_type = ('DynamicFactor(factors=%d, order=%d)' % (spec.k_factors, spec.factor_order)) else: model_type = 'StaticFactor(factors=%d)' % spec.k_factors model_name.append(model_type) if spec.k_exog > 0: model_name.append('%d regressors' % spec.k_exog) else: model_name.append('SUR(%d regressors)' % spec.k_exog) if spec.error_order > 0: error_type = 'VAR' if spec.error_var else 'AR' model_name.append('%s(%d) errors' % (error_type, spec.error_order)) summary = super(DynamicFactorResults, self).summary( alpha=alpha, start=start, model_name=model_name, display_params=not separate_params ) if separate_params: indices = np.arange(len(self.params)) def make_table(self, mask, title, strip_end=True): res = (self, self.params[mask], self.bse[mask], self.zvalues[mask], self.pvalues[mask], self.conf_int(alpha)[mask]) param_names = [ '.'.join(name.split('.')[:-1]) if strip_end else name for name in np.array(self.data.param_names)[mask].tolist() ] return summary_params(res, yname=None, xname=param_names, alpha=alpha, use_t=False, title=title) k_endog = self.model.k_endog k_exog = self.model.k_exog k_factors = self.model.k_factors factor_order = self.model.factor_order _factor_order = self.model._factor_order _error_order = self.model._error_order # Add parameter tables for each endogenous variable loading_indices = indices[self.model._params_loadings] loading_masks = [] exog_indices = indices[self.model._params_exog] exog_masks = [] for i in range(k_endog): offset = 0 # 1. Factor loadings # Recall these are in the form: # 'loading.f1.y1', 'loading.f2.y1', 'loading.f1.y2', ... 
loading_mask = ( loading_indices[i * k_factors:(i + 1) * k_factors]) loading_masks.append(loading_mask) # 2. Exog # Recall these are in the form: # beta.x1.y1, beta.x2.y1, beta.x1.y2, ... exog_mask = exog_indices[i * k_exog:(i + 1) * k_exog] exog_masks.append(exog_mask) # Create the table mask = np.concatenate([loading_mask, exog_mask]) title = "Results for equation %s" % self.model.endog_names[i] table = make_table(self, mask, title) summary.tables.append(table) # Add parameter tables for each factor factor_indices = indices[self.model._params_factor_transition] factor_masks = [] if factor_order > 0: for i in range(k_factors): start = i * _factor_order factor_mask = factor_indices[start: start + _factor_order] factor_masks.append(factor_mask) # Create the table title = "Results for factor equation f%d" % (i+1) table = make_table(self, factor_mask, title) summary.tables.append(table) # Add parameter tables for error transitions error_masks = [] if spec.error_order > 0: error_indices = indices[self.model._params_error_transition] for i in range(k_endog): if spec.error_var: start = i * _error_order end = (i + 1) * _error_order else: start = i * spec.error_order end = (i + 1) * spec.error_order error_mask = error_indices[start:end] error_masks.append(error_mask) # Create the table title = ("Results for error equation e(%s)" % self.model.endog_names[i]) table = make_table(self, error_mask, title) summary.tables.append(table) # Error covariance terms error_cov_mask = indices[self.model._params_error_cov] table = make_table(self, error_cov_mask, "Error covariance matrix", strip_end=False) summary.tables.append(table) # Add a table for all other parameters masks = [] for m in (loading_masks, exog_masks, factor_masks, error_masks, [error_cov_mask]): m = np.array(m).flatten() if len(m) > 0: masks.append(m) masks = np.concatenate(masks) inverse_mask = np.array(list(set(indices).difference(set(masks)))) if len(inverse_mask) > 0: table = make_table(self, inverse_mask, "Other 
parameters", strip_end=False) summary.tables.append(table) return summary summary.__doc__ = MLEResults.summary.__doc__ class DynamicFactorResultsWrapper(MLEResultsWrapper): _attrs = {} _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs, _attrs) _methods = {} _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods, _methods) wrap.populate_wrapper(DynamicFactorResultsWrapper, DynamicFactorResults)<|fim▁end|>
if exog is None: raise ValueError('Out-of-sample forecasting in a model' ' with a regression component requires' ' additional exogenous values via the'
<|file_name|>jhelum.py<|end_file_name|><|fim▁begin|># Third-party import numpy as np import astropy.units as u import astropy.coordinates as coord from astropy.coordinates import frame_transform_graph from astropy.coordinates.matrix_utilities import matrix_transpose __all__ = ["JhelumBonaca19"] class JhelumBonaca19(coord.BaseCoordinateFrame): """ A Heliocentric spherical coordinate system defined by the orbit of the Jhelum stream, as described in Bonaca et al. 2019. For more information about this class, see the Astropy documentation on coordinate frames in :mod:`~astropy.coordinates`. Parameters ---------- representation : :class:`~astropy.coordinates.BaseRepresentation` or None A representation object or None to have no data (or use the other keywords) phi1 : angle_like, optional, must be keyword The longitude-like angle aligned with the stream. phi2 : angle_like, optional, must be keyword The latitude-like angle aligned perpendicular to the stream. distance : :class:`~astropy.units.Quantity`, optional, must be keyword The Distance for this object along the line-of-sight. pm_phi1_cosphi2 : :class:`~astropy.units.Quantity`, optional, must be keyword The proper motion in the longitude-like direction corresponding to the GD-1 stream's orbit. pm_phi2 : :class:`~astropy.units.Quantity`, optional, must be keyword The proper motion in the latitude-like direction perpendicular to the GD-1 stream's orbit. radial_velocity : :class:`~astropy.units.Quantity`, optional, must be keyword The Distance for this object along the line-of-sight. 
""" default_representation = coord.SphericalRepresentation default_differential = coord.SphericalCosLatDifferential frame_specific_representation_info = { coord.SphericalRepresentation: [ coord.RepresentationMapping('lon', 'phi1'), coord.RepresentationMapping('lat', 'phi2'), coord.RepresentationMapping('distance', 'distance')], } _default_wrap_angle = 180*u.deg def __init__(self, *args, **kwargs): wrap = kwargs.pop('wrap_longitude', True) super().__init__(*args, **kwargs) if wrap and isinstance(self._data, (coord.UnitSphericalRepresentation, coord.SphericalRepresentation)): self._data.lon.wrap_angle = self._default_wrap_angle # TODO: remove this. This is a hack required as of astropy v3.1 in order # to have the longitude components wrap at the desired angle def represent_as(self, base, s='base', in_frame_units=False): r = super().represent_as(base, s=s, in_frame_units=in_frame_units) if hasattr(r, "lon"): r.lon.wrap_angle = self._default_wrap_angle return r represent_as.__doc__ = coord.BaseCoordinateFrame.represent_as.__doc__ # Rotation matrix as defined in Bonaca+2019 R = np.array([[0.6173151074, -0.0093826715, -0.7866600433], [-0.0151801852, -0.9998847743, 0.0000135163], [-0.7865695266, 0.0119333013, -0.6173864075]]) @frame_transform_graph.transform(coord.StaticMatrixTransform, coord.ICRS, JhelumBonaca19) def icrs_to_jhelum(): """ Compute the transformation from Galactic spherical to heliocentric Jhelum coordinates. """ return R @frame_transform_graph.transform(coord.StaticMatrixTransform, JhelumBonaca19, coord.ICRS) def gd1_to_icrs():<|fim▁hole|> return matrix_transpose(icrs_to_jhelum())<|fim▁end|>
""" Compute the transformation from heliocentric Jhelum coordinates to spherical ICRS. """
<|file_name|>xrtest.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::callback::ExceptionHandling; use crate::dom::bindings::cell::DomRefCell; use crate::dom::bindings::codegen::Bindings::FunctionBinding::Function; use crate::dom::bindings::codegen::Bindings::XRTestBinding::{ self, FakeXRDeviceInit, XRTestMethods, }; use crate::dom::bindings::refcounted::{Trusted, TrustedPromise}; use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector}; use crate::dom::bindings::root::{Dom, DomRoot}; use crate::dom::fakexrdevice::{get_origin, get_views, FakeXRDevice}; use crate::dom::globalscope::GlobalScope; use crate::dom::promise::Promise; use crate::script_thread::ScriptThread; use crate::task_source::TaskSource; use dom_struct::dom_struct; use ipc_channel::ipc::IpcSender; use ipc_channel::router::ROUTER; use profile_traits::ipc; use std::rc::Rc; use webxr_api::{self, Error as XRError, MockDeviceInit, MockDeviceMsg}; #[dom_struct] pub struct XRTest { reflector: Reflector, devices_connected: DomRefCell<Vec<Dom<FakeXRDevice>>>, } impl XRTest { pub fn new_inherited() -> XRTest { XRTest { reflector: Reflector::new(), devices_connected: DomRefCell::new(vec![]), } } pub fn new(global: &GlobalScope) -> DomRoot<XRTest> { reflect_dom_object( Box::new(XRTest::new_inherited()), global, XRTestBinding::Wrap, ) } fn device_obtained( &self, response: Result<IpcSender<MockDeviceMsg>, XRError>, trusted: TrustedPromise, ) { let promise = trusted.root(); if let Ok(sender) = response { let device = FakeXRDevice::new(&self.global(), sender); self.devices_connected 
.borrow_mut() .push(Dom::from_ref(&device)); promise.resolve_native(&device); } else { promise.reject_native(&()); } } } impl XRTestMethods for XRTest { /// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md #[allow(unsafe_code)] fn SimulateDeviceConnection(&self, init: &FakeXRDeviceInit) -> Rc<Promise> { let global = self.global(); let p = Promise::new(&global); let origin = if let Some(ref o) = init.viewerOrigin { match get_origin(&o) { Ok(origin) => Some(origin), Err(e) => { p.reject_error(e); return p; }, } } else { None }; let floor_origin = if let Some(ref o) = init.floorOrigin { match get_origin(&o) { Ok(origin) => Some(origin), Err(e) => { p.reject_error(e);<|fim▁hole|> } } else { None }; let views = match get_views(&init.views) { Ok(views) => views, Err(e) => { p.reject_error(e); return p; }, }; let supported_features = if let Some(ref s) = init.supportedFeatures { s.iter().cloned().map(String::from).collect() } else { vec![] }; let init = MockDeviceInit { viewer_origin: origin, views, supports_immersive: init.supportsImmersive, supports_unbounded: init.supportsUnbounded, floor_origin, supported_features, }; let global = self.global(); let window = global.as_window(); let this = Trusted::new(self); let mut trusted = Some(TrustedPromise::new(p.clone())); let (task_source, canceller) = window .task_manager() .dom_manipulation_task_source_with_canceller(); let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); ROUTER.add_route( receiver.to_opaque(), Box::new(move |message| { let trusted = trusted .take() .expect("SimulateDeviceConnection callback called twice"); let this = this.clone(); let message = message .to() .expect("SimulateDeviceConnection callback given incorrect payload"); let _ = task_source.queue_with_canceller( task!(request_session: move || { this.root().device_obtained(message, trusted); }), &canceller, ); }), ); window .webxr_registry() .simulate_device_connection(init, sender); p } /// 
https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md fn SimulateUserActivation(&self, f: Rc<Function>) { ScriptThread::set_user_interacting(true); let _ = f.Call__(vec![], ExceptionHandling::Rethrow); ScriptThread::set_user_interacting(false); } /// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md fn DisconnectAllDevices(&self) -> Rc<Promise> { // XXXManishearth implement device disconnection and session ending let global = self.global(); let p = Promise::new(&global); let mut devices = self.devices_connected.borrow_mut(); if devices.is_empty() { p.resolve_native(&()); } else { let mut len = devices.len(); let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); let mut rooted_devices: Vec<_> = devices.iter().map(|x| DomRoot::from_ref(&**x)).collect(); devices.clear(); let mut trusted = Some(TrustedPromise::new(p.clone())); let (task_source, canceller) = global .as_window() .task_manager() .dom_manipulation_task_source_with_canceller(); ROUTER.add_route( receiver.to_opaque(), Box::new(move |_| { len -= 1; if len == 0 { let trusted = trusted .take() .expect("DisconnectAllDevices disconnected more devices than expected"); let _ = task_source.queue_with_canceller(trusted.resolve_task(()), &canceller); } }), ); for device in rooted_devices.drain(..) { device.disconnect(sender.clone()); } }; p } }<|fim▁end|>
return p; },
<|file_name|>server.js<|end_file_name|><|fim▁begin|>// Init ES2015 + .jsx environments for .require()<|fim▁hole|>require('babel-register'); var express = require('express'); var fluxexapp = require('./fluxexapp'); var serverAction = require('./actions/server'); var fluxexServerExtra = require('fluxex/extra/server'); var app = express(); // Provide /static/js/main.js fluxexServerExtra.initStatic(app); // Mount test page at /test app.use('/product', fluxexServerExtra.createMiddlewareByAction(fluxexapp, serverAction.samplePage)); // Start server app.listen(process.env.TESTPORT || 3000); console.log('Fluxex started! Go http://localhost:3001/product?id=124');<|fim▁end|>
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># e Django settings for dss project. import os import mimetypes from django.core.urlresolvers import reverse_lazy import djcelery from django.conf import global_settings from dss import logsettings from utils import here from localsettings import * from pipelinesettings import * from storagesettings import * from paymentsettings import * DEVELOPMENT = DEBUG <|fim▁hole|>TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Fergal Moran', '[email protected]'), ) MANAGERS = ADMINS AUTH_PROFILE_MODULE = 'spa.UserProfile' ALLOWED_HOSTS = ['*'] DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'deepsouthsounds', 'ADMINUSER': 'postgres', 'USER': DATABASE_USER, 'PASSWORD': DATABASE_PASSWORD, 'HOST': DATABASE_HOST, } } import sys if 'test' in sys.argv or 'test_coverage' in sys.argv: print "Testing" DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3' ROOT_URLCONF = 'dss.urls' TIME_ZONE = 'Europe/Dublin' LANGUAGE_CODE = 'en-ie' SITE_ID = 1 USE_I18N = False USE_L10N = True s = True SITE_ROOT = here('') ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/" STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', 'compressor.finders.CompressorFinder', 'pipeline.finders.PipelineFinder', 'pipeline.finders.CachedFileFinder', ) STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage' STATICFILES_DIRS = ( here('static'), ) TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + ( 'django_facebook.context_processors.facebook', 'django.core.context_processors.request', 'django.core.context_processors.i18n', 'django.core.context_processors.media', 'django.core.context_processors.static', 'django.contrib.auth.context_processors.auth', "allauth.socialaccount.context_processors.socialaccount", "allauth.account.context_processors.account", "spa.context_processors.dss_context" ) AUTHENTICATION_BACKENDS = 
global_settings.AUTHENTICATION_BACKENDS + ( "allauth.account.auth_backends.AuthenticationBackend", ) MIDDLEWARE_CLASSES = ( 'django.middleware.gzip.GZipMiddleware', 'django.middleware.common.CommonMiddleware', 'user_sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'spa.middleware.cors.XsSharingMiddleware', # 'htmlmin.middleware.HtmlMinifyMiddleware', #'htmlmin.middleware.MarkRequestMiddleware', 'django_user_agents.middleware.UserAgentMiddleware', 'pipeline.middleware.MinifyHTMLMiddleware', # 'spa.middleware.uploadify.SWFUploadMiddleware', #'spa.middleware.sqlprinter.SqlPrintingMiddleware' if DEBUG else None, #'debug_toolbar.middleware.DebugToolbarMiddleware', ) WSGI_APPLICATION = 'dss.wsgi.application' TEMPLATE_DIRS = (here('templates'),) INSTALLED_APPS = ( 'grappelli', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'user_sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admindocs', 'django_facebook', 'django_extensions', 'django_gravatar', 'notification', 'djcelery', 'django_nose', 'sorl.thumbnail', 'south', 'pipeline', 'avatar', 'spa', 'spa.signals', 'core', 'dirtyfields', 'allauth', 'allauth.account', 'allauth.socialaccount', 'allauth.socialaccount.providers.facebook', 'allauth.socialaccount.providers.twitter', 'allauth.socialaccount.providers.google', 'debug_toolbar', 'django_jenkins', 'dbbackup', 'schedule', 'djrill', 'paypal.standard.ipn', 'django_user_agents', 'storages', 'rest_framework', ) # where to redirect users to after logging in LOGIN_REDIRECT_URL = reverse_lazy('home') LOGOUT_URL = reverse_lazy('home') LOGGING = logsettings.LOGGING FACEBOOK_APP_ID = '154504534677009' djcelery.setup_loader() SOCIALACCOUNT_AVATAR_SUPPORT = True SOCIALACCOUNT_PROVIDERS = { 
'facebook': { 'SCOPE': ['email', 'publish_stream', 'publish_actions'], 'METHOD': 'oauth2', 'LOCALE_FUNC': lambda request: 'en_IE' }, 'google': { 'SCOPE': ['https://www.googleapis.com/auth/userinfo.profile'], 'AUTH_PARAMS': {'access_type': 'online'} } } AVATAR_STORAGE_DIR = MEDIA_ROOT + '/avatars/' ACCOUNT_LOGOUT_REDIRECT_URL = '/' INTERNAL_IPS = ('127.0.0.1', '86.44.166.21', '192.168.1.111') TASTYPIE_DATETIME_FORMATTING = 'rfc-2822' TASTYPIE_ALLOW_MISSING_SLASH = True SENDFILE_ROOT = os.path.join(MEDIA_ROOT, 'mixes') SENDFILE_URL = '/media/mixes' SESSION_ENGINE = 'user_sessions.backends.db' mimetypes.add_type("text/xml", ".plist", False) HTML_MINIFY = not DEBUG DEFAULT_FROM_EMAIL = 'DSS ChatBot <[email protected]>' DEFAULT_HTTP_PROTOCOL = 'http' EMAIL_BACKEND = 'djrill.mail.backends.djrill.DjrillBackend' if DEBUG: import mimetypes mimetypes.add_type("image/png", ".png", True) mimetypes.add_type("image/png", ".png", True) mimetypes.add_type("application/x-font-woff", ".woff", True) mimetypes.add_type("application/vnd.ms-fontobject", ".eot", True) mimetypes.add_type("font/ttf", ".ttf", True) mimetypes.add_type("font/otf", ".otf", True) REALTIME_HEADERS = { 'content-type': 'application/json' } TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' if 'test' in sys.argv: try: from test_settings import * except ImportError: pass REST_FRAMEWORK = { # Use hyperlinked styles by default. # Only used if the `serializer_class` attribute is not set on a view. 'DEFAULT_MODEL_SERIALIZER_CLASS': 'rest_framework.serializers.HyperlinkedModelSerializer', # Use Django's standard `django.contrib.auth` permissions, # or allow read-only access for unauthenticated users. 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly' ] } #Trying this to see if it stops the user being logged out SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks).<|fim▁end|>
<|file_name|>ramp.py<|end_file_name|><|fim▁begin|># Copyright 2015 TellApart, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from datetime import datetime, timedelta from gevent import spawn_later from gevent.event import Event from tellapart.aurproxy.audit import AuditItem from tellapart.aurproxy.share.adjuster import ShareAdjuster <|fim▁hole|> return 0.0 if end_time <= as_of: return 1.0 else: total = end_time - start_time elapsed = as_of - start_time p = float(elapsed.total_seconds()) / float(total.total_seconds()) return p _CURVE_FNS = { 'linear': linear } class RampingShareAdjuster(ShareAdjuster): def __init__(self, endpoint, signal_update_fn, ramp_delay, ramp_seconds, curve='linear', update_frequency=10, as_of=None): super(RampingShareAdjuster, self).__init__(endpoint, signal_update_fn) self._ramp_delay = ramp_delay self._ramp_seconds = ramp_seconds self._curve_fn = _CURVE_FNS[curve] self._update_frequency = update_frequency self._start_time = as_of self._stop_event = Event() def start(self): """Start maintaining share adjustment factor for endpoint. """ if not self._start_time: self._start_time = datetime.now() + timedelta(seconds=self._ramp_delay) spawn_later(self._update_frequency, self._update) def stop(self): """Stop maintaining share adjustment factor for endpoint. 
""" self._stop_event.set() def _update(self): if not self._stop_event.is_set(): try: self._signal_update_fn() finally: if datetime.now() > self._end_time: self.stop() else: spawn_later(self._update_frequency, self._update) @property def _end_time(self): return self._start_time + timedelta(seconds=self._ramp_seconds) @property def auditable_share(self): """Return current share adjustment factor. """ as_of = datetime.now() share = self._curve_fn(self._start_time, self._end_time, as_of) return share, AuditItem('ramp', str(share))<|fim▁end|>
def linear(start_time, end_time, as_of): if start_time >= as_of:
<|file_name|>test_runner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2019-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Run fuzz test targets. """ from concurrent.futures import ThreadPoolExecutor, as_completed import argparse import configparser import logging import os import subprocess import sys def get_fuzz_env(*, target, source_dir): return { 'FUZZ': target, 'UBSAN_OPTIONS': f'suppressions={source_dir}/test/sanitizer_suppressions/ubsan:print_stacktrace=1:halt_on_error=1:report_error_type=1', 'ASAN_OPTIONS': # symbolizer disabled due to https://github.com/google/sanitizers/issues/1364#issuecomment-761072085 'symbolize=0:detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1', } def main(): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, description='''Run the fuzz targets with all inputs from the seed_dir once.''', ) parser.add_argument( "-l", "--loglevel", dest="loglevel", default="INFO", help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console.", ) parser.add_argument( '--valgrind', action='store_true', help='If true, run fuzzing binaries under the valgrind memory error detector', ) parser.add_argument( '-x', '--exclude', help="A comma-separated list of targets to exclude", ) parser.add_argument( '--par', '-j', type=int, default=4, help='How many targets to merge or execute in parallel.', ) parser.add_argument( 'seed_dir', help='The seed corpus to run on (must contain subfolders for each fuzz target).', ) parser.add_argument( 'target', nargs='*', help='The target(s) to run. 
Default is to run all targets.', ) parser.add_argument( '--m_dir', help='Merge inputs from this directory into the seed_dir.', ) parser.add_argument( '-g', '--generate', action='store_true', help='Create new corpus seeds (or extend the existing ones) by running' ' the given targets for a finite number of times. Outputs them to' ' the passed seed_dir.' ) args = parser.parse_args() # Set up logging logging.basicConfig( format='%(message)s', level=int(args.loglevel) if args.loglevel.isdigit() else args.loglevel.upper(), ) # Read config generated by configure. config = configparser.ConfigParser() configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini" config.read_file(open(configfile, encoding="utf8")) if not config["components"].getboolean("ENABLE_FUZZ"): logging.error("Must have fuzz targets built") sys.exit(1) # Build list of tests test_list_all = parse_test_list(fuzz_bin=os.path.join(config["environment"]["BUILDDIR"], 'src', 'test', 'fuzz', 'fuzz')) if not test_list_all: logging.error("No fuzz targets found") sys.exit(1) logging.debug("{} fuzz target(s) found: {}".format(len(test_list_all), " ".join(sorted(test_list_all)))) args.target = args.target or test_list_all # By default run all test_list_error = list(set(args.target).difference(set(test_list_all))) if test_list_error: logging.error("Unknown fuzz targets selected: {}".format(test_list_error)) test_list_selection = list(set(test_list_all).intersection(set(args.target))) if not test_list_selection: logging.error("No fuzz targets selected") if args.exclude: for excluded_target in args.exclude.split(","): if excluded_target not in test_list_selection: logging.error("Target \"{}\" not found in current target list.".format(excluded_target)) continue test_list_selection.remove(excluded_target) test_list_selection.sort() logging.info("{} of {} detected fuzz target(s) selected: {}".format(len(test_list_selection), len(test_list_all), " ".join(test_list_selection))) if not args.generate: 
test_list_seedless = [] for t in test_list_selection: corpus_path = os.path.join(args.seed_dir, t) if not os.path.exists(corpus_path) or len(os.listdir(corpus_path)) == 0: test_list_seedless.append(t) test_list_seedless.sort() if test_list_seedless: logging.info( "Fuzzing harnesses lacking a seed corpus: {}".format( " ".join(test_list_seedless) ) ) logging.info("Please consider adding a fuzz seed corpus at https://github.com/bitcoin-core/qa-assets") try: help_output = subprocess.run( args=[ os.path.join(config["environment"]["BUILDDIR"], 'src', 'test', 'fuzz', 'fuzz'), '-help=1', ], env=get_fuzz_env(target=test_list_selection[0], source_dir=config['environment']['SRCDIR']), timeout=20, check=True, stderr=subprocess.PIPE, universal_newlines=True, ).stderr if "libFuzzer" not in help_output: logging.error("Must be built with libFuzzer") sys.exit(1) except subprocess.TimeoutExpired: logging.error("subprocess timed out: Currently only libFuzzer is supported") sys.exit(1) with ThreadPoolExecutor(max_workers=args.par) as fuzz_pool: if args.generate: return generate_corpus_seeds( fuzz_pool=fuzz_pool, src_dir=config['environment']['SRCDIR'], build_dir=config["environment"]["BUILDDIR"], seed_dir=args.seed_dir, targets=test_list_selection, ) if args.m_dir: merge_inputs( fuzz_pool=fuzz_pool, corpus=args.seed_dir, test_list=test_list_selection, src_dir=config['environment']['SRCDIR'], build_dir=config["environment"]["BUILDDIR"], merge_dir=args.m_dir, ) return run_once( fuzz_pool=fuzz_pool, corpus=args.seed_dir, test_list=test_list_selection, src_dir=config['environment']['SRCDIR'], build_dir=config["environment"]["BUILDDIR"], use_valgrind=args.valgrind, ) def generate_corpus_seeds(*, fuzz_pool, src_dir, build_dir, seed_dir, targets): """Generates new corpus seeds. Run {targets} without input, and outputs the generated corpus seeds to {seed_dir}. 
""" logging.info("Generating corpus seeds to {}".format(seed_dir)) def job(command, t): logging.debug("Running '{}'\n".format(" ".join(command))) logging.debug("Command '{}' output:\n'{}'\n".format( ' '.join(command), subprocess.run( command, env=get_fuzz_env(target=t, source_dir=src_dir), check=True, stderr=subprocess.PIPE, universal_newlines=True, ).stderr)) futures = [] for target in targets: target_seed_dir = os.path.join(seed_dir, target) os.makedirs(target_seed_dir, exist_ok=True) command = [ os.path.join(build_dir, 'src', 'test', 'fuzz', 'fuzz'), "-runs=100000", target_seed_dir, ] futures.append(fuzz_pool.submit(job, command, target)) for future in as_completed(futures): future.result() def merge_inputs(*, fuzz_pool, corpus, test_list, src_dir, build_dir, merge_dir): logging.info("Merge the inputs from the passed dir into the seed_dir. Passed dir {}".format(merge_dir)) jobs = [] for t in test_list: args = [ os.path.join(build_dir, 'src', 'test', 'fuzz', 'fuzz'), '-merge=1', '-shuffle=0', '-prefer_small=1', '-use_value_profile=1', # Also done by oss-fuzz https://github.com/google/oss-fuzz/issues/1406#issuecomment-387790487 os.path.join(corpus, t), os.path.join(merge_dir, t), ] os.makedirs(os.path.join(corpus, t), exist_ok=True) os.makedirs(os.path.join(merge_dir, t), exist_ok=True) def job(t, args): output = 'Run {} with args {}\n'.format(t, " ".join(args)) output += subprocess.run( args, env=get_fuzz_env(target=t, source_dir=src_dir), check=True, stderr=subprocess.PIPE, universal_newlines=True, ).stderr logging.debug(output) jobs.append(fuzz_pool.submit(job, t, args)) for future in as_completed(jobs): future.result() def run_once(*, fuzz_pool, corpus, test_list, src_dir, build_dir, use_valgrind): jobs = [] for t in test_list: corpus_path = os.path.join(corpus, t) os.makedirs(corpus_path, exist_ok=True) args = [<|fim▁hole|> ] if use_valgrind: args = ['valgrind', '--quiet', '--error-exitcode=1'] + args def job(t, args): output = 'Run {} with args {}'.format(t, 
args) result = subprocess.run( args, env=get_fuzz_env(target=t, source_dir=src_dir), stderr=subprocess.PIPE, universal_newlines=True, ) output += result.stderr return output, result jobs.append(fuzz_pool.submit(job, t, args)) for future in as_completed(jobs): output, result = future.result() logging.debug(output) try: result.check_returncode() except subprocess.CalledProcessError as e: if e.stdout: logging.info(e.stdout) if e.stderr: logging.info(e.stderr) logging.info("Target \"{}\" failed with exit code {}".format(" ".join(result.args), e.returncode)) sys.exit(1) def parse_test_list(*, fuzz_bin): test_list_all = subprocess.run( fuzz_bin, env={ 'PRINT_ALL_FUZZ_TARGETS_AND_ABORT': '' }, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, universal_newlines=True, ).stdout.splitlines() return test_list_all if __name__ == '__main__': main()<|fim▁end|>
os.path.join(build_dir, 'src', 'test', 'fuzz', 'fuzz'), '-runs=1', corpus_path,
<|file_name|>problem_9.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,<|fim▁hole|># There exists exactly one Pythagorean triplet for which a + b + c = 1000. # Find the product abc. def resolve(n): for a in range(1, n / 2 - 2): for b in range(a, n / 2 - 1): c = n - a - b if not c > b: continue if a ** 2 + b ** 2 == c ** 2: return a, b, c return None if __name__ == '__main__': ret = resolve(1000) print ret, reduce(lambda x, y: x * y, ret)<|fim▁end|>
# # a² + b² = c² # For example, 3² + 4² = 9 + 16 = 25 = 52. #
<|file_name|>svn_utils.py<|end_file_name|><|fim▁begin|>import os import re import sys from distutils import log import xml.dom.pulldom import shlex import locale import codecs import unicodedata import warnings from setuptools.compat import unicode from setuptools.py31compat import TemporaryDirectory from xml.sax.saxutils import unescape try: import urlparse except ImportError: import urllib.parse as urlparse from subprocess import Popen as _Popen, PIPE as _PIPE #NOTE: Use of the command line options require SVN 1.3 or newer (December 2005) # and SVN 1.3 hasn't been supported by the developers since mid 2008. #subprocess is called several times with shell=(sys.platform=='win32') #see the follow for more information: # http://bugs.python.org/issue8557 # http://stackoverflow.com/questions/5658622/ # python-subprocess-popen-environment-path def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0): #regarding the shell argument, see: http://bugs.python.org/issue8557 try: proc = _Popen(args, stdout=stdout, stderr=stderr, shell=(sys.platform == 'win32')) data = proc.communicate()[stream] except OSError: return 1, '' #doubled checked and data = decode_as_string(data, encoding) #communciate calls wait() return proc.returncode, data def _get_entry_schedule(entry): schedule = entry.getElementsByTagName('schedule')[0] return "".join([t.nodeValue for t in schedule.childNodes if t.nodeType == t.TEXT_NODE]) def _get_target_property(target): property_text = target.getElementsByTagName('property')[0] return "".join([t.nodeValue for t in property_text.childNodes if t.nodeType == t.TEXT_NODE]) def _get_xml_data(decoded_str): if sys.version_info < (3, 0): #old versions want an encoded string data = decoded_str.encode('utf-8') else: data = decoded_str return data def joinpath(prefix, *suffix): if not prefix or prefix == '.': return os.path.join(*suffix) return os.path.join(prefix, *suffix) def determine_console_encoding(): try: #try for the preferred encoding 
encoding = locale.getpreferredencoding() #see if the locale.getdefaultlocale returns null #some versions of python\platforms return US-ASCII #when it cannot determine an encoding if not encoding or encoding == "US-ASCII": encoding = locale.getdefaultlocale()[1] if encoding: codecs.lookup(encoding) # make sure a lookup error is not made except (locale.Error, LookupError): encoding = None is_osx = sys.platform == "darwin" if not encoding: return ["US-ASCII", "utf-8"][is_osx] elif encoding.startswith("mac-") and is_osx: #certain versions of python would return mac-roman as default #OSX as a left over of earlier mac versions. return "utf-8" else: return encoding _console_encoding = determine_console_encoding() def decode_as_string(text, encoding=None): """ Decode the console or file output explicitly using getpreferredencoding. The text paraemeter should be a encoded string, if not no decode occurs If no encoding is given, getpreferredencoding is used. If encoding is specified, that is used instead. This would be needed for SVN --xml output. Unicode is explicitly put in composed NFC form. --xml should be UTF-8 (SVN Issue 2938) the discussion on the Subversion DEV List from 2007 seems to indicate the same. 
""" #text should be a byte string if encoding is None: encoding = _console_encoding if not isinstance(text, unicode): text = text.decode(encoding) text = unicodedata.normalize('NFC', text) return text def parse_dir_entries(decoded_str): '''Parse the entries from a recursive info xml''' doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) entries = list() for event, node in doc: if event == 'START_ELEMENT' and node.nodeName == 'entry': doc.expandNode(node) if not _get_entry_schedule(node).startswith('delete'): entries.append((node.getAttribute('path'), node.getAttribute('kind'))) return entries[1:] # do not want the root directory def parse_externals_xml(decoded_str, prefix=''): '''Parse a propget svn:externals xml''' prefix = os.path.normpath(prefix) prefix = os.path.normcase(prefix) doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) externals = list() for event, node in doc: if event == 'START_ELEMENT' and node.nodeName == 'target': doc.expandNode(node) path = os.path.normpath(node.getAttribute('path')) if os.path.normcase(path).startswith(prefix): path = path[len(prefix)+1:] data = _get_target_property(node) #data should be decoded already for external in parse_external_prop(data): externals.append(joinpath(path, external)) return externals # do not want the root directory def parse_external_prop(lines): """ Parse the value of a retrieved svn:externals entry. 
possible token setups (with quotng and backscaping in laters versions) URL[@#] EXT_FOLDERNAME [-r#] URL EXT_FOLDERNAME EXT_FOLDERNAME [-r#] URL """ externals = [] for line in lines.splitlines(): line = line.lstrip() # there might be a "\ " if not line: continue <|fim▁hole|> line = shlex.split(line) if sys.version_info < (3, 0): line = [x.decode('utf-8') for x in line] #EXT_FOLDERNAME is either the first or last depending on where #the URL falls if urlparse.urlsplit(line[-1])[0]: external = line[0] else: external = line[-1] external = decode_as_string(external, encoding="utf-8") externals.append(os.path.normpath(external)) return externals def parse_prop_file(filename, key): found = False f = open(filename, 'rt') data = '' try: for line in iter(f.readline, ''): # can't use direct iter! parts = line.split() if len(parts) == 2: kind, length = parts data = f.read(int(length)) if kind == 'K' and data == key: found = True elif kind == 'V' and found: break finally: f.close() return data class SvnInfo(object): ''' Generic svn_info object. No has little knowledge of how to extract information. Use cls.load to instatiate according svn version. Paths are not filesystem encoded. ''' @staticmethod def get_svn_version(): # Temp config directory should be enough to check for repository # This is needed because .svn always creates .subversion and # some operating systems do not handle dot directory correctly. 
# Real queries in real svn repos with be concerned with it creation with TemporaryDirectory() as tempdir: code, data = _run_command(['svn', '--config-dir', tempdir, '--version', '--quiet']) if code == 0 and data: return data.strip() else: return '' #svnversion return values (previous implementations return max revision) # 4123:4168 mixed revision working copy # 4168M modified working copy # 4123S switched working copy # 4123:4168MS mixed revision, modified, switched working copy revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I) @classmethod def load(cls, dirname=''): normdir = os.path.normpath(dirname) # Temp config directory should be enough to check for repository # This is needed because .svn always creates .subversion and # some operating systems do not handle dot directory correctly. # Real queries in real svn repos with be concerned with it creation with TemporaryDirectory() as tempdir: code, data = _run_command(['svn', '--config-dir', tempdir, 'info', normdir]) # Must check for some contents, as some use empty directories # in testcases, however only enteries is needed also the info # command above MUST have worked svn_dir = os.path.join(normdir, '.svn') is_svn_wd = (not code or os.path.isfile(os.path.join(svn_dir, 'entries'))) svn_version = tuple(cls.get_svn_version().split('.')) try: base_svn_version = tuple(int(x) for x in svn_version[:2]) except ValueError: base_svn_version = tuple() if not is_svn_wd: #return an instance of this NO-OP class return SvnInfo(dirname) if code or not base_svn_version or base_svn_version < (1, 3): warnings.warn(("No SVN 1.3+ command found: falling back " "on pre 1.7 .svn parsing"), DeprecationWarning) return SvnFileInfo(dirname) if base_svn_version < (1, 5): return Svn13Info(dirname) return Svn15Info(dirname) def __init__(self, path=''): self.path = path self._entries = None self._externals = None def get_revision(self): 'Retrieve the directory revision informatino using svnversion' code, data = 
_run_command(['svnversion', '-c', self.path]) if code: log.warn("svnversion failed") return 0 parsed = self.revision_re.match(data) if parsed: return int(parsed.group(2)) else: return 0 @property def entries(self): if self._entries is None: self._entries = self.get_entries() return self._entries @property def externals(self): if self._externals is None: self._externals = self.get_externals() return self._externals def iter_externals(self): ''' Iterate over the svn:external references in the repository path. ''' for item in self.externals: yield item def iter_files(self): ''' Iterate over the non-deleted file entries in the repository path ''' for item, kind in self.entries: if kind.lower() == 'file': yield item def iter_dirs(self, include_root=True): ''' Iterate over the non-deleted file entries in the repository path ''' if include_root: yield self.path for item, kind in self.entries: if kind.lower() == 'dir': yield item def get_entries(self): return [] def get_externals(self): return [] class Svn13Info(SvnInfo): def get_entries(self): code, data = _run_command(['svn', 'info', '-R', '--xml', self.path], encoding="utf-8") if code: log.debug("svn info failed") return [] return parse_dir_entries(data) def get_externals(self): #Previous to 1.5 --xml was not supported for svn propget and the -R #output format breaks the shlex compatible semantics. 
cmd = ['svn', 'propget', 'svn:externals'] result = [] for folder in self.iter_dirs(): code, lines = _run_command(cmd + [folder], encoding="utf-8") if code != 0: log.warn("svn propget failed") return [] #lines should a str for external in parse_external_prop(lines): if folder: external = os.path.join(folder, external) result.append(os.path.normpath(external)) return result class Svn15Info(Svn13Info): def get_externals(self): cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml'] code, lines = _run_command(cmd, encoding="utf-8") if code: log.debug("svn propget failed") return [] return parse_externals_xml(lines, prefix=os.path.abspath(self.path)) class SvnFileInfo(SvnInfo): def __init__(self, path=''): super(SvnFileInfo, self).__init__(path) self._directories = None self._revision = None def _walk_svn(self, base): entry_file = joinpath(base, '.svn', 'entries') if os.path.isfile(entry_file): entries = SVNEntriesFile.load(base) yield (base, False, entries.parse_revision()) for path in entries.get_undeleted_records(): path = decode_as_string(path) path = joinpath(base, path) if os.path.isfile(path): yield (path, True, None) elif os.path.isdir(path): for item in self._walk_svn(path): yield item def _build_entries(self): entries = list() rev = 0 for path, isfile, dir_rev in self._walk_svn(self.path): if isfile: entries.append((path, 'file')) else: entries.append((path, 'dir')) rev = max(rev, dir_rev) self._entries = entries self._revision = rev def get_entries(self): if self._entries is None: self._build_entries() return self._entries def get_revision(self): if self._revision is None: self._build_entries() return self._revision def get_externals(self): prop_files = [['.svn', 'dir-prop-base'], ['.svn', 'dir-props']] externals = [] for dirname in self.iter_dirs(): prop_file = None for rel_parts in prop_files: filename = joinpath(dirname, *rel_parts) if os.path.isfile(filename): prop_file = filename if prop_file is not None: ext_prop = parse_prop_file(prop_file, 
'svn:externals') #ext_prop should be utf-8 coming from svn:externals ext_prop = decode_as_string(ext_prop, encoding="utf-8") externals.extend(parse_external_prop(ext_prop)) return externals def svn_finder(dirname=''): #combined externals due to common interface #combined externals and entries due to lack of dir_props in 1.7 info = SvnInfo.load(dirname) for path in info.iter_files(): yield path for path in info.iter_externals(): sub_info = SvnInfo.load(path) for sub_path in sub_info.iter_files(): yield sub_path class SVNEntriesFile(object): def __init__(self, data): self.data = data @classmethod def load(class_, base): filename = os.path.join(base, '.svn', 'entries') f = open(filename) try: result = SVNEntriesFile.read(f) finally: f.close() return result @classmethod def read(class_, fileobj): data = fileobj.read() is_xml = data.startswith('<?xml') class_ = [SVNEntriesFileText, SVNEntriesFileXML][is_xml] return class_(data) def parse_revision(self): all_revs = self.parse_revision_numbers() + [0] return max(all_revs) class SVNEntriesFileText(SVNEntriesFile): known_svn_versions = { '1.4.x': 8, '1.5.x': 9, '1.6.x': 10, } def __get_cached_sections(self): return self.sections def get_sections(self): SECTION_DIVIDER = '\f\n' sections = self.data.split(SECTION_DIVIDER) sections = [x for x in map(str.splitlines, sections)] try: # remove the SVN version number from the first line svn_version = int(sections[0].pop(0)) if not svn_version in self.known_svn_versions.values(): log.warn("Unknown subversion verson %d", svn_version) except ValueError: return self.sections = sections self.get_sections = self.__get_cached_sections return self.sections def is_valid(self): return bool(self.get_sections()) def get_url(self): return self.get_sections()[0][4] def parse_revision_numbers(self): revision_line_number = 9 rev_numbers = [ int(section[revision_line_number]) for section in self.get_sections() if (len(section) > revision_line_number and section[revision_line_number]) ] return 
rev_numbers def get_undeleted_records(self): undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete') result = [ section[0] for section in self.get_sections() if undeleted(section) ] return result class SVNEntriesFileXML(SVNEntriesFile): def is_valid(self): return True def get_url(self): "Get repository URL" urlre = re.compile('url="([^"]+)"') return urlre.search(self.data).group(1) def parse_revision_numbers(self): revre = re.compile(r'committed-rev="(\d+)"') return [ int(m.group(1)) for m in revre.finditer(self.data) ] def get_undeleted_records(self): entries_pattern = \ re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I) results = [ unescape(match.group(1)) for match in entries_pattern.finditer(self.data) ] return results if __name__ == '__main__': for name in svn_finder(sys.argv[1]): print(name)<|fim▁end|>
if sys.version_info < (3, 0): #shlex handles NULLs just fine and shlex in 2.7 tries to encode #as ascii automatiically line = line.encode('utf-8')
<|file_name|>collections.py<|end_file_name|><|fim▁begin|>import logging from datetime import datetime from collections import defaultdict from servicelayer.jobs import Job from aleph.core import db, cache from aleph.authz import Authz from aleph.queues import cancel_queue, ingest_entity, get_status from aleph.model import Collection, Entity, Document, Mapping from aleph.model import Permission, Events, EntitySet from aleph.index import collections as index from aleph.index import xref as xref_index from aleph.index import entities as entities_index from aleph.logic.notifications import publish, flush_notifications from aleph.logic.documents import ingest_flush, MODEL_ORIGIN from aleph.logic.aggregator import get_aggregator log = logging.getLogger(__name__) def create_collection(data, authz, sync=False): now = datetime.utcnow() collection = Collection.create(data, authz, created_at=now) if collection.created_at == now: publish( Events.CREATE_COLLECTION, params={"collection": collection}, channels=[collection, authz.role], actor_id=authz.id, ) db.session.commit() return update_collection(collection, sync=sync) def update_collection(collection, sync=False): """Update a collection and re-index.""" Authz.flush() refresh_collection(collection.id) return index.index_collection(collection, sync=sync) def refresh_collection(collection_id): """Operations to execute after updating a collection-related domain object. 
This will refresh stats and flush cache.""" cache.kv.delete( cache.object_key(Collection, collection_id), cache.object_key(Collection, collection_id, "stats"), ) def get_deep_collection(collection): mappings = Mapping.by_collection(collection.id).count() entitysets = EntitySet.type_counts(collection_id=collection.id) return { "statistics": index.get_collection_stats(collection.id), "counts": {"mappings": mappings, "entitysets": entitysets}, "status": get_status(collection), "shallow": False, } def compute_collections(): """Update collection caches, including the global stats cache.""" authz = Authz.from_role(None) schemata = defaultdict(int) countries = defaultdict(int) categories = defaultdict(int) for collection in Collection.all(): compute_collection(collection) if authz.can(collection.id, authz.READ): categories[collection.category] += 1 things = index.get_collection_things(collection.id) for schema, count in things.items(): schemata[schema] += count for country in collection.countries: countries[country] += 1 log.info("Updating global statistics cache...") data = { "collections": sum(categories.values()), "schemata": dict(schemata), "countries": dict(countries), "categories": dict(categories), "things": sum(schemata.values()), } key = cache.key(cache.STATISTICS) cache.set_complex(key, data, expires=cache.EXPIRE) def compute_collection(collection, force=False, sync=False): key = cache.object_key(Collection, collection.id, "stats") if cache.get(key) is not None and not force: return refresh_collection(collection.id) log.info("[%s] Computing statistics...", collection) index.update_collection_stats(collection.id) cache.set(key, datetime.utcnow().isoformat()) index.index_collection(collection, sync=sync)<|fim▁hole|> """Sync up the aggregator from the Aleph domain model.""" log.debug("[%s] Aggregating model...", collection) aggregator.delete(origin=MODEL_ORIGIN) writer = aggregator.bulk() for document in Document.by_collection(collection.id): proxy = 
document.to_proxy(ns=collection.ns) writer.put(proxy, fragment="db", origin=MODEL_ORIGIN) for entity in Entity.by_collection(collection.id): proxy = entity.to_proxy() aggregator.delete(entity_id=proxy.id) writer.put(proxy, fragment="db", origin=MODEL_ORIGIN) writer.flush() def index_aggregator( collection, aggregator, entity_ids=None, skip_errors=False, sync=False ): def _generate(): idx = 0 entities = aggregator.iterate(entity_id=entity_ids, skip_errors=skip_errors) for idx, proxy in enumerate(entities, 1): if idx > 0 and idx % 1000 == 0: log.debug("[%s] Index: %s...", collection, idx) yield proxy log.debug("[%s] Indexed %s entities", collection, idx) entities_index.index_bulk(collection, _generate(), sync=sync) def reingest_collection(collection, job_id=None, index=False, flush=True): """Trigger a re-ingest for all documents in the collection.""" job_id = job_id or Job.random_id() if flush: ingest_flush(collection) for document in Document.by_collection(collection.id): proxy = document.to_proxy(ns=collection.ns) ingest_entity(collection, proxy, job_id=job_id, index=index) def reindex_collection(collection, skip_errors=True, sync=False, flush=False): """Re-index all entities from the model, mappings and aggregator cache.""" from aleph.logic.mapping import map_to_aggregator from aleph.logic.profiles import profile_fragments aggregator = get_aggregator(collection) for mapping in collection.mappings: if mapping.disabled: log.debug("[%s] Skip mapping: %r", collection, mapping) continue try: map_to_aggregator(collection, mapping, aggregator) except Exception: # More or less ignore broken models. 
log.exception("Failed mapping: %r", mapping) aggregate_model(collection, aggregator) profile_fragments(collection, aggregator) if flush: log.debug("[%s] Flushing...", collection) index.delete_entities(collection.id, sync=True) index_aggregator(collection, aggregator, skip_errors=skip_errors, sync=sync) compute_collection(collection, force=True) def delete_collection(collection, keep_metadata=False, sync=False): deleted_at = collection.deleted_at or datetime.utcnow() cancel_queue(collection) aggregator = get_aggregator(collection) aggregator.delete() flush_notifications(collection, sync=sync) index.delete_entities(collection.id, sync=sync) xref_index.delete_xref(collection, sync=sync) Mapping.delete_by_collection(collection.id) EntitySet.delete_by_collection(collection.id, deleted_at) Entity.delete_by_collection(collection.id) Document.delete_by_collection(collection.id) if not keep_metadata: Permission.delete_by_collection(collection.id) collection.delete(deleted_at=deleted_at) db.session.commit() if not keep_metadata: index.delete_collection(collection.id, sync=True) aggregator.drop() refresh_collection(collection.id) Authz.flush() def upgrade_collections(): for collection in Collection.all(deleted=True): if collection.deleted_at is not None: delete_collection(collection, keep_metadata=True, sync=True) else: compute_collection(collection, force=True) # update global cache: compute_collections()<|fim▁end|>
def aggregate_model(collection, aggregator):
<|file_name|>hello.py<|end_file_name|><|fim▁begin|>from helper import greeting<|fim▁hole|><|fim▁end|>
if "__name__" == "__main__": greeting('hello')
<|file_name|>DistrhoPluginLV2export.cpp<|end_file_name|><|fim▁begin|>/* * DISTRHO Plugin Framework (DPF) * Copyright (C) 2012-2014 Filipe Coelho <[email protected]> * * Permission to use, copy, modify, and/or distribute this software for any purpose with * or without fee is hereby granted, provided that the above copyright notice and this * permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD * TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER * IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include "DistrhoPluginInternal.hpp" #include "lv2/atom.h" #include "lv2/buf-size.h" #include "lv2/data-access.h" #include "lv2/instance-access.h" #include "lv2/midi.h" #include "lv2/options.h" #include "lv2/port-props.h" #include "lv2/resize-port.h" #include "lv2/state.h" #include "lv2/time.h" #include "lv2/ui.h" #include "lv2/units.h" #include "lv2/urid.h" #include "lv2/worker.h" #include "lv2/lv2_kxstudio_properties.h" #include "lv2/lv2_programs.h" #include <fstream> #include <iostream> #ifndef DISTRHO_PLUGIN_URI # error DISTRHO_PLUGIN_URI undefined! 
#endif #ifndef DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE # define DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE 2048 #endif #define DISTRHO_LV2_USE_EVENTS_IN (DISTRHO_PLUGIN_HAS_MIDI_INPUT || DISTRHO_PLUGIN_WANT_TIMEPOS || (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI)) #define DISTRHO_LV2_USE_EVENTS_OUT (DISTRHO_PLUGIN_HAS_MIDI_OUTPUT || (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI)) // ----------------------------------------------------------------------- DISTRHO_PLUGIN_EXPORT void lv2_generate_ttl(const char* const basename) { USE_NAMESPACE_DISTRHO // Dummy plugin to get data from d_lastBufferSize = 512; d_lastSampleRate = 44100.0; PluginExporter plugin; d_lastBufferSize = 0; d_lastSampleRate = 0.0; d_string pluginDLL(basename); d_string pluginTTL(pluginDLL + ".ttl"); // --------------------------------------------- { std::cout << "Writing manifest.ttl..."; std::cout.flush(); std::fstream manifestFile("manifest.ttl", std::ios::out); d_string manifestString; manifestString += "@prefix lv2: <" LV2_CORE_PREFIX "> .\n"; manifestString += "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n"; #if DISTRHO_PLUGIN_HAS_UI manifestString += "@prefix ui: <" LV2_UI_PREFIX "> .\n"; #endif manifestString += "\n"; manifestString += "<" DISTRHO_PLUGIN_URI ">\n"; manifestString += " a lv2:Plugin ;\n"; manifestString += " lv2:binary <" + pluginDLL + "." DISTRHO_DLL_EXTENSION "> ;\n"; manifestString += " rdfs:seeAlso <" + pluginTTL + "> .\n"; manifestString += "\n"; #if DISTRHO_PLUGIN_HAS_UI manifestString += "<" DISTRHO_UI_URI ">\n"; # if DISTRHO_OS_HAIKU manifestString += " a ui:BeUI ;\n"; # elif DISTRHO_OS_MAC manifestString += " a ui:CocoaUI ;\n"; # elif DISTRHO_OS_WINDOWS manifestString += " a ui:WindowsUI ;\n"; # else manifestString += " a ui:X11UI ;\n"; # endif # if ! DISTRHO_PLUGIN_WANT_DIRECT_ACCESS d_string pluginUI(pluginDLL); pluginUI.truncate(pluginDLL.rfind("_dsp")); pluginUI += "_ui"; manifestString += " ui:binary <" + pluginUI + "." 
DISTRHO_DLL_EXTENSION "> ;\n"; # else manifestString += " ui:binary <" + pluginDLL + "." DISTRHO_DLL_EXTENSION "> ;\n"; #endif manifestString += " lv2:extensionData ui:idleInterface ,\n"; # if DISTRHO_PLUGIN_WANT_PROGRAMS manifestString += " ui:showInterface ,\n"; manifestString += " <" LV2_PROGRAMS__Interface "> ;\n"; # else manifestString += " ui:showInterface ;\n"; # endif manifestString += " lv2:optionalFeature ui:noUserResize ,\n"; manifestString += " ui:resize ,\n"; manifestString += " ui:touch ;\n"; # if DISTRHO_PLUGIN_WANT_DIRECT_ACCESS manifestString += " lv2:requiredFeature <" LV2_DATA_ACCESS_URI "> ,\n"; manifestString += " <" LV2_INSTANCE_ACCESS_URI "> ,\n"; manifestString += " <" LV2_OPTIONS__options "> ,\n"; # else manifestString += " lv2:requiredFeature <" LV2_OPTIONS__options "> ,\n"; # endif manifestString += " <" LV2_URID__map "> .\n"; #endif manifestFile << manifestString << std::endl; manifestFile.close(); std::cout << " done!" << std::endl; } // --------------------------------------------- { std::cout << "Writing " << pluginTTL << "..."; std::cout.flush(); std::fstream pluginFile(pluginTTL, std::ios::out); d_string pluginString; // header #if DISTRHO_LV2_USE_EVENTS_IN pluginString += "@prefix atom: <" LV2_ATOM_PREFIX "> .\n"; #endif pluginString += "@prefix doap: <http://usefulinc.com/ns/doap#> .\n"; pluginString += "@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"; pluginString += "@prefix lv2: <" LV2_CORE_PREFIX "> .\n"; pluginString += "@prefix rsz: <" LV2_RESIZE_PORT_PREFIX "> .\n"; #if DISTRHO_PLUGIN_HAS_UI pluginString += "@prefix ui: <" LV2_UI_PREFIX "> .\n"; #endif pluginString += "@prefix unit: <" LV2_UNITS_PREFIX "> .\n"; pluginString += "\n"; // plugin pluginString += "<" DISTRHO_PLUGIN_URI ">\n"; #if DISTRHO_PLUGIN_IS_SYNTH pluginString += " a lv2:InstrumentPlugin, lv2:Plugin ;\n"; #else pluginString += " a lv2:Plugin ;\n"; #endif pluginString += "\n"; // extensionData pluginString += " lv2:extensionData <" LV2_STATE__interface "> 
"; #if DISTRHO_PLUGIN_WANT_STATE pluginString += ",\n <" LV2_OPTIONS__interface "> "; pluginString += ",\n <" LV2_WORKER__interface "> ";<|fim▁hole|>#if DISTRHO_PLUGIN_WANT_PROGRAMS pluginString += ",\n <" LV2_PROGRAMS__Interface "> "; #endif pluginString += ";\n\n"; // optionalFeatures #if DISTRHO_PLUGIN_IS_RT_SAFE pluginString += " lv2:optionalFeature <" LV2_CORE__hardRTCapable "> ,\n"; pluginString += " <" LV2_BUF_SIZE__boundedBlockLength "> ;\n"; #else pluginString += " lv2:optionalFeature <" LV2_BUF_SIZE__boundedBlockLength "> ;\n"; #endif pluginString += "\n"; // requiredFeatures pluginString += " lv2:requiredFeature <" LV2_OPTIONS__options "> "; pluginString += ",\n <" LV2_URID__map "> "; #if DISTRHO_PLUGIN_WANT_STATE pluginString += ",\n <" LV2_WORKER__schedule "> "; #endif pluginString += ";\n\n"; // UI #if DISTRHO_PLUGIN_HAS_UI pluginString += " ui:ui <" DISTRHO_UI_URI "> ;\n"; pluginString += "\n"; #endif { uint32_t portIndex = 0; #if DISTRHO_PLUGIN_NUM_INPUTS > 0 for (uint32_t i=0; i < DISTRHO_PLUGIN_NUM_INPUTS; ++i, ++portIndex) { if (i == 0) pluginString += " lv2:port [\n"; else pluginString += " [\n"; pluginString += " a lv2:InputPort, lv2:AudioPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:symbol \"lv2_audio_in_" + d_string(i+1) + "\" ;\n"; pluginString += " lv2:name \"Audio Input " + d_string(i+1) + "\" ;\n"; if (i+1 == DISTRHO_PLUGIN_NUM_INPUTS) pluginString += " ] ;\n\n"; else pluginString += " ] ,\n"; } pluginString += "\n"; #endif #if DISTRHO_PLUGIN_NUM_OUTPUTS > 0 for (uint32_t i=0; i < DISTRHO_PLUGIN_NUM_OUTPUTS; ++i, ++portIndex) { if (i == 0) pluginString += " lv2:port [\n"; else pluginString += " [\n"; pluginString += " a lv2:OutputPort, lv2:AudioPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:symbol \"lv2_audio_out_" + d_string(i+1) + "\" ;\n"; pluginString += " lv2:name \"Audio Output " + d_string(i+1) + "\" ;\n"; if (i+1 == 
DISTRHO_PLUGIN_NUM_OUTPUTS) pluginString += " ] ;\n\n"; else pluginString += " ] ,\n"; } pluginString += "\n"; #endif #if DISTRHO_LV2_USE_EVENTS_IN pluginString += " lv2:port [\n"; pluginString += " a lv2:InputPort, atom:AtomPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"Events Input\" ;\n"; pluginString += " lv2:symbol \"lv2_events_in\" ;\n"; pluginString += " rsz:minimumSize " + d_string(DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE) + " ;\n"; pluginString += " atom:bufferType atom:Sequence ;\n"; # if (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI) pluginString += " atom:supports <" LV2_ATOM__String "> ;\n"; # endif # if DISTRHO_PLUGIN_HAS_MIDI_INPUT pluginString += " atom:supports <" LV2_MIDI__MidiEvent "> ;\n"; # endif # if DISTRHO_PLUGIN_WANT_TIMEPOS pluginString += " atom:supports <" LV2_TIME__Position "> ;\n"; # endif pluginString += " ] ;\n\n"; ++portIndex; #endif #if DISTRHO_LV2_USE_EVENTS_OUT pluginString += " lv2:port [\n"; pluginString += " a lv2:OutputPort, atom:AtomPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"Events Output\" ;\n"; pluginString += " lv2:symbol \"lv2_events_out\" ;\n"; pluginString += " rsz:minimumSize " + d_string(DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE) + " ;\n"; pluginString += " atom:bufferType atom:Sequence ;\n"; # if (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI) pluginString += " atom:supports <" LV2_ATOM__String "> ;\n"; # endif # if DISTRHO_PLUGIN_HAS_MIDI_OUTPUT pluginString += " atom:supports <" LV2_MIDI__MidiEvent "> ;\n"; # endif pluginString += " ] ;\n\n"; ++portIndex; #endif #if DISTRHO_PLUGIN_WANT_LATENCY pluginString += " lv2:port [\n"; pluginString += " a lv2:OutputPort, lv2:ControlPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"Latency\" ;\n"; pluginString += " lv2:symbol \"lv2_latency\" ;\n"; pluginString += " lv2:designation lv2:latency ;\n"; pluginString += 
" lv2:portProperty lv2:reportsLatency, lv2:integer ;\n"; pluginString += " ] ;\n\n"; ++portIndex; #endif for (uint32_t i=0, count=plugin.getParameterCount(); i < count; ++i, ++portIndex) { if (i == 0) pluginString += " lv2:port [\n"; else pluginString += " [\n"; if (plugin.isParameterOutput(i)) pluginString += " a lv2:OutputPort, lv2:ControlPort ;\n"; else pluginString += " a lv2:InputPort, lv2:ControlPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"" + plugin.getParameterName(i) + "\" ;\n"; // symbol { d_string symbol(plugin.getParameterSymbol(i)); if (symbol.isEmpty()) symbol = "lv2_port_" + d_string(portIndex-1); pluginString += " lv2:symbol \"" + symbol + "\" ;\n"; } // ranges { const ParameterRanges& ranges(plugin.getParameterRanges(i)); if (plugin.getParameterHints(i) & kParameterIsInteger) { pluginString += " lv2:default " + d_string(int(plugin.getParameterValue(i))) + " ;\n"; pluginString += " lv2:minimum " + d_string(int(ranges.min)) + " ;\n"; pluginString += " lv2:maximum " + d_string(int(ranges.max)) + " ;\n"; } else { pluginString += " lv2:default " + d_string(plugin.getParameterValue(i)) + " ;\n"; pluginString += " lv2:minimum " + d_string(ranges.min) + " ;\n"; pluginString += " lv2:maximum " + d_string(ranges.max) + " ;\n"; } } // unit { const d_string& unit(plugin.getParameterUnit(i)); if (! 
unit.isEmpty()) { if (unit == "db" || unit == "dB") { pluginString += " unit:unit unit:db ;\n"; } else if (unit == "hz" || unit == "Hz") { pluginString += " unit:unit unit:hz ;\n"; } else if (unit == "khz" || unit == "kHz") { pluginString += " unit:unit unit:khz ;\n"; } else if (unit == "mhz" || unit == "mHz") { pluginString += " unit:unit unit:mhz ;\n"; } else if (unit == "%") { pluginString += " unit:unit unit:pc ;\n"; } else { pluginString += " unit:unit [\n"; pluginString += " a unit:Unit ;\n"; pluginString += " unit:name \"" + unit + "\" ;\n"; pluginString += " unit:symbol \"" + unit + "\" ;\n"; pluginString += " unit:render \"%f " + unit + "\" ;\n"; pluginString += " ] ;\n"; } } } // hints { const uint32_t hints(plugin.getParameterHints(i)); if (hints & kParameterIsBoolean) pluginString += " lv2:portProperty lv2:toggled ;\n"; if (hints & kParameterIsInteger) pluginString += " lv2:portProperty lv2:integer ;\n"; if (hints & kParameterIsLogarithmic) pluginString += " lv2:portProperty <" LV2_PORT_PROPS__logarithmic "> ;\n"; if ((hints & kParameterIsAutomable) == 0 && ! plugin.isParameterOutput(i)) { pluginString += " lv2:portProperty <" LV2_PORT_PROPS__expensive "> ,\n"; pluginString += " <" LV2_KXSTUDIO_PROPERTIES__NonAutomable "> ;\n"; } } if (i+1 == count) pluginString += " ] ;\n\n"; else pluginString += " ] ,\n"; } } pluginString += " doap:name \"" + d_string(plugin.getName()) + "\" ;\n"; pluginString += " doap:maintainer [ foaf:name \"" + d_string(plugin.getMaker()) + "\" ] .\n"; pluginFile << pluginString << std::endl; pluginFile.close(); std::cout << " done!" << std::endl; } }<|fim▁end|>
#endif
<|file_name|>daterangepicker.js<|end_file_name|><|fim▁begin|>/** * @version: 1.0.1 * @author: Dan Grossman http://www.dangrossman.info/ * @date: 2012-08-20 * @copyright: Copyright (c) 2012 Dan Grossman. All rights reserved. * @license: Licensed under Apache License v2.0. See http://www.apache.org/licenses/LICENSE-2.0 * @website: http://www.improvely.com/ */ !function ($) { <|fim▁hole|> var DateRangePicker = function (element, options, cb) { var hasOptions = typeof options == 'object' var localeObject; //state this.startDate = Date.today(); this.endDate = Date.today(); this.minDate = false; this.maxDate = false; this.changed = false; this.ranges = {}; this.opens = 'right'; this.cb = function () { }; this.format = 'MM/dd/yyyy'; this.separator = ' - '; this.showWeekNumbers = false; this.buttonClasses = ['btn-primary']; this.locale = { applyLabel: 'Apply', fromLabel: 'From', toLabel: 'To', weekLabel: 'W', customRangeLabel: 'Custom Range', daysOfWeek: Date.CultureInfo.shortestDayNames, monthNames: Date.CultureInfo.monthNames, firstDay: 0 }; localeObject = this.locale; this.leftCalendar = { month: Date.today().set({ day: 1, month: this.startDate.getMonth(), year: this.startDate.getFullYear() }), calendar: Array() }; this.rightCalendar = { month: Date.today().set({ day: 1, month: this.endDate.getMonth(), year: this.endDate.getFullYear() }), calendar: Array() }; // by default, the daterangepicker element is placed at the bottom of HTML body this.parentEl = 'body'; //element that triggered the date range picker this.element = $(element); if (this.element.hasClass('pull-right')) this.opens = 'left'; if (this.element.is('input')) { this.element.on({ click: $.proxy(this.show, this), focus: $.proxy(this.show, this) }); } else { this.element.on('click', $.proxy(this.show, this)); } if (hasOptions) { if(typeof options.locale == 'object') { $.each(localeObject, function (property, value) { localeObject[property] = options.locale[property] || value; }); } } var DRPTemplate = '<div 
class="daterangepicker dropdown-menu">' + '<div class="calendar left"></div>' + '<div class="calendar right"></div>' + '<div class="ranges">' + '<div class="range_inputs">' + '<div>' + '<label for="daterangepicker_start">' + this.locale.fromLabel + '</label>' + '<input class="input-mini form-control" type="text" name="daterangepicker_start" value="" disabled="disabled" />' + '</div>' + '<div>' + '<label for="daterangepicker_end">' + this.locale.toLabel + '</label>' + '<input class="input-mini form-control" type="text" name="daterangepicker_end" value="" disabled="disabled" />' + '</div>' + '<button class="btn btn-small" disabled="disabled">' + this.locale.applyLabel + '</button>' + '</div>' + '</div>' + '</div>'; this.parentEl = (hasOptions && options.parentEl && $(options.parentEl)) || $(this.parentEl); //the date range picker this.container = $(DRPTemplate).appendTo(this.parentEl); if (hasOptions) { if (typeof options.format == 'string') this.format = options.format; if (typeof options.separator == 'string') this.separator = options.separator; if (typeof options.startDate == 'string') this.startDate = Date.parse(options.startDate, this.format); if (typeof options.endDate == 'string') this.endDate = Date.parse(options.endDate, this.format); if (typeof options.minDate == 'string') this.minDate = Date.parse(options.minDate, this.format); if (typeof options.maxDate == 'string') this.maxDate = Date.parse(options.maxDate, this.format); if (typeof options.startDate == 'object') this.startDate = options.startDate; if (typeof options.endDate == 'object') this.endDate = options.endDate; if (typeof options.minDate == 'object') this.minDate = options.minDate; if (typeof options.maxDate == 'object') this.maxDate = options.maxDate; if (typeof options.ranges == 'object') { for (var range in options.ranges) { var start = options.ranges[range][0]; var end = options.ranges[range][1]; if (typeof start == 'string') start = Date.parse(start); if (typeof end == 'string') end = 
Date.parse(end); // If we have a min/max date set, bound this range // to it, but only if it would otherwise fall // outside of the min/max. if (this.minDate && start < this.minDate) start = this.minDate; if (this.maxDate && end > this.maxDate) end = this.maxDate; // If the end of the range is before the minimum (if min is set) OR // the start of the range is after the max (also if set) don't display this // range option. if ((this.minDate && end < this.minDate) || (this.maxDate && start > this.maxDate)) { continue; } this.ranges[range] = [start, end]; } var list = '<ul>'; for (var range in this.ranges) { list += '<li>' + range + '</li>'; } list += '<li>' + this.locale.customRangeLabel + '</li>'; list += '</ul>'; this.container.find('.ranges').prepend(list); } // update day names order to firstDay if (typeof options.locale == 'object') { if (typeof options.locale.firstDay == 'number') { this.locale.firstDay = options.locale.firstDay; var iterator = options.locale.firstDay; while (iterator > 0) { this.locale.daysOfWeek.push(this.locale.daysOfWeek.shift()); iterator--; } } } if (typeof options.opens == 'string') this.opens = options.opens; if (typeof options.showWeekNumbers == 'boolean') { this.showWeekNumbers = options.showWeekNumbers; } if (typeof options.buttonClasses == 'string') { this.buttonClasses = [options.buttonClasses]; } if (typeof options.buttonClasses == 'object') { this.buttonClasses = options.buttonClasses; } } //apply CSS classes to buttons var c = this.container; $.each(this.buttonClasses, function (idx, val) { c.find('button').addClass(val); }); if (this.opens == 'right') { //swap calendar positions var left = this.container.find('.calendar.left'); var right = this.container.find('.calendar.right'); left.removeClass('left').addClass('right'); right.removeClass('right').addClass('left'); } if (typeof options == 'undefined' || typeof options.ranges == 'undefined') this.container.find('.calendar').show(); if (typeof cb == 'function') this.cb = cb; 
this.container.addClass('opens' + this.opens); //event listeners this.container.on('mousedown', $.proxy(this.mousedown, this)); this.container.find('.calendar').on('click', '.prev', $.proxy(this.clickPrev, this)); this.container.find('.calendar').on('click', '.next', $.proxy(this.clickNext, this)); this.container.find('.ranges').on('click', 'button', $.proxy(this.clickApply, this)); this.container.find('.calendar').on('click', 'td.available', $.proxy(this.clickDate, this)); this.container.find('.calendar').on('mouseenter', 'td.available', $.proxy(this.enterDate, this)); this.container.find('.calendar').on('mouseleave', 'td.available', $.proxy(this.updateView, this)); this.container.find('.ranges').on('click', 'li', $.proxy(this.clickRange, this)); this.container.find('.ranges').on('mouseenter', 'li', $.proxy(this.enterRange, this)); this.container.find('.ranges').on('mouseleave', 'li', $.proxy(this.updateView, this)); this.element.on('keyup', $.proxy(this.updateFromControl, this)); this.updateView(); this.updateCalendars(); }; DateRangePicker.prototype = { constructor: DateRangePicker, mousedown: function (e) { e.stopPropagation(); e.preventDefault(); }, updateView: function () { this.leftCalendar.month.set({ month: this.startDate.getMonth(), year: this.startDate.getFullYear() }); this.rightCalendar.month.set({ month: this.endDate.getMonth(), year: this.endDate.getFullYear() }); this.container.find('input[name=daterangepicker_start]').val(this.startDate.toString(this.format)); this.container.find('input[name=daterangepicker_end]').val(this.endDate.toString(this.format)); if (this.startDate.equals(this.endDate) || this.startDate.isBefore(this.endDate)) { this.container.find('button').removeAttr('disabled'); } else { this.container.find('button').attr('disabled', 'disabled'); } }, updateFromControl: function () { if (!this.element.is('input')) return; var dateString = this.element.val().split(this.separator); var start = Date.parseExact(dateString[0], this.format); 
var end = Date.parseExact(dateString[1], this.format); if (start == null || end == null) return; if (end.isBefore(start)) return; this.startDate = start; this.endDate = end; this.updateView(); this.cb(this.startDate, this.endDate); this.updateCalendars(); }, notify: function () { this.updateView(); if (this.element.is('input')) { this.element.val(this.startDate.toString(this.format) + this.separator + this.endDate.toString(this.format)); } this.cb(this.startDate, this.endDate); }, move: function () { var parentOffset = { top: this.parentEl.offset().top - this.parentEl.scrollTop(), left: this.parentEl.offset().left - this.parentEl.scrollLeft() }; if (this.opens == 'left') { this.container.css({ top: this.element.offset().top + this.element.outerHeight(), right: $(window).width() - this.element.offset().left - this.element.outerWidth() - parentOffset.left, left: 'auto' }); } else { this.container.css({ top: this.element.offset().top + this.element.outerHeight(), left: this.element.offset().left - parentOffset.left, right: 'auto' }); } }, show: function (e) { this.container.show(); this.move(); if (e) { e.stopPropagation(); e.preventDefault(); } this.changed = false; $(document).on('mousedown', $.proxy(this.hide, this)); }, hide: function (e) { this.container.hide(); $(document).off('mousedown', this.hide); if (this.changed) { this.changed = false; this.notify(); } }, enterRange: function (e) { var label = e.target.innerHTML; if (label == this.locale.customRangeLabel) { this.updateView(); } else { var dates = this.ranges[label]; this.container.find('input[name=daterangepicker_start]').val(dates[0].toString(this.format)); this.container.find('input[name=daterangepicker_end]').val(dates[1].toString(this.format)); } }, clickRange: function (e) { var label = e.target.innerHTML; if (label == this.locale.customRangeLabel) { this.container.find('.calendar').show(); } else { var dates = this.ranges[label]; this.startDate = dates[0]; this.endDate = dates[1]; 
this.leftCalendar.month.set({ month: this.startDate.getMonth(), year: this.startDate.getFullYear() }); this.rightCalendar.month.set({ month: this.endDate.getMonth(), year: this.endDate.getFullYear() }); this.updateCalendars(); this.changed = true; this.container.find('.calendar').hide(); this.hide(); } }, clickPrev: function (e) { var cal = $(e.target).parents('.calendar'); if (cal.hasClass('left')) { this.leftCalendar.month.add({ months: -1 }); } else { this.rightCalendar.month.add({ months: -1 }); } this.updateCalendars(); }, clickNext: function (e) { var cal = $(e.target).parents('.calendar'); if (cal.hasClass('left')) { this.leftCalendar.month.add({ months: 1 }); } else { this.rightCalendar.month.add({ months: 1 }); } this.updateCalendars(); }, enterDate: function (e) { var title = $(e.target).attr('title'); var row = title.substr(1, 1); var col = title.substr(3, 1); var cal = $(e.target).parents('.calendar'); if (cal.hasClass('left')) { this.container.find('input[name=daterangepicker_start]').val(this.leftCalendar.calendar[row][col].toString(this.format)); } else { this.container.find('input[name=daterangepicker_end]').val(this.rightCalendar.calendar[row][col].toString(this.format)); } }, clickDate: function (e) { var title = $(e.target).attr('title'); var row = title.substr(1, 1); var col = title.substr(3, 1); var cal = $(e.target).parents('.calendar'); if (cal.hasClass('left')) { startDate = this.leftCalendar.calendar[row][col]; endDate = this.endDate; } else { startDate = this.startDate; endDate = this.rightCalendar.calendar[row][col]; } cal.find('td').removeClass('active'); if (startDate.equals(endDate) || startDate.isBefore(endDate)) { $(e.target).addClass('active'); if (!startDate.equals(this.startDate) || !endDate.equals(this.endDate)) this.changed = true; this.startDate = startDate; this.endDate = endDate; } this.leftCalendar.month.set({ month: this.startDate.getMonth(), year: this.startDate.getFullYear() }); this.rightCalendar.month.set({ month: 
this.endDate.getMonth(), year: this.endDate.getFullYear() }); this.updateCalendars(); }, clickApply: function (e) { this.hide(); }, updateCalendars: function () { this.leftCalendar.calendar = this.buildCalendar(this.leftCalendar.month.getMonth(), this.leftCalendar.month.getFullYear()); this.rightCalendar.calendar = this.buildCalendar(this.rightCalendar.month.getMonth(), this.rightCalendar.month.getFullYear()); this.container.find('.calendar.left').html(this.renderCalendar(this.leftCalendar.calendar, this.startDate, this.minDate, this.endDate)); this.container.find('.calendar.right').html(this.renderCalendar(this.rightCalendar.calendar, this.endDate, this.startDate, this.maxDate)); }, buildCalendar: function (month, year) { var firstDay = Date.today().set({ day: 1, month: month, year: year }); var lastMonth = firstDay.clone().add(-1).day().getMonth(); var lastYear = firstDay.clone().add(-1).day().getFullYear(); var daysInMonth = Date.getDaysInMonth(year, month); var daysInLastMonth = Date.getDaysInMonth(lastYear, lastMonth); var dayOfWeek = firstDay.getDay(); //initialize a 6 rows x 7 columns array for the calendar var calendar = Array(); for (var i = 0; i < 6; i++) { calendar[i] = Array(); } //populate the calendar with date objects var startDay = daysInLastMonth - dayOfWeek + this.locale.firstDay + 1; if (startDay > daysInLastMonth) startDay -= 7; if (dayOfWeek == this.locale.firstDay) startDay = daysInLastMonth - 6; var curDate = Date.today().set({ day: startDay, month: lastMonth, year: lastYear }); for (var i = 0, col = 0, row = 0; i < 42; i++, col++, curDate = curDate.clone().add(1).day()) { if (i > 0 && col % 7 == 0) { col = 0; row++; } calendar[row][col] = curDate; } return calendar; }, renderCalendar: function (calendar, selected, minDate, maxDate) { var html = '<table class="table-condensed">'; html += '<thead>'; html += '<tr>'; // add empty cell for week number if (this.showWeekNumbers) html += '<th></th>'; if (!minDate || minDate < calendar[1][1]) { html 
+= '<th class="prev available"><i class="icon-arrow-left"></i></th>'; } else { html += '<th></th>'; } html += '<th colspan="5" style="width: auto">' + this.locale.monthNames[calendar[1][1].getMonth()] + calendar[1][1].toString(" yyyy") + '</th>'; if (!maxDate || maxDate > calendar[1][1]) { html += '<th class="next available"><i class="icon-arrow-right"></i></th>'; } else { html += '<th></th>'; } html += '</tr>'; html += '<tr>'; // add week number label if (this.showWeekNumbers) html += '<th class="week">' + this.locale.weekLabel + '</th>'; $.each(this.locale.daysOfWeek, function (index, dayOfWeek) { html += '<th>' + dayOfWeek + '</th>'; }); html += '</tr>'; html += '</thead>'; html += '<tbody>'; for (var row = 0; row < 6; row++) { html += '<tr>'; // add week number if (this.showWeekNumbers) html += '<td class="week">' + calendar[row][0].getWeek() + '</td>'; for (var col = 0; col < 7; col++) { var cname = 'available '; cname += (calendar[row][col].getMonth() == calendar[1][1].getMonth()) ? '' : 'off'; // Normalise the time so the comparison won't fail selected.setHours(0,0,0,0); if ( (minDate && calendar[row][col] < minDate) || (maxDate && calendar[row][col] > maxDate)) { cname = 'off disabled'; } else if (calendar[row][col].equals(selected)) { cname += 'active'; } var title = 'r' + row + 'c' + col; html += '<td class="' + cname + '" title="' + title + '">' + calendar[row][col].getDate() + '</td>'; } html += '</tr>'; } html += '</tbody>'; html += '</table>'; return html; } }; $.fn.daterangepicker = function (options, cb) { this.each(function() { var el = $(this); if (!el.data('daterangepicker')) el.data('daterangepicker', new DateRangePicker(el, options, cb)); }); return this; }; } (window.jQuery);<|fim▁end|>
<|file_name|>babylon.geometryBufferRenderer.ts<|end_file_name|><|fim▁begin|>module BABYLON { export class GeometryBufferRenderer { private _scene: Scene; private _multiRenderTarget: MultiRenderTarget; private _effect: Effect; private _ratio: number; private _cachedDefines: string; private _enablePosition: boolean = false; public set renderList(meshes: Mesh[]) { this._multiRenderTarget.renderList = meshes; } public get isSupported(): boolean { return this._multiRenderTarget.isSupported; } public get enablePosition(): boolean { return this._enablePosition; } public set enablePosition(enable: boolean) { this._enablePosition = enable; this.dispose(); this._createRenderTargets(); } constructor(scene: Scene, ratio: number = 1) { this._scene = scene; this._ratio = ratio; // Render target this._createRenderTargets(); } public isReady(subMesh: SubMesh, useInstances: boolean): boolean { var material: any = subMesh.getMaterial(); if (material && material.disableDepthWrite) { return false; } var defines = []; var attribs = [VertexBuffer.PositionKind, VertexBuffer.NormalKind]; var mesh = subMesh.getMesh(); // Alpha test if (material && material.needAlphaTesting()) { defines.push("#define ALPHATEST"); if (mesh.isVerticesDataPresent(VertexBuffer.UVKind)) { attribs.push(VertexBuffer.UVKind); defines.push("#define UV1"); } if (mesh.isVerticesDataPresent(VertexBuffer.UV2Kind)) { attribs.push(VertexBuffer.UV2Kind); defines.push("#define UV2"); } } // Buffers if (this._enablePosition) { defines.push("#define POSITION"); } // Bones if (mesh.useBones && mesh.computeBonesUsingShaders) { <|fim▁hole|> attribs.push(VertexBuffer.MatricesWeightsKind); if (mesh.numBoneInfluencers > 4) { attribs.push(VertexBuffer.MatricesIndicesExtraKind); attribs.push(VertexBuffer.MatricesWeightsExtraKind); } defines.push("#define NUM_BONE_INFLUENCERS " + mesh.numBoneInfluencers); defines.push("#define BonesPerMesh " + (mesh.skeleton ? 
mesh.skeleton.bones.length + 1 : 0)); } else { defines.push("#define NUM_BONE_INFLUENCERS 0"); } // Instances if (useInstances) { defines.push("#define INSTANCES"); attribs.push("world0"); attribs.push("world1"); attribs.push("world2"); attribs.push("world3"); } // Get correct effect var join = defines.join("\n"); if (this._cachedDefines !== join) { this._cachedDefines = join; this._effect = this._scene.getEngine().createEffect("geometry", attribs, ["world", "mBones", "viewProjection", "diffuseMatrix", "view"], ["diffuseSampler"], join, undefined, undefined, undefined, { buffersCount: this._enablePosition ? 3 : 2 }); } return this._effect.isReady(); } public getGBuffer(): MultiRenderTarget { return this._multiRenderTarget; } public get samples(): number { return this._multiRenderTarget.samples; } public set samples(value: number) { this._multiRenderTarget.samples = value; } // Methods public dispose(): void { this.getGBuffer().dispose(); } private _createRenderTargets(): void { var engine = this._scene.getEngine(); var count = this._enablePosition ? 
3 : 2; this._multiRenderTarget = new MultiRenderTarget("gBuffer", { width: engine.getRenderWidth() * this._ratio, height: engine.getRenderHeight() * this._ratio }, count, this._scene, { generateMipMaps: false, generateDepthTexture: true, defaultType: Engine.TEXTURETYPE_FLOAT }); if (!this.isSupported) { return; } this._multiRenderTarget.wrapU = Texture.CLAMP_ADDRESSMODE; this._multiRenderTarget.wrapV = Texture.CLAMP_ADDRESSMODE; this._multiRenderTarget.refreshRate = 1; this._multiRenderTarget.renderParticles = false; this._multiRenderTarget.renderList = null; // set default depth value to 1.0 (far away) this._multiRenderTarget.onClearObservable.add((engine: Engine) => { engine.clear(new Color4(0.0, 0.0, 0.0, 1.0), true, true, true); }); // Custom render function var renderSubMesh = (subMesh: SubMesh): void => { var mesh = subMesh.getRenderingMesh(); var scene = this._scene; var engine = scene.getEngine(); let material = subMesh.getMaterial(); if (!material) { return; } // Culling engine.setState(material.backFaceCulling, 0, false, scene.useRightHandedSystem); // Managing instances var batch = mesh._getInstancesRenderList(subMesh._id); if (batch.mustReturn) { return; } var hardwareInstancedRendering = (engine.getCaps().instancedArrays) && (batch.visibleInstances[subMesh._id] !== null); if (this.isReady(subMesh, hardwareInstancedRendering)) { engine.enableEffect(this._effect); mesh._bind(subMesh, this._effect, Material.TriangleFillMode); this._effect.setMatrix("viewProjection", scene.getTransformMatrix()); this._effect.setMatrix("view", scene.getViewMatrix()); // Alpha test if (material && material.needAlphaTesting()) { var alphaTexture = material.getAlphaTestTexture(); if (alphaTexture) { this._effect.setTexture("diffuseSampler", alphaTexture); this._effect.setMatrix("diffuseMatrix", alphaTexture.getTextureMatrix()); } } // Bones if (mesh.useBones && mesh.computeBonesUsingShaders && mesh.skeleton) { this._effect.setMatrices("mBones", 
mesh.skeleton.getTransformMatrices(mesh)); } // Draw mesh._processRendering(subMesh, this._effect, Material.TriangleFillMode, batch, hardwareInstancedRendering, (isInstance, world) => this._effect.setMatrix("world", world)); } }; this._multiRenderTarget.customRenderFunction = (opaqueSubMeshes: SmartArray<SubMesh>, alphaTestSubMeshes: SmartArray<SubMesh>, transparentSubMeshes: SmartArray<SubMesh>, depthOnlySubMeshes: SmartArray<SubMesh>): void => { var index; if (depthOnlySubMeshes.length) { engine.setColorWrite(false); for (index = 0; index < depthOnlySubMeshes.length; index++) { renderSubMesh(depthOnlySubMeshes.data[index]); } engine.setColorWrite(true); } for (index = 0; index < opaqueSubMeshes.length; index++) { renderSubMesh(opaqueSubMeshes.data[index]); } for (index = 0; index < alphaTestSubMeshes.length; index++) { renderSubMesh(alphaTestSubMeshes.data[index]); } }; } } }<|fim▁end|>
attribs.push(VertexBuffer.MatricesIndicesKind);
<|file_name|>premium.js<|end_file_name|><|fim▁begin|>jQuery( document ).ready( function() { jQuery( '#lightslider' ).lightSlider({ item: 1, autoWidth: false, slideMove: 1, // slidemove will be 1 if loop is true slideMargin: 10, addClass: '', mode: "slide", useCSS: true, cssEasing: 'ease', //'cubic-bezier(0.25, 0, 0.25, 1)',// easing: 'linear', //'for jquery animation',//// speed: 400, //ms' auto: true, loop: true, slideEndAnimation: true, pause: 5000, keyPress: false, controls: true, prevHtml: '<span class="dashicons dashicons-arrow-left-alt2">', nextHtml: '<span class="dashicons dashicons-arrow-right-alt2">', rtl:false, adaptiveHeight:false,<|fim▁hole|> vThumbWidth:100, thumbItem:10, pager: true, gallery: false, galleryMargin: 5, thumbMargin: 5, currentPagerPosition: 'middle', enableTouch:true, enableDrag:true, freeMove:true, swipeThreshold: 40, responsive : [], onBeforeStart: function (el) {}, onSliderLoad: function (el) {}, onBeforeSlide: function (el) {}, onAfterSlide: function (el) {}, onBeforeNextSlide: function (el) {}, onBeforePrevSlide: function (el) {} }); });<|fim▁end|>
vertical:false, verticalHeight:500,
<|file_name|>vote_genesis.rs<|end_file_name|><|fim▁begin|>//this module spits out a structured vote as json data //save the vote to a file use safex::genesis::key_generation::KeyPair; use utils::get_address_methods::OmniList; use utils::dirs::{make_app_root_dir, touch}; use voting::poll_genesis::PollRound; use voting::validate_genesis::VotingOutcome; use rustc_serialize::{Decodable, Decoder}; use rustc_serialize::json::{self, ToJson, Json}; use bitcoin::util::hash::Sha256dHash; use std::error::Error; use std::fs; use std::fs::File; use std::path::Path; use std::env; use std::io::Write; use std::io; use std::fs::OpenOptions; use std::io::Read; use std::io::{BufRead}; pub struct VotePersona { voter_keys: KeyPair, voting_round: VoteRound, }<|fim▁hole|> pub fn import_keys() -> VotePersona { println!("input your private key"); let mut input2 = String::new(); let stdin2 = io::stdin(); stdin2.lock().read_line(&mut input2).unwrap(); let trimmed = input2.trim_right_matches("\n"); let persona = VotePersona::persona_fromstring(trimmed.to_string()); persona } pub fn persona_fromstring(secret: String) -> VotePersona { let new_keys = KeyPair::keypair_frombase64(secret); let votings = VoteRound::new(); VotePersona { voter_keys: new_keys, voting_round: votings, } } pub fn return_keys(&self) -> &KeyPair { &self.voter_keys } } #[derive(Clone, RustcDecodable, RustcEncodable)] pub struct VoteHash { pub poll_hash: Vec<u8>, pub vote_message: String, pub vote_msgindex: i32, pub vote_publickey: String, } impl VoteHash { pub fn return_hash(&self) -> String { let encoded = json::encode(&self).unwrap(); let the_sha = Sha256dHash::from_data(&encoded.as_bytes()); the_sha.to_string() } } #[derive(RustcDecodable, RustcEncodable)] pub struct VoteRound { pub poll_hash: Vec<u8>, pub vote_hash: Vec<u8>, pub vote_message: String, pub vote_msgindex: i32, pub vote_signature: Vec<u8>, pub vote_publickey: String, } impl VoteRound { pub fn new() -> VoteRound { VoteRound { poll_hash: Vec::new(), vote_hash: 
Vec::new(), vote_message: String::new(), vote_msgindex: 0, vote_signature: Vec::new(), vote_publickey: String::new(), } } ///form a vote taking a poll json string, and a VotePersona pub fn from_poll(poll_round: String, persona: VotePersona) -> VoteRound { //get the poll's hash //need to validate the poll contents as well let poll = PollRound::poll_fromjson(poll_round); let poll_hash = poll.return_pollhash(); let mut pollhash: Vec<u8> = Vec::new(); for a in poll_hash.iter() { pollhash.push(*a); } let pollhash_clone = pollhash.clone(); let poll_choices = poll.return_pollchoices(); let vote_index = VoteRound::select_answer(poll_choices); let vote_string = poll_choices[vote_index as usize].to_string(); let vstring_clone = vote_string.clone(); let keys = persona.voter_keys; let pk_string = KeyPair::address_base58(&keys.public); let pkstr_clone = pk_string.clone(); let vote_hash = VoteHash { poll_hash: pollhash, vote_message: vote_string, vote_msgindex: vote_index, vote_publickey: pk_string, }; let vote_hash = vote_hash.return_hash(); let vhash_clone = vote_hash.clone(); let vote_signature = KeyPair::sign(&keys.secret, vote_hash.into_bytes()); let the_vote = VoteRound { poll_hash: pollhash_clone, vote_hash: vhash_clone.into_bytes(), vote_message: vstring_clone, vote_msgindex: vote_index, vote_signature: vote_signature, vote_publickey: pkstr_clone, }; the_vote //let poll_data: PollRound = json::decode(&poll_round).unwrap(); //let poll_hash = } ///forms a vote using a VotePersona import keys pub fn form_vote() { let persona = VotePersona::import_keys(); println!("please enter path of the poll you intend to vote on"); let mut path = String::new(); let stdin = io::stdin(); stdin.lock().read_line(&mut path).unwrap(); let path_trim = path.trim_right_matches("\n"); let path = Path::new(&path_trim); let display = "a"; let mut file = match OpenOptions::new().read(true).write(false).open(path) { // The `description` method of `io::Error` returns a string that // describes the 
error Err(why) => panic!("couldn't open {}: {}", display, Error::description(&why)), Ok(file) => file, }; let mut file_string = String::new(); match file.read_to_string(&mut file_string) { Err(why) => panic!("couldn't read {}: {}", display, Error::description(&why)), Ok(_) => println!("ok"), } let the_poll: PollRound = json::decode(&file_string).unwrap(); let key_hash160 = KeyPair::address_base58(&persona.voter_keys.public); let key_hashclone = key_hash160.clone(); let addresses = the_poll.return_eligibleaddresses(); if addresses.check_existence(key_hash160) == true { let vote = VoteRound::from_poll(the_poll.return_jsonstring(), persona); vote.write_vote(); } else { println!("you have the wrong kind of key"); } } ///helper function to accept answers from a poll through commandline by index pub fn select_answer(poll_choices: &[String]) -> i32 { println!("choices are: "); let mut index = 0; for choice in poll_choices.iter() { println!("index {:?}, {:?}", index, choice); index += 1; } println!("enter the index number of your selection"); let mut input2 = String::new(); let stdin2 = io::stdin(); stdin2.lock().read_line(&mut input2).unwrap(); let trimmed = input2.trim_right_matches("\n"); let the_index: i32 = trimmed.parse().ok().expect("invalid input"); the_index } ///writes the vote to a file pub fn write_vote(&self) { let mut the_home_dir = String::new(); let home_dirclone = the_home_dir.clone(); match env::home_dir() { Some(ref p) => the_home_dir = p.display().to_string(), None => println!("Impossible to get your home dir!") } let vote_hash = self.return_votehash(); let mut votehash: Vec<u8> = Vec::new(); for a in vote_hash.iter() { votehash.push(*a); } let hash_path = String::from_utf8(votehash).unwrap(); let path_string = String::from("/make_votes/"); let app_root = home_dirclone + "/make_votes/"; make_app_root_dir(app_root); let path_string2 = path_string + &hash_path; let path_string3 = path_string2 + ".vote"; let path_string4 = the_home_dir + &path_string3; let 
path = Path::new(&path_string4); println!("{:?}", path); touch(&path).unwrap_or_else(|why| { println!("! {:?}", why.kind()); }); let display = "a"; let mut file = match OpenOptions::new().read(true).write(true).open(path) { // The `description` method of `io::Error` returns a string that // describes the error Err(why) => panic!("couldn't open {}: {}", display, Error::description(&why)), Ok(file) => file, }; let encoded = VoteRound::return_jsonstring(self); let json_str = encoded.to_string(); file.write_all(&encoded.as_bytes()).unwrap(); } ///returns a json encoded string from the VoteRound struct pub fn return_jsonstring(&self) -> String { let encoded = json::encode(&self).unwrap(); encoded } ///returns a VoteRound struct based on a json encoded string pub fn vote_fromjson(json: String) -> VoteRound { let vote_data: VoteRound = json::decode(&json).unwrap(); vote_data } ///returns the vote hash from the VoteRound struct pub fn return_votehash(&self) -> &[u8] { &self.vote_hash[..] } ///returns the poll hash from the VoteRound struct pub fn return_pollhash(&self) -> &[u8] { &self.poll_hash[..] 
} ///returns the signature from the VoteRound struct pub fn return_signature(&self) -> &[u8] { &self.vote_signature } ///returns the string of the vote answer from the poll pub fn return_votemsg(&self) -> String { let our_string = self.vote_message.to_string(); our_string } ///returns the index of the vote as per the poll pub fn return_voteindex(&self) -> i32 { let mut int = 0; int += self.vote_msgindex; int } ///returns the index of the vote as per the poll pub fn return_votecount(&self, list: &OmniList) -> i32 { list.return_balance(self.vote_publickey.to_string()) } ///returns a VoteRound from a file path pub fn return_votefromfile(path: &Path) -> VoteRound { let display = "a"; let mut file = match OpenOptions::new().read(true).write(false).open(path) { // The `description` method of `io::Error` returns a string that // describes the error Err(why) => panic!("couldn't open {}: {}", display, Error::description(&why)), Ok(file) => file, }; let mut file_string = String::new(); match file.read_to_string(&mut file_string) { Err(why) => panic!("couldn't read {}: {}", display, Error::description(&why)), Ok(_) => println!("ok"), } let the_vote: VoteRound = json::decode(&file_string).unwrap(); the_vote } }<|fim▁end|>
impl VotePersona {
<|file_name|>test_warnings.rs<|end_file_name|><|fim▁begin|>use robotparser::parser::{parse_robots_txt, WarningReason}; use std::convert::From; use url::{Host, Origin}; #[derive(PartialEq, Eq, Debug, Clone)] enum WarningReasonKind { InvalidDirectiveFormat, DirectiveKeyIsEmpty, UnsupportedDirectiveKey, UserAgentCannotBeEmpty, DirectiveWithoutUserAgent, ParseCrawlDelayError, WrongRequestRateFormat, ParseRequestRate, ParseUrl, WrongCleanParamFormat, IgnoredCleanParams, WrongPathFormat, } fn validate_warnings(input: &str, expected_warnings: &[WarningReasonKind]) { let host = Host::Domain("python.org".into()); let origin = Origin::Tuple("http".into(), host, 80); let warnings = parse_robots_txt(origin, &input).get_warnings().to_vec(); assert_eq!(warnings.len(), expected_warnings.len()); for (warning, expected_warning) in warnings.iter().zip(expected_warnings.iter()) { let warning: WarningReasonKind = warning.get_reason().into(); assert_eq!(expected_warning.clone(), warning); } } #[test] fn test_warning_invalid_directive_format() { let input = "`"; validate_warnings(input, &[WarningReasonKind::InvalidDirectiveFormat]); let input = " \t ` \t "; validate_warnings(input, &[WarningReasonKind::InvalidDirectiveFormat]); } #[test] fn test_warning_directive_key_is_empty() { let input = ":"; validate_warnings(input, &[WarningReasonKind::DirectiveKeyIsEmpty]); } #[test] fn test_warning_supported_directive_key() { let input = "X-Directive:"; validate_warnings(input, &[WarningReasonKind::UnsupportedDirectiveKey]); let input = "\t X-Directive\t :\t "; validate_warnings(input, &[WarningReasonKind::UnsupportedDirectiveKey]); } #[test] fn test_warning_user_agent_cannot_be_empty() { let input = "User-Agent:";<|fim▁hole|> validate_warnings(input, &[WarningReasonKind::UserAgentCannotBeEmpty]); let input = "\t User-Agent\t :\t *"; validate_warnings(input, &[]); } #[test] fn test_warning_directive_without_user_agent() { let input = "Crawl-Delay: 5s"; validate_warnings(input, 
&[WarningReasonKind::DirectiveWithoutUserAgent]); let input = "User-Agent: *\nCrawl-Delay: 5"; validate_warnings(input, &[]); } #[test] fn test_warning_parse_crawl_delay_error() { let input = "User-Agent: *\nCrawl-Delay: "; validate_warnings(input, &[WarningReasonKind::ParseCrawlDelayError]); let input = "User-Agent: *\nCrawl-Delay: -"; validate_warnings(input, &[WarningReasonKind::ParseCrawlDelayError]); let input = "User-Agent: *\nCrawl-Delay: 5h9"; validate_warnings(input, &[WarningReasonKind::ParseCrawlDelayError]); let input = "User-Agent: *\nCrawl-Delay: 5"; validate_warnings(input, &[]); } #[test] fn test_warning_request_rate_format() { let input = "User-Agent: *\nRequest-rate: 1/5"; validate_warnings(input, &[]); let input = "User-Agent: *\nRequest-rate: 1//5"; validate_warnings(input, &[WarningReasonKind::WrongRequestRateFormat]); let input = "User-Agent: *\nRequest-rate: 1"; validate_warnings(input, &[WarningReasonKind::WrongRequestRateFormat]); } #[test] fn test_warning_request_rate() { let input = "User-Agent: *\nRequest-rate: a/b"; validate_warnings(input, &[WarningReasonKind::ParseRequestRate]); let input = "User-Agent: *\nRequest-rate: a/5"; validate_warnings(input, &[WarningReasonKind::ParseRequestRate]); let input = "User-Agent: *\nRequest-rate: 5/b"; validate_warnings(input, &[WarningReasonKind::ParseRequestRate]); let input = "User-Agent: *\nRequest-rate: 1.0/5.0"; validate_warnings(input, &[WarningReasonKind::ParseRequestRate]); } #[test] fn test_warning_parsing_url() { let input = "User-Agent: *\nSitemap: https://python.org/sitemap.xml"; validate_warnings(input, &[]); let input = "User-Agent: *\nSitemap: http$$$://python.org/sitemap.xml"; validate_warnings(input, &[WarningReasonKind::ParseUrl]); } #[test] fn test_wrong_clean_param() { let input = "User-Agent: *\nClean-param: ref "; validate_warnings(input, &[]); let input = "User-Agent: *\nClean-param: "; validate_warnings(input, &[WarningReasonKind::WrongCleanParamFormat]); let input = 
"User-Agent: *\nClean-param: &"; validate_warnings(input, &[]); let input = "User-Agent: *\nClean-param: ?"; validate_warnings(input, &[WarningReasonKind::IgnoredCleanParams]); let input = "User-Agent: *\nClean-param: abc$"; validate_warnings(input, &[WarningReasonKind::IgnoredCleanParams]); } #[test] fn test_warning_wrong_path_format() { let input = "User-Agent: *\nAllow: \\"; validate_warnings(input, &[WarningReasonKind::WrongPathFormat]); let input = "User-Agent: *\nDisallow: \\"; validate_warnings(input, &[WarningReasonKind::WrongPathFormat]); } impl From<&WarningReason> for WarningReasonKind { fn from(reason: &WarningReason) -> Self { match *reason { WarningReason::InvalidDirectiveFormat => WarningReasonKind::InvalidDirectiveFormat, WarningReason::DirectiveKeyIsEmpty => WarningReasonKind::DirectiveKeyIsEmpty, WarningReason::UnsupportedDirectiveKey { .. } => WarningReasonKind::UnsupportedDirectiveKey, WarningReason::UserAgentCannotBeEmpty => WarningReasonKind::UserAgentCannotBeEmpty, WarningReason::DirectiveWithoutUserAgent => WarningReasonKind::DirectiveWithoutUserAgent, WarningReason::ParseCrawlDelayError { .. } => WarningReasonKind::ParseCrawlDelayError, WarningReason::WrongRequestRateFormat => WarningReasonKind::WrongRequestRateFormat, WarningReason::ParseRequestRate { .. } => WarningReasonKind::ParseRequestRate, WarningReason::ParseUrl { .. } => WarningReasonKind::ParseUrl, WarningReason::WrongCleanParamFormat => WarningReasonKind::WrongCleanParamFormat, WarningReason::IgnoredCleanParams { .. } => WarningReasonKind::IgnoredCleanParams, WarningReason::WrongPathFormat => WarningReasonKind::WrongPathFormat, } } }<|fim▁end|>
validate_warnings(input, &[WarningReasonKind::UserAgentCannotBeEmpty]); let input = "\t User-Agent\t :\t ";
<|file_name|>test_vec.py<|end_file_name|><|fim▁begin|>import moose foo = moose.Pool('/foo1', 500)<|fim▁hole|><|fim▁end|>
bar = moose.vec('/foo1') assert len(bar) == 500
<|file_name|>test_refcounts.py<|end_file_name|><|fim▁begin|>import unittest import ctypes import gc MyCallback = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int) OtherCallback = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_ulonglong) import _ctypes_test dll = ctypes.CDLL(_ctypes_test.__file__) class RefcountTestCase(unittest.TestCase): def test_1(self): from sys import getrefcount as grc f = dll._testfunc_callback_i_if f.restype = ctypes.c_int f.argtypes = [ctypes.c_int, MyCallback] def callback(value): #print "called back with", value return value self.assertEqual(grc(callback), 2) cb = MyCallback(callback) self.assertTrue(grc(callback) > 2) result = f(-10, cb) self.assertEqual(result, -18) cb = None gc.collect() self.assertEqual(grc(callback), 2) def test_refcount(self): from sys import getrefcount as grc def func(*args): pass # this is the standard refcount for func self.assertEqual(grc(func), 2) # the CFuncPtr instance holds atr least one refcount on func: f = OtherCallback(func) self.assertTrue(grc(func) > 2) # and may release it again del f self.assertTrue(grc(func) >= 2) # but now it must be gone gc.collect() self.assertTrue(grc(func) == 2) class X(ctypes.Structure): _fields_ = [("a", OtherCallback)] x = X() x.a = OtherCallback(func) # the CFuncPtr instance holds atr least one refcount on func: self.assertTrue(grc(func) > 2) # and may release it again del x self.assertTrue(grc(func) >= 2) # and now it must be gone again gc.collect() self.assertEqual(grc(func), 2) f = OtherCallback(func) # the CFuncPtr instance holds atr least one refcount on func: self.assertTrue(grc(func) > 2) # create a cycle f.cycle = f del f gc.collect() self.assertEqual(grc(func), 2) class AnotherLeak(unittest.TestCase): def test_callback(self): import sys proto = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_int) def func(a, b): return a * b * 2 f = proto(func) gc.collect() a = sys.getrefcount(ctypes.c_int) f(1, 2)<|fim▁hole|>if __name__ == '__main__': 
unittest.main()<|fim▁end|>
self.assertEqual(sys.getrefcount(ctypes.c_int), a)
<|file_name|>article_reducer.tsx<|end_file_name|><|fim▁begin|>import { combineReducers } from 'redux'; import { RECEIVED_ARTICLE, RECEIVED_ARTICLE_COMMENTS, CLEAN_DATA, ListAction } from '../actions/news_action'; <|fim▁hole|>function article(state: object={},action: ListAction<object[]|string>){ switch(action.type){ case RECEIVED_ARTICLE: return action.data; case CLEAN_DATA: if(action.data === "articleReducer"){ return {}; } return state; default: return state; } } function comments(state: object[]=[], action: ListAction<object[]|string>){ switch(action.type){ case RECEIVED_ARTICLE_COMMENTS: return action.data; case CLEAN_DATA: if(action.data === "articleReducer"){ return []; } return state; default: return state; } } export default combineReducers({ article: article, comments: comments });<|fim▁end|>
<|file_name|>enum-alignment.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::cast; use std::ptr; use std::sys; fn addr_of<T>(ptr: &T) -> uint { let ptr = ptr::to_unsafe_ptr(ptr); ptr as uint } fn is_aligned<T>(ptr: &T) -> bool { unsafe { let addr: uint = cast::transmute(ptr); (addr % sys::min_align_of::<T>()) == 0 } } pub fn main() { let x = Some(0u64); match x { None => fail2!(), Some(ref y) => assert!(is_aligned(y)) } }<|fim▁end|>
//
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import eachWeekendOfInterval from '../eachWeekendOfInterval/index.js'; import startOfMonth from '../startOfMonth/index.js'; import endOfMonth from '../endOfMonth/index.js'; import requiredArgs from '../_lib/requiredArgs/index.js'; /** * @name eachWeekendOfMonth * @category Month Helpers * @summary List all the Saturdays and Sundays in the given month. * * @description * Get all the Saturdays and Sundays in the given month. * * @param {Date|Number} date - the given month * @returns {Date[]} an array containing all the Saturdays and Sundays * @throws {TypeError} 1 argument required * @throws {RangeError} The passed date is invalid * * @example * // Lists all Saturdays and Sundays in the given month * var result = eachWeekendOfMonth(new Date(2022, 1, 1)) * //=> [ * // Sat Feb 05 2022 00:00:00, * // Sun Feb 06 2022 00:00:00, * // Sat Feb 12 2022 00:00:00, * // Sun Feb 13 2022 00:00:00, * // Sat Feb 19 2022 00:00:00, * // Sun Feb 20 2022 00:00:00, * // Sat Feb 26 2022 00:00:00, * // Sun Feb 27 2022 00:00:00 * // ] */ export default function eachWeekendOfMonth(dirtyDate) { requiredArgs(1, arguments); var startDate = startOfMonth(dirtyDate); if (isNaN(startDate)) throw new RangeError('The passed date is invalid');<|fim▁hole|> start: startDate, end: endDate }); }<|fim▁end|>
var endDate = endOfMonth(dirtyDate); return eachWeekendOfInterval({
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Copyright The IETF Trust 2007, All Rights Reserved # Django settings for ietf project. # BASE_DIR and "settings_local" are from # http://code.djangoproject.com/wiki/SplitSettings import os try: import syslog syslog.openlog("datatracker", syslog.LOG_PID, syslog.LOG_USER) except ImportError: pass BASE_DIR = os.path.dirname(os.path.abspath(__file__)) # a place to put ajax logs if necessary. LOG_DIR = '/var/log/datatracker' import sys sys.path.append(os.path.abspath(BASE_DIR + "/..")) sys.path.append(os.path.abspath(BASE_DIR + "/../redesign")) DEBUG = True TEMPLATE_DEBUG = DEBUG # Domain name of the IETF IETF_DOMAIN = 'ietf.org' ADMINS = ( ('IETF Django Developers', 'django-project@' + IETF_DOMAIN), ('GMail Tracker Archive', '[email protected]'), ('Henrik Levkowetz', '[email protected]'),<|fim▁hole|>) # Server name of the tools server TOOLS_SERVER = 'tools.' + IETF_DOMAIN # Override this in the settings_local.py file: SERVER_EMAIL = 'Django Server <django-project@' + TOOLS_SERVER + '>' DEFAULT_FROM_EMAIL = 'IETF Secretariat <ietf-secretariat-reply@' + IETF_DOMAIN + '>' MANAGERS = ADMINS DATABASES = { 'default': { 'NAME': 'ietf_utf8', 'ENGINE': 'django.db.backends.mysql', 'USER': 'ietf', #'PASSWORD': 'ietf', #'OPTIONS': {}, }, # 'legacy': { # 'NAME': 'ietf', # 'ENGINE': 'django.db.backends.mysql', # 'USER': 'ietf', # #'PASSWORD': 'ietf', # }, } DATABASE_TEST_OPTIONS = { # Uncomment this to speed up testing if your database supports InnoDB: # 'init_command': 'SET storage_engine=InnoDB', } # Local time zone for this installation. Choices can be found here: # http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE # although not all variations may be possible on all operating systems. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'PST8PDT' # Language code for this installation. 
All choices can be found here: # http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes # http://blogs.law.harvard.edu/tech/stories/storyReader$15 LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = False # Absolute path to the directory that holds media. # Example: "/home/media/media.lawrence.com/" MEDIA_ROOT = BASE_DIR + "/../static/" # URL that handles the media served from MEDIA_ROOT. # Example: "http://media.lawrence.com" MEDIA_URL = '' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". ADMIN_MEDIA_PREFIX = '/media/' DAJAXICE_MEDIA_PREFIX="dajaxice" AUTH_PROFILE_MODULE = 'person.Person' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.RemoteUserBackend', ) #DATABASE_ROUTERS = ["ietf.legacy_router.LegacyRouter"] SESSION_COOKIE_AGE = 43200 # 12 hours SESSION_EXPIRE_AT_BROWSER_CLOSE = True TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.load_template_source', 'django.template.loaders.app_directories.load_template_source', 'ietf.dbtemplate.template.load_template_source', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.RemoteUserMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.http.ConditionalGetMiddleware', 'django.middleware.doc.XViewMiddleware', 'ietf.middleware.SQLLogMiddleware', 'ietf.middleware.SMTPExceptionMiddleware', 'ietf.middleware.RedirectTrailingPeriod', 'django.middleware.transaction.TransactionMiddleware', 'ietf.middleware.UnicodeNfkcNormalization', 'ietf.secr.middleware.secauth.SecAuthMiddleware' ) ROOT_URLCONF = 'ietf.urls' TEMPLATE_DIRS = ( BASE_DIR + "/templates", BASE_DIR + "/secr/templates", ) 
TEMPLATE_CONTEXT_PROCESSORS = ( 'django.core.context_processors.auth', 'django.core.context_processors.debug', 'django.core.context_processors.i18n', 'django.core.context_processors.request', 'django.contrib.messages.context_processors.messages', 'ietf.context_processors.server_mode', 'ietf.context_processors.revision_info', 'ietf.secr.context_processors.secr_revision_info', 'ietf.secr.context_processors.static', 'ietf.context_processors.rfcdiff_prefix', ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.sitemaps', 'django.contrib.admin', 'django.contrib.admindocs', 'django.contrib.humanize', 'django.contrib.messages', 'south', 'workflows', 'permissions', 'ietf.person', 'ietf.name', 'ietf.group', 'ietf.doc', 'ietf.message', 'ietf.announcements', 'ietf.idindex', 'ietf.idtracker', 'ietf.ietfauth', 'ietf.iesg', 'ietf.ipr', 'ietf.liaisons', 'ietf.mailinglists', 'ietf.meeting', #'ietf.proceedings', 'ietf.redirects', 'ietf.idrfc', 'ietf.wginfo', 'ietf.submit', 'ietf.ietfworkflows', 'ietf.wgchairs', 'ietf.wgcharter', 'ietf.sync', 'ietf.community', 'ietf.release', # secretariat apps 'form_utils', 'ietf.secr.announcement', 'ietf.secr.areas', 'ietf.secr.drafts', 'ietf.secr.groups', 'ietf.secr.ipradmin', 'ietf.secr.meetings', 'ietf.secr.proceedings', 'ietf.secr.roles', 'ietf.secr.rolodex', 'ietf.secr.telechat', 'ietf.secr.sreq', 'ietf.nomcom', 'ietf.dbtemplate', 'dajaxice', ) INTERNAL_IPS = ( # AMS servers '64.170.98.32', '64.170.98.86', # local '127.0.0.1', '::1', ) # no slash at end IDTRACKER_BASE_URL = "http://datatracker.ietf.org" RFCDIFF_PREFIX = "//www.ietf.org/rfcdiff" # Valid values: # 'production', 'test', 'development' # Override this in settings_local.py if it's not true SERVER_MODE = 'development' # The name of the method to use to invoke the test suite TEST_RUNNER = 'ietf.utils.test_runner.run_tests' # WG Chair configuration MAX_WG_DELEGATES = 3 DATE_FORMAT = "Y-m-d" 
DATETIME_FORMAT = "Y-m-d H:i" # Override this in settings_local.py if needed # *_PATH variables ends with a slash/ . INTERNET_DRAFT_PATH = '/a/www/ietf-ftp/internet-drafts/' INTERNET_DRAFT_PDF_PATH = '/a/www/ietf-datatracker/pdf/' RFC_PATH = '/a/www/ietf-ftp/rfc/' CHARTER_PATH = '/a/www/ietf-ftp/charter/' CHARTER_TXT_URL = 'http://www.ietf.org/charter/' CONFLICT_REVIEW_PATH = '/a/www/ietf-ftp/conflict-reviews' CONFLICT_REVIEW_TXT_URL = 'http://www.ietf.org/cr/' STATUS_CHANGE_PATH = '/a/www/ietf-ftp/status-changes' STATUS_CHANGE_TXT_URL = 'http://www.ietf.org/sc/' AGENDA_PATH = '/a/www/www6s/proceedings/' AGENDA_PATH_PATTERN = '/a/www/www6s/proceedings/%(meeting)s/agenda/%(wg)s.%(ext)s' MINUTES_PATH_PATTERN = '/a/www/www6s/proceedings/%(meeting)s/minutes/%(wg)s.%(ext)s' SLIDES_PATH_PATTERN = '/a/www/www6s/proceedings/%(meeting)s/slides/%(wg)s-*' IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/' IETFWG_DESCRIPTIONS_PATH = '/a/www/www6s/wg-descriptions/' IESG_TASK_FILE = '/a/www/www6/iesg/internal/task.txt' IESG_ROLL_CALL_FILE = '/a/www/www6/iesg/internal/rollcall.txt' IESG_MINUTES_FILE = '/a/www/www6/iesg/internal/minutes.txt' IESG_WG_EVALUATION_DIR = "/a/www/www6/iesg/evaluation" INTERNET_DRAFT_ARCHIVE_DIR = '/a/www/www6s/draft-archive' # Ideally, more of these would be local -- but since we don't support # versions right now, we'll point to external websites DOC_HREFS = { "agenda": "/meeting/{meeting}/agenda/{doc.group.acronym}/", #"charter": "/doc/{doc.name}-{doc.rev}/", "charter": "http://www.ietf.org/charter/{doc.name}-{doc.rev}.txt", #"draft": "/doc/{doc.name}-{doc.rev}/", "draft": "http://tools.ietf.org/html/{doc.name}-{doc.rev}", # I can't figure out the liaison maze. Hopefully someone # who understands this better can take care of it. 
#"liai-att": None #"liaison": None "minutes": "http://www.ietf.org/proceedings/{meeting}/minutes/{doc.name}", "slides": "http://www.ietf.org/proceedings/{meeting}/slides/{doc.name}", } # Override this in settings_local.py if needed CACHE_MIDDLEWARE_SECONDS = 300 CACHE_MIDDLEWARE_KEY_PREFIX = '' if SERVER_MODE == 'production': CACHE_BACKEND= 'file://'+'/a/www/ietf-datatracker/cache/' else: # Default to no caching in development/test, so that every developer # doesn't have to set CACHE_BACKEND in settings_local CACHE_BACKEND = 'dummy:///' # For readonly database operation # CACHE_BACKEND = 'memcached://127.0.0.1:11211/' # SESSION_ENGINE = "django.contrib.sessions.backends.cache" IPR_EMAIL_TO = ['[email protected]', ] DOC_APPROVAL_EMAIL_CC = ["RFC Editor <[email protected]>", ] # Put real password in settings_local.py IANA_SYNC_PASSWORD = "secret" IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes" IANA_SYNC_PROTOCOLS_URL = "http://www.iana.org/protocols/" RFC_EDITOR_SYNC_PASSWORD="secret" RFC_EDITOR_SYNC_NOTIFICATION_URL = "http://www.rfc-editor.org/parser/parser.php" RFC_EDITOR_QUEUE_URL = "http://www.rfc-editor.org/queue2.xml" RFC_EDITOR_INDEX_URL = "http://www.rfc-editor.org/rfc/rfc-index.xml" # Liaison Statement Tool settings LIAISON_UNIVERSAL_FROM = 'Liaison Statement Management Tool <lsmt@' + IETF_DOMAIN + '>' LIAISON_ATTACH_PATH = '/a/www/ietf-datatracker/documents/LIAISON/' LIAISON_ATTACH_URL = '/documents/LIAISON/' # ID Submission Tool settings IDSUBMIT_FROM_EMAIL = 'IETF I-D Submission Tool <[email protected]>' IDSUBMIT_TO_EMAIL = '[email protected]' IDSUBMIT_ANNOUNCE_FROM_EMAIL = '[email protected]' IDSUBMIT_ANNOUNCE_LIST_EMAIL = '[email protected]' # NomCom Tool settings ROLODEX_URL = "" PUBLIC_KEYS_URL = BASE_DIR + '/nomcom/public_keys/' NOMCOM_FROM_EMAIL = DEFAULT_FROM_EMAIL NOMCOM_ADMIN_EMAIL = DEFAULT_FROM_EMAIL OPENSSL_COMMAND = '/usr/bin/openssl' DAYS_TO_EXPIRE_NOMINATION_LINK = '' DEFAULT_FEEDBACK_TYPE = 'offtopic' 
NOMINEE_FEEDBACK_TYPES = ['comment', 'questio', 'nomina'] # Days from meeting to cut off dates on submit FIRST_CUTOFF_DAYS = 19 SECOND_CUTOFF_DAYS = 12 CUTOFF_HOUR = 00 # midnight UTC SUBMISSION_START_DAYS = -90 SUBMISSION_CUTOFF_DAYS = 33 SUBMISSION_CORRECTION_DAYS = 52 INTERNET_DRAFT_DAYS_TO_EXPIRE = 185 IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH IDSUBMIT_STAGING_PATH = '/a/www/www6s/staging/' IDSUBMIT_STAGING_URL = 'http://www.ietf.org/staging/' IDSUBMIT_IDNITS_BINARY = '/a/www/ietf-datatracker/scripts/idnits' MAX_PLAIN_DRAFT_SIZE = 6291456 # Max size of the txt draft in bytes # DOS THRESHOLDS PER DAY (Sizes are in MB) MAX_SAME_DRAFT_NAME = 20 MAX_SAME_DRAFT_NAME_SIZE = 50 MAX_SAME_SUBMITTER = 50 MAX_SAME_SUBMITTER_SIZE = 150 MAX_SAME_WG_DRAFT = 150 MAX_SAME_WG_DRAFT_SIZE = 450 MAX_DAILY_SUBMISSION = 1000 MAX_DAILY_SUBMISSION_SIZE = 2000 # End of ID Submission Tool settings # Account settings DAYS_TO_EXPIRE_REGISTRATION_LINK = 3 HTPASSWD_COMMAND = "/usr/bin/htpasswd2" HTPASSWD_FILE = "/www/htpasswd" # DB redesign USE_DB_REDESIGN_PROXY_CLASSES = True SOUTH_TESTS_MIGRATE = False # Generation of bibxml files for xml2rfc BIBXML_BASE_PATH = '/a/www/ietf-ftp/xml2rfc' # Timezone files for iCalendar TZDATA_ICS_PATH = '/www/ietf-datatracker/tz/ics/' CHANGELOG_PATH = '/www/ietf-datatracker/web/changelog' # Secretariat Tool # this is a tuple of regular expressions. if the incoming URL matches one of # these, than non secretariat access is allowed. SECR_AUTH_UNRESTRICTED_URLS = ( #(r'^/$'), (r'^/secr/announcement/'), (r'^/secr/proceedings/'), (r'^/secr/sreq/'), ) SECR_BLUE_SHEET_PATH = '/a/www/ietf-datatracker/documents/blue_sheet.rtf' SECR_BLUE_SHEET_URL = 'https://datatracker.ietf.org/documents/blue_sheet.rtf' SECR_INTERIM_LISTING_DIR = '/a/www/www6/meeting/interim' SECR_MAX_UPLOAD_SIZE = 40960000 SECR_PROCEEDINGS_DIR = '/a/www/www6s/proceedings/' SECR_STATIC_URL = '/secr/' USE_ETAGS=True # Put SECRET_KEY in here, or any other sensitive or site-specific # changes. 
DO NOT commit settings_local.py to svn. from settings_local import *<|fim▁end|>
('Robert Sparks', '[email protected]'), ('Ole Laursen', '[email protected]'),
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # from django.urls import path <|fim▁hole|> path('login/complete/', views.OpenIDLoginCompleteView.as_view(), name='openid-login-complete'), ]<|fim▁end|>
from . import views urlpatterns = [ path('login/', views.OpenIDLoginView.as_view(), name='openid-login'),
<|file_name|>mkart.rs<|end_file_name|><|fim▁begin|>extern crate jiyunet_core as core; extern crate jiyunet_dag as dag; #[macro_use] extern crate clap; use std::fs; use std::io::Read; use core::io::BinaryComponent; use core::sig::Signed; use dag::artifact; use dag::segment; mod util; fn main() { let matches = clap_app!(jiyu_mkart => (version: "0.1.0") (author: "treyzania <[email protected]>") (about: "Packages an file into a signed Jiyunet segment. Note that the segment is not likely to be valid on the blockchain due to noncing, etc.") (@arg src: +required "Source file to package.") (@arg dest: +required "Output file.") (@arg artifact_type: -a +takes_value "Artifact type. Default: 0x0000")) .get_matches(); let src = matches.value_of("src").unwrap(); let dest = matches.value_of("dest").unwrap(); let atype = match matches.value_of("artifact_type").map(str::parse) { Some(Ok(p)) => p, Some(Err(_)) => panic!("unable to parse artifact type as number"), None => 0x0000 };<|fim▁hole|> // Read the source data, convert to artifact. let data = { let mut f: fs::File = fs::File::open(src).unwrap(); let mut v = Vec::new(); f.read_to_end(&mut v).expect("error reading provided artifact contents"); v }; let art = artifact::ArtifactData::new(atype, data); let seg = segment::Segment::new_artifact_seg(art, util::timestamp()); // Load the keypair, then sign. let kp = util::load_user_keypair().expect("keypair not found"); let signed_seg = Signed::<segment::Segment>::new(kp, seg); // Write the signed artifact segment. let mut out = fs::File::create(dest).expect("unable to create destination"); signed_seg.to_writer(&mut out).expect("unable to write to destination") }<|fim▁end|>
<|file_name|>expression.rs<|end_file_name|><|fim▁begin|>use token::Token; use token_offset::TokenOffset; use operator::Operator; use operator_offset::OperatorOffset;<|fim▁hole|> use std::fmt; #[derive(Debug, Clone, PartialEq)] pub enum Expression { List(Vec<Expression>), Value(OperatorOrToken), } impl fmt::Display for Expression { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Expression::List(ref elements) => { elements.iter().map(|e| e.to_string() ).collect::<Vec<_>>() .join(" ") .replace(" / ", "/") .fmt(f) }, Expression::Value(ref v) => v.fmt(f), } } } impl Expression { pub fn parse<T>(tokenizer: &mut T) -> Result<Expression> where T: Iterator<Item = Result<OperatorOrToken>> { let mut list = vec![]; while let Some(Ok(t)) = tokenizer.next() { match t { OperatorOrToken::Operator(OperatorOffset { operator: Operator::Semicolon, .. }) => { if list.len() == 1 { return Ok(list.pop().unwrap()) } else { return Ok(Expression::List(list)) } }, OperatorOrToken::Token(TokenOffset { token: Token::Comment(_), .. }) => {}, _ => list.push(Expression::Value(t)), } } let error_offset = match list.pop() { Some(Expression::Value(v)) => v.offset().unwrap_or(0), Some(Expression::List(_)) => unreachable!(), // for now until nested lists None => 0, }; Err(SassError { offset: error_offset, kind: ErrorKind::UnexpectedEof, message: String::from( "Expected semicolon while parsing a value expression; reached EOF instead." ), }) } fn apply_slash(first: OperatorOrToken, second: OperatorOrToken, paren_level: i32, offset: Option<usize>) -> Expression { if paren_level == 0 { debug!("Paren level 0. 
First computed: {}, second computed: {}", first.computed_number(), second.computed_number()); if first.computed_number() || second.computed_number() { Expression::Value(first / second) } else { Expression::List(vec![ Expression::Value(first), Expression::Value(OperatorOrToken::Operator(OperatorOffset { operator: Operator::Slash, offset: offset, })), Expression::Value(second), ]) } } else { debug!("Paren level {}", paren_level); Expression::Value(first / second) } } fn force_list_collapse(list: Vec<Expression>, context: &Context) -> Expression { if list.iter().any(|item| { match *item { Expression::Value(OperatorOrToken::Operator(OperatorOffset { operator: Operator::Slash, .. })) => true, _ => false, } }) { let mut evaluator = ExpressionEvaluator::new(context); evaluator.paren_level = 1; evaluator.evaluate_list(list) } else { Expression::List(list) } } pub fn apply_math(operator: OperatorOffset, first: Expression, second: Expression, context: &Context, paren_level: i32) -> Expression { debug!("Applying math to:\nfirst: {:#?}\nop: {:#?}\nsecond: {:#?}", first, operator, second); match (first, second) { (Expression::Value(f), Expression::Value(s)) => { let result = match operator.operator { Operator::Plus => f + s, Operator::Minus => f - s, Operator::Star => f * s, Operator::Percent => f % s, Operator::Slash => return Expression::apply_slash( f, s, paren_level, operator.offset ), _ => unimplemented!(), }; Expression::Value(result) }, (Expression::List(f), Expression::List(s)) => { let eval_first = Expression::force_list_collapse(f, context); let eval_second = Expression::force_list_collapse(s, context); match (eval_first, eval_second) { (Expression::List(mut fi), Expression::List(se)) => { match operator.operator { Operator::Plus | Operator::Comma => { fi.extend(se); Expression::List(fi) }, _ => panic!("Can't use an operator other than \ plus or comma on two lists"), } }, (eval_first, eval_second) => { Expression::apply_math( operator, eval_first, eval_second, 
context, paren_level ) } } }, (Expression::List(f), Expression::Value(s)) => { let mut first_evaluator = ExpressionEvaluator::new(context); first_evaluator.paren_level = paren_level; let eval_first = first_evaluator.evaluate_list(f); match eval_first { Expression::List(mut fi) => { match operator.operator { Operator::Plus => { fi.push(Expression::Value(s)); Expression::List(fi) }, Operator::Slash => { if s.computed_number() { let forced = Expression::force_list_collapse( fi, context ); match forced { Expression::List(mut fi) => { fi.push( Expression::Value( OperatorOrToken::Operator( operator ) ) ); fi.push(Expression::Value(s)); Expression::List(fi) }, Expression::Value(fo) => { Expression::Value(fo / s) } } } else { fi.push( Expression::Value( OperatorOrToken::Operator(operator) ) ); fi.push(Expression::Value(s)); Expression::List(fi) } }, _ => panic!("Can't use an operator other than \ plus on a list and a value"), } }, _ => Expression::apply_math( operator, eval_first, Expression::Value(s), context, paren_level ), } }, (Expression::Value(f), Expression::List(s)) => { debug!("Value Op List: {:#?}\n{:#?}\n{:#?}\n", f, operator, s); let eval_second = Expression::force_list_collapse(s, context); match eval_second { Expression::List(se) => { match operator.operator { Operator::Plus => { let (first_in_list, rest) = se.split_first() .expect("Trying to get the first and rest \ of a list that isn't a value failed"); let new_first = format!("{}{}", f, first_in_list); let mut new_list = vec![ Expression::Value(OperatorOrToken::Token( TokenOffset { offset: f.offset(), token: Token::String(new_first), } )) ]; new_list.extend_from_slice(rest); Expression::List(new_list) }, _ => panic!("Can't use an operator other than \ plus on a value and a list"), } }, _ => Expression::apply_math( operator, Expression::Value(f), eval_second, context, paren_level ), } }, } } pub fn create_list(head: Option<Expression>, tail: Expression) -> Expression { let mut list = match head { 
Some(Expression::List(v)) => v, Some(e) => vec![e], None => vec![], }; list.push(tail); Expression::List(list) } pub fn is_number(&self) -> bool { match *self { Expression::Value(OperatorOrToken::Token(TokenOffset { token: Token::Number { .. }, .. })) => true, _ => false, } } pub fn is_string(&self) -> bool { match *self { Expression::Value(OperatorOrToken::Token(TokenOffset { token: Token::String(_), .. })) => true, _ => false, } } pub fn is_right_paren(&self) -> bool { match *self { Expression::Value(OperatorOrToken::Operator(OperatorOffset { operator: Operator::RightParen, .. })) => true, _ => false, } } pub fn is_left_paren(&self) -> bool { match *self { Expression::Value(OperatorOrToken::Operator(OperatorOffset { operator: Operator::LeftParen, .. })) => true, _ => false, } } pub fn is_operator(&self) -> bool { match *self { Expression::Value(OperatorOrToken::Operator(_)) => true, _ => false, } } pub fn extract_operator_offset(self) -> OperatorOffset { match self { Expression::Value(OperatorOrToken::Operator(operator_offset)) => { operator_offset }, _ => panic!("Can't extract operator offset from {:?}", self), } } pub fn extract_token_offset(self) -> TokenOffset { match self { Expression::Value(OperatorOrToken::Token(token_offset)) => { token_offset }, _ => panic!("Can't extract token offset from {:?}", self), } } } #[cfg(test)] mod tests { use super::*; use token::Token; use token_offset::TokenOffset; use operator_or_token::OperatorOrToken; use operator::Operator; use operator_offset::OperatorOffset; fn semicolon() -> OperatorOrToken { OperatorOrToken::Operator( OperatorOffset { operator: Operator::Semicolon, offset: None } ) } fn blue() -> OperatorOrToken { OperatorOrToken::Token( TokenOffset { token: Token::String("blue".into()), offset: None } ) } fn plus() -> OperatorOrToken { OperatorOrToken::Operator( OperatorOffset { operator: Operator::Plus, offset: None } ) } fn one() -> OperatorOrToken { OperatorOrToken::Token( TokenOffset { token: Token::Number { 
value: 1.0, units: None, computed: false }, offset: None } ) } fn one_px() -> OperatorOrToken { OperatorOrToken::Token( TokenOffset { token: Token::Number { value: 1.0, units: Some("px".into()), computed: false }, offset: None } ) } #[test] fn it_parses_a_single_string() { let mut fake_tokenizer = vec![Ok(blue()), Ok(semicolon())].into_iter(); assert_eq!( Expression::parse(&mut fake_tokenizer), Ok(Expression::Value(blue())) ); } #[test] fn it_parses_a_list() { let mut fake_tokenizer = vec![ Ok(one()), Ok(plus()), Ok(one()), Ok(semicolon()) ].into_iter(); assert_eq!( Expression::parse(&mut fake_tokenizer), Ok(Expression::List(vec![ Expression::Value(one()), Expression::Value(plus()), Expression::Value(one()), ])) ); } #[test] fn it_parses_a_number_without_units() { let mut fake_tokenizer = vec![ Ok(one()), Ok(semicolon()) ].into_iter(); assert_eq!( Expression::parse(&mut fake_tokenizer), Ok(Expression::Value(one())) ); } #[test] fn it_parses_a_number_with_units() { let mut fake_tokenizer = vec![ Ok(one_px()), Ok(semicolon()) ].into_iter(); assert_eq!( Expression::parse(&mut fake_tokenizer), Ok(Expression::Value(one_px())) ); } }<|fim▁end|>
use operator_or_token::OperatorOrToken; use context::Context; use error::{Result, SassError, ErrorKind}; use expression_evaluator::ExpressionEvaluator;
<|file_name|>unify.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::VarValue::*; use std::kinds::marker; use middle::ty::{expected_found, IntVarValue}; use middle::ty::{mod, Ty}; use middle::typeck::infer::{uok, ures}; use middle::typeck::infer::InferCtxt; use std::cell::RefCell; use std::fmt::Show; use syntax::ast; use util::ppaux::Repr; use util::snapshot_vec as sv; /// This trait is implemented by any type that can serve as a type /// variable. We call such variables *unification keys*. For example, /// this trait is implemented by `IntVid`, which represents integral /// variables. /// /// Each key type has an associated value type `V`. For example, for /// `IntVid`, this is `Option<IntVarValue>`, representing some /// (possibly not yet known) sort of integer. /// /// Implementations of this trait are at the end of this file. pub trait UnifyKey<'tcx, V> : Clone + Show + PartialEq + Repr<'tcx> { fn index(&self) -> uint; fn from_index(u: uint) -> Self; // Given an inference context, returns the unification table // appropriate to this key type. fn unification_table<'v>(infcx: &'v InferCtxt) -> &'v RefCell<UnificationTable<Self,V>>; fn tag(k: Option<Self>) -> &'static str; } /// Trait for valid types that a type variable can be set to. Note that /// this is typically not the end type that the value will take on, but /// rather an `Option` wrapper (where `None` represents a variable /// whose value is not yet set). /// /// Implementations of this trait are at the end of this file. 
pub trait UnifyValue<'tcx> : Clone + Repr<'tcx> + PartialEq { } /// Value of a unification key. We implement Tarjan's union-find /// algorithm: when two keys are unified, one of them is converted /// into a "redirect" pointing at the other. These redirects form a /// DAG: the roots of the DAG (nodes that are not redirected) are each /// associated with a value of type `V` and a rank. The rank is used /// to keep the DAG relatively balanced, which helps keep the running /// time of the algorithm under control. For more information, see /// <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>. #[deriving(PartialEq,Clone)] pub enum VarValue<K,V> { Redirect(K), Root(V, uint), } /// Table of unification keys and their values. pub struct UnificationTable<K,V> { /// Indicates the current value of each key. values: sv::SnapshotVec<VarValue<K,V>,(),Delegate>, } /// At any time, users may snapshot a unification table. The changes /// made during the snapshot may either be *committed* or *rolled back*. pub struct Snapshot<K> { // Link snapshot to the key type `K` of the table. marker: marker::CovariantType<K>, snapshot: sv::Snapshot, } /// Internal type used to represent the result of a `get()` operation. /// Conveys the current root and value of the key. pub struct Node<K,V> { pub key: K, pub value: V, pub rank: uint, } pub struct Delegate; // We can't use V:LatticeValue, much as I would like to, // because frequently the pattern is that V=Option<U> for some // other type parameter U, and we have no way to say // Option<U>:LatticeValue. impl<'tcx, V:PartialEq+Clone+Repr<'tcx>, K:UnifyKey<'tcx, V>> UnificationTable<K,V> { pub fn new() -> UnificationTable<K,V> { UnificationTable { values: sv::SnapshotVec::new(Delegate), } } /// Starts a new snapshot. Each snapshot must be either /// rolled back or committed in a "LIFO" (stack) order. 
pub fn snapshot(&mut self) -> Snapshot<K> { Snapshot { marker: marker::CovariantType::<K>, snapshot: self.values.start_snapshot() } } /// Reverses all changes since the last snapshot. Also /// removes any keys that have been created since then. pub fn rollback_to(&mut self, snapshot: Snapshot<K>) { debug!("{}: rollback_to()", UnifyKey::tag(None::<K>)); self.values.rollback_to(snapshot.snapshot); } /// Commits all changes since the last snapshot. Of course, they /// can still be undone if there is a snapshot further out. pub fn commit(&mut self, snapshot: Snapshot<K>) { debug!("{}: commit()", UnifyKey::tag(None::<K>)); self.values.commit(snapshot.snapshot); } pub fn new_key(&mut self, value: V) -> K { let index = self.values.push(Root(value, 0)); let k = UnifyKey::from_index(index); debug!("{}: created new key: {}", UnifyKey::tag(None::<K>), k); k } /// Find the root node for `vid`. This uses the standard union-find algorithm with path /// compression: http://en.wikipedia.org/wiki/Disjoint-set_data_structure pub fn get(&mut self, tcx: &ty::ctxt, vid: K) -> Node<K,V> { let index = vid.index(); let value = (*self.values.get(index)).clone(); match value { Redirect(redirect) => { let node: Node<K,V> = self.get(tcx, redirect.clone()); if node.key != redirect { // Path compression self.values.set(index, Redirect(node.key.clone())); } node } Root(value, rank) => { Node { key: vid, value: value, rank: rank } } } } fn is_root(&self, key: &K) -> bool { match *self.values.get(key.index()) { Redirect(..) => false, Root(..) => true, } } /// Sets the value for `vid` to `new_value`. `vid` MUST be a root node! Also, we must be in the /// middle of a snapshot. pub fn set(&mut self, tcx: &ty::ctxt<'tcx>, key: K, new_value: VarValue<K,V>) { assert!(self.is_root(&key)); debug!("Updating variable {} to {}", key.repr(tcx), new_value.repr(tcx)); self.values.set(key.index(), new_value); } /// Either redirects node_a to node_b or vice versa, depending on the relative rank. 
Returns /// the new root and rank. You should then update the value of the new root to something /// suitable. pub fn unify(&mut self, tcx: &ty::ctxt<'tcx>, node_a: &Node<K,V>, node_b: &Node<K,V>) -> (K, uint) { debug!("unify(node_a(id={}, rank={}), node_b(id={}, rank={}))", node_a.key.repr(tcx), node_a.rank, node_b.key.repr(tcx), node_b.rank); if node_a.rank > node_b.rank { // a has greater rank, so a should become b's parent, // i.e., b should redirect to a. self.set(tcx, node_b.key.clone(), Redirect(node_a.key.clone())); (node_a.key.clone(), node_a.rank) } else if node_a.rank < node_b.rank { // b has greater rank, so a should redirect to b. self.set(tcx, node_a.key.clone(), Redirect(node_b.key.clone())); (node_b.key.clone(), node_b.rank) } else { // If equal, redirect one to the other and increment the // other's rank. assert_eq!(node_a.rank, node_b.rank); self.set(tcx, node_b.key.clone(), Redirect(node_a.key.clone())); (node_a.key.clone(), node_a.rank + 1) } } } impl<K,V> sv::SnapshotVecDelegate<VarValue<K,V>,()> for Delegate { fn reverse(&mut self, _: &mut Vec<VarValue<K,V>>, _: ()) { panic!("Nothing to reverse"); } } /////////////////////////////////////////////////////////////////////////// // Code to handle simple keys like ints, floats---anything that // doesn't have a subtyping relationship we need to worry about. /// Indicates a type that does not have any kind of subtyping /// relationship. 
pub trait SimplyUnifiable<'tcx> : Clone + PartialEq + Repr<'tcx> { fn to_type(&self) -> Ty<'tcx>; fn to_type_err(expected_found<Self>) -> ty::type_err<'tcx>; } pub fn err<'tcx, V:SimplyUnifiable<'tcx>>(a_is_expected: bool, a_t: V, b_t: V) -> ures<'tcx> { if a_is_expected { Err(SimplyUnifiable::to_type_err( ty::expected_found {expected: a_t, found: b_t})) } else { Err(SimplyUnifiable::to_type_err( ty::expected_found {expected: b_t, found: a_t})) } } pub trait InferCtxtMethodsForSimplyUnifiableTypes<'tcx, V:SimplyUnifiable<'tcx>, K:UnifyKey<'tcx, Option<V>>> { fn simple_vars(&self, a_is_expected: bool, a_id: K, b_id: K) -> ures<'tcx>; fn simple_var_t(&self, a_is_expected: bool, a_id: K, b: V) -> ures<'tcx>; fn probe_var(&self, a_id: K) -> Option<Ty<'tcx>>; } impl<'a,'tcx,V:SimplyUnifiable<'tcx>,K:UnifyKey<'tcx, Option<V>>> InferCtxtMethodsForSimplyUnifiableTypes<'tcx, V, K> for InferCtxt<'a, 'tcx> { /// Unifies two simple keys. Because simple keys do not have any subtyping relationships, if /// both keys have already been associated with a value, then those two values must be the /// same. fn simple_vars(&self, a_is_expected: bool, a_id: K, b_id: K) -> ures<'tcx> { let tcx = self.tcx; let table = UnifyKey::unification_table(self); let node_a = table.borrow_mut().get(tcx, a_id); let node_b = table.borrow_mut().get(tcx, b_id); let a_id = node_a.key.clone(); let b_id = node_b.key.clone(); if a_id == b_id { return uok(); } let combined = { match (&node_a.value, &node_b.value) { (&None, &None) => { None } (&Some(ref v), &None) | (&None, &Some(ref v)) => { Some((*v).clone()) } (&Some(ref v1), &Some(ref v2)) => { if *v1 != *v2 { return err(a_is_expected, (*v1).clone(), (*v2).clone()) } Some((*v1).clone()) } } }; let (new_root, new_rank) = table.borrow_mut().unify(tcx, &node_a, &node_b); table.borrow_mut().set(tcx, new_root, Root(combined, new_rank)); return Ok(()) } /// Sets the value of the key `a_id` to `b`. 
Because simple keys do not have any subtyping /// relationships, if `a_id` already has a value, it must be the same as `b`. fn simple_var_t(&self, a_is_expected: bool, a_id: K, b: V) -> ures<'tcx> { let tcx = self.tcx; let table = UnifyKey::unification_table(self); let node_a = table.borrow_mut().get(tcx, a_id); let a_id = node_a.key.clone(); match node_a.value { None => { table.borrow_mut().set(tcx, a_id, Root(Some(b), node_a.rank)); return Ok(()); } Some(ref a_t) => { if *a_t == b { return Ok(()); } else { return err(a_is_expected, (*a_t).clone(), b); } } } } fn probe_var(&self, a_id: K) -> Option<Ty<'tcx>> { let tcx = self.tcx; let table = UnifyKey::unification_table(self); let node_a = table.borrow_mut().get(tcx, a_id); match node_a.value {<|fim▁hole|> Some(ref a_t) => Some(a_t.to_type()) } } } /////////////////////////////////////////////////////////////////////////// // Integral type keys impl<'tcx> UnifyKey<'tcx, Option<IntVarValue>> for ty::IntVid { fn index(&self) -> uint { self.index } fn from_index(i: uint) -> ty::IntVid { ty::IntVid { index: i } } fn unification_table<'v>(infcx: &'v InferCtxt) -> &'v RefCell<UnificationTable<ty::IntVid, Option<IntVarValue>>> { return &infcx.int_unification_table; } fn tag(_: Option<ty::IntVid>) -> &'static str { "IntVid" } } impl<'tcx> SimplyUnifiable<'tcx> for IntVarValue { fn to_type(&self) -> Ty<'tcx> { match *self { ty::IntType(i) => ty::mk_mach_int(i), ty::UintType(i) => ty::mk_mach_uint(i), } } fn to_type_err(err: expected_found<IntVarValue>) -> ty::type_err<'tcx> { return ty::terr_int_mismatch(err); } } impl<'tcx> UnifyValue<'tcx> for Option<IntVarValue> { } // Floating point type keys impl<'tcx> UnifyKey<'tcx, Option<ast::FloatTy>> for ty::FloatVid { fn index(&self) -> uint { self.index } fn from_index(i: uint) -> ty::FloatVid { ty::FloatVid { index: i } } fn unification_table<'v>(infcx: &'v InferCtxt) -> &'v RefCell<UnificationTable<ty::FloatVid, Option<ast::FloatTy>>> { return &infcx.float_unification_table; } 
fn tag(_: Option<ty::FloatVid>) -> &'static str { "FloatVid" } } impl<'tcx> UnifyValue<'tcx> for Option<ast::FloatTy> { } impl<'tcx> SimplyUnifiable<'tcx> for ast::FloatTy { fn to_type(&self) -> Ty<'tcx> { ty::mk_mach_float(*self) } fn to_type_err(err: expected_found<ast::FloatTy>) -> ty::type_err<'tcx> { ty::terr_float_mismatch(err) } } impl<'tcx, K:Repr<'tcx>, V:Repr<'tcx>> Repr<'tcx> for VarValue<K,V> { fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String { match *self { Redirect(ref k) => format!("Redirect({})", k.repr(tcx)), Root(ref v, r) => format!("Root({}, {})", v.repr(tcx), r) } } }<|fim▁end|>
None => None,
<|file_name|>language.ts<|end_file_name|><|fim▁begin|><!DOCTYPE TS><TS> <context> <name>LanguageSettings</name> <message> <source>English</source> <translation>Angļu</translation> </message> <message> <source>default</source> <translation>noklusētā</translation> </message> <message> <source>Language</source> <translation>Valoda</translation> </message> <message> <source>&lt;qt&gt;Attention, all windows will be closed by changing the language<|fim▁hole|></context> <context> <name>LanguageSettingsBase</name> <message> <source>Language Settings</source> <translation>Valodas Uzstādījumi</translation> </message> <message> <source>Select language</source> <translation>Izvēlaties valodu</translation> </message> </context> </TS><|fim▁end|>
without saving the Data.&lt;br&gt;&lt;br&gt;Go on?&lt;/qt&gt;</source> <translation>&lt;qt&gt;Uzmanību, mainot valodu visi logi tiks aizvērti nesaglabājot datus.&lt;br&gt;&lt;br&gt;Turpināt?&lt;/qt&gt;</translation> </message>
<|file_name|>0004_auto_20160905_0938.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models def port_models(apps, schema_editor): Proposal = apps.get_model('core', 'Proposal') Notice = apps.get_model('core', 'Notice') n = Notice() n.title = "Edital" n.description = "Edital info" n.save() for p in Proposal.objects.all(): p.notice = n p.save() def reverse_port_models(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [<|fim▁hole|> ] operations = [ migrations.CreateModel( name='Notice', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('title', models.CharField(max_length=60)), ('description', models.CharField(max_length=500)), ('is_available', models.BooleanField(default=False)), ], ), migrations.AddField( model_name='proposal', name='notice', field=models.ForeignKey(related_name='proposals', to='core.Notice', null=True), ), migrations.RunPython(port_models, reverse_port_models), ]<|fim▁end|>
('core', '0003_proposaldate'),
<|file_name|>byext.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python """Show file statistics by extension.""" import os import sys class Stats: def __init__(self): self.stats = {} def statargs(self, args): for arg in args: if os.path.isdir(arg):<|fim▁hole|> self.statfile(arg) else: sys.stderr.write("Can't find %s\n" % file) self.addstats("<???>", "unknown", 1) def statdir(self, dir): self.addstats("<dir>", "dirs", 1) try: names = os.listdir(dir) except os.error, err: sys.stderr.write("Can't list %s: %s\n" % (file, err)) self.addstats(ext, "unlistable", 1) return names.sort() for name in names: if name.startswith(".#"): continue # Skip CVS temp files if name.endswith("~"): continue# Skip Emacs backup files full = os.path.join(dir, name) if os.path.islink(full): self.addstats("<lnk>", "links", 1) elif os.path.isdir(full): self.statdir(full) else: self.statfile(full) def statfile(self, file): head, ext = os.path.splitext(file) head, base = os.path.split(file) if ext == base: ext = "" # E.g. 
.cvsignore is deemed not to have an extension ext = os.path.normcase(ext) if not ext: ext = "<none>" self.addstats(ext, "files", 1) try: f = open(file, "rb") except IOError, err: sys.stderr.write("Can't open %s: %s\n" % (file, err)) self.addstats(ext, "unopenable", 1) return data = f.read() f.close() self.addstats(ext, "bytes", len(data)) if '\0' in data: self.addstats(ext, "binary", 1) return if not data: self.addstats(ext, "empty", 1) #self.addstats(ext, "chars", len(data)) lines = data.splitlines() self.addstats(ext, "lines", len(lines)) del lines words = data.split() self.addstats(ext, "words", len(words)) def addstats(self, ext, key, n): d = self.stats.setdefault(ext, {}) d[key] = d.get(key, 0) + n def report(self): exts = self.stats.keys() exts.sort() # Get the column keys columns = {} for ext in exts: columns.update(self.stats[ext]) cols = columns.keys() cols.sort() colwidth = {} colwidth["ext"] = max([len(ext) for ext in exts]) minwidth = 6 self.stats["TOTAL"] = {} for col in cols: total = 0 cw = max(minwidth, len(col)) for ext in exts: value = self.stats[ext].get(col) if value is None: w = 0 else: w = len("%d" % value) total += value cw = max(cw, w) cw = max(cw, len(str(total))) colwidth[col] = cw self.stats["TOTAL"][col] = total exts.append("TOTAL") for ext in exts: self.stats[ext]["ext"] = ext cols.insert(0, "ext") def printheader(): for col in cols: print "%*s" % (colwidth[col], col), print printheader() for ext in exts: for col in cols: value = self.stats[ext].get(col, "") print "%*s" % (colwidth[col], value), print printheader() # Another header at the bottom def main(): args = sys.argv[1:] if not args: args = [os.curdir] s = Stats() s.statargs(args) s.report() if __name__ == "__main__": main()<|fim▁end|>
self.statdir(arg) elif os.path.isfile(arg):
<|file_name|>MeteorIndexResponseWrapper.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright 2002 National Student Clearinghouse * * This code is part of the Meteor system as defined and specified * by the National Student Clearinghouse and the Meteor Sponsors. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ******************************************************************************/ package org.meteornetwork.meteor.common.abstraction.index; import org.meteornetwork.meteor.common.xml.indexresponse.DataProvider; import org.meteornetwork.meteor.common.xml.indexresponse.DataProviders; import org.meteornetwork.meteor.common.xml.indexresponse.IndexProviderData; import org.meteornetwork.meteor.common.xml.indexresponse.IndexProviderMessages; import org.meteornetwork.meteor.common.xml.indexresponse.Message; import org.meteornetwork.meteor.common.xml.indexresponse.MeteorIndexResponse; import org.meteornetwork.meteor.common.xml.indexresponse.types.RsMsgLevelEnum; public class MeteorIndexResponseWrapper { private final MeteorIndexResponse response; public MeteorIndexResponseWrapper() { response = new MeteorIndexResponse(); response.setDataProviders(new DataProviders()); } /** * Add index provider information to the response. 
* * @param id * the ID of this index provider * @param name * the name of this index provider * @param url * the contact URL of this index provider */ public void setIndexProviderData(String id, String name, String url) { IndexProviderData data = new IndexProviderData(); data.setEntityID(id); data.setEntityName(name); data.setEntityURL(url); response.setIndexProviderData(data); } /** * Add a message to this response * * @param messageText * the text of the message * @param level * the severity level of the message */ public void addMessage(String messageText, RsMsgLevelEnum level) { Message message = new Message(); message.setRsMsg(messageText); message.setRsMsgLevel(level.name()); if (response.getIndexProviderMessages() == null) { response.setIndexProviderMessages(new IndexProviderMessages()); } response.getIndexProviderMessages().addMessage(message); } /** * Add one or more Data Provider objects to the response * * @param dataProviders * the data providers to add to the response */ public void addDataProviders(DataProvider... dataProviders) {<|fim▁hole|> for (DataProvider dataProvider : dataProviders) { response.getDataProviders().addDataProvider(dataProvider); } } /** * Add Data Provider objects to the response * * @param dataProviders * an iterable collection of Data Providers to add to the * response */ public void addDataProviders(Iterable<DataProvider> dataProviders) { for (DataProvider dataProvider : dataProviders) { response.getDataProviders().addDataProvider(dataProvider); } } /** * Access a mutable version of the response. * * @return A mutable version of the internal MeteorIndexResponse object */ public MeteorIndexResponse getResponse() { return response; } }<|fim▁end|>
<|file_name|>EPGMEdge.java<|end_file_name|><|fim▁begin|>/* * This file is part of Gradoop. * * Gradoop is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Gradoop is distributed in the hope that it will be useful,<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Gradoop. If not, see <http://www.gnu.org/licenses/>. */ package org.gradoop.common.model.api.entities; import org.gradoop.common.model.impl.id.GradoopId; /** * Describes data assigned to an edge in the EPGM. */ public interface EPGMEdge extends EPGMGraphElement { /** * Returns the source vertex identifier. * * @return source vertex id */ GradoopId getSourceId(); /** * Sets the source vertex identifier. * * @param sourceId source vertex id */ void setSourceId(GradoopId sourceId); /** * Returns the target vertex identifier. * * @return target vertex id */ GradoopId getTargetId(); /** * Sets the target vertex identifier. * * @param targetId target vertex id. */ void setTargetId(GradoopId targetId); }<|fim▁end|>
* but WITHOUT ANY WARRANTY; without even the implied warranty of
<|file_name|>Build.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from precious import db from datetime import datetime class Build(db.Model): __tablename__ = 'builds' id = db.Column(db.Integer, primary_key=True, unique=True) project_id = db.Column(db.Integer, db.ForeignKey('projects.id'))<|fim▁hole|> date = db.Column(db.DateTime) revision = db.Column(db.LargeBinary) stdout = db.Column(db.UnicodeText) success = db.Column(db.Boolean) def __init__(self, project_id, revision, stdout=u"", success=True, date=datetime.now()): self.project_id = project_id self.date = date self.revision = revision self.stdout = stdout self.success = success def __repr__(self): return '<Build id:%r project_id:%r>' % (self.id, self.project_id)<|fim▁end|>
<|file_name|>pipeobject.py<|end_file_name|><|fim▁begin|># This file is a part of pysnapshotd, a program for automated backups # Copyright (C) 2015-2016 Jonas Thiem # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. import threading class BufferedPipeObject(object): def __init__(self): self.closed = False self.contents = b"" self.access_mutex = threading.Lock() self.waiting_for_content_semaphore = \ threading.Semaphore() self.waiting_for_content_counter = 0 self._write_func = None def _set_write_func(self, f): self.access_mutex.acquire() self._write_func = f self.access_mutex.release() def close(self): self.access_mutex.acquire() self.closed = True self.access_mutex.release() def write(self, data): # First, check if pipe is still open at all: self.access_mutex.acquire() if self.closed: self.access_mutex.release() raise OSError("broken pipe - pipe has been closed") # Do nothing for an obvious dummy command: if len(data) == 0: self.access_mutex.release() return 0 # Try to write with the write func if given: # (which means this pipe object itself will always remain empty and # .read() on it will block forever, since things are somewhat bypassed # directly to some target write function) if self._write_func != None: try: self._write_func(data) except Exception: self.closed = True finally: self.access_mutex.release() return # 
Otherwise, just put contents in internal buffer for reading from # this pipe from "the other end": try: self.contents += data i = 0 while i < self.waiting_for_content_counter: self.waiting_for_content_semaphore.\ release() i += 1 finally:<|fim▁hole|> def read(self, amount): print(" >> PIPE READ: " + str(amount)) if amount <= 0: print(" >> PIPE READ DATA: <empty read>") return b"" self.access_mutex.acquire() # Try to read data as long as needed to acquire requested amount: obtained_data = b"" while True: # If pipe was closed along this process, abort: if self.closed: self.access_mutex.release() raise OSError("broken pipe - pipe has been closed") # Try to obtain as much data as requested: if len(self.contents) > 0: added_data = self.contents[:amount] obtained_data += added_data self.contents = self.contents[len(added_data):] amount -= len(added_data) # If there is not enough data available, we will need to wait for # more: if amount > 0: self.waiting_for_content_counter += 1 self.access_mutex.release() self.waiting_for_content_semaphore.acquire() self.access_mutex.acquire() else: assert(len(obtained_data) > 0) print(" >> PIPE READ DATA: " + str(obtained_data)) return obtained_data<|fim▁end|>
self.access_mutex.release()
<|file_name|>verify-mksnapshot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from __future__ import print_function import argparse import glob import os import platform import shutil import subprocess import sys from lib.util import get_electron_branding, rm_rf, scoped_cwd PROJECT_NAME = get_electron_branding()['project_name'] PRODUCT_NAME = get_electron_branding()['product_name'] SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) SNAPSHOT_SOURCE = os.path.join(SOURCE_ROOT, 'spec', 'fixtures', 'testsnap.js') def main(): args = parse_args() source_root = os.path.abspath(args.source_root) initial_app_path = os.path.join(source_root, args.build_dir) app_path = create_app_copy(initial_app_path) returncode = 0 try: with scoped_cwd(app_path): if args.snapshot_files_dir is None: with open(os.path.join(app_path, 'mksnapshot_args')) as f: mkargs = f.read().splitlines() subprocess.check_call(mkargs + [ SNAPSHOT_SOURCE ], cwd=app_path) print('ok mksnapshot successfully created snapshot_blob.bin.')<|fim▁hole|> if os.environ.get('TARGET_ARCH') == 'arm64': context_snapshot = 'v8_context_snapshot.arm64.bin' else: context_snapshot = 'v8_context_snapshot.x86_64.bin' context_snapshot_path = os.path.join(app_path, context_snapshot) gen_binary = get_binary_path('v8_context_snapshot_generator', \ app_path) genargs = [ gen_binary, \ '--output_file={0}'.format(context_snapshot_path) ] subprocess.check_call(genargs) print('ok v8_context_snapshot_generator successfully created ' \ + context_snapshot) if args.create_snapshot_only: return 0 else: gen_bin_path = os.path.join(args.snapshot_files_dir, '*.bin') generated_bin_files = glob.glob(gen_bin_path) for bin_file in generated_bin_files: shutil.copy2(bin_file, app_path) test_path = os.path.join(SOURCE_ROOT, 'spec', 'fixtures', \ 'snapshot-items-available') if sys.platform == 'darwin': bin_files = glob.glob(os.path.join(app_path, '*.bin')) app_dir = os.path.join(app_path, '{0}.app'.format(PRODUCT_NAME)) 
electron = os.path.join(app_dir, 'Contents', 'MacOS', PRODUCT_NAME) bin_out_path = os.path.join(app_dir, 'Contents', 'Frameworks', '{0} Framework.framework'.format(PROJECT_NAME), 'Resources') for bin_file in bin_files: shutil.copy2(bin_file, bin_out_path) elif sys.platform == 'win32': electron = os.path.join(app_path, '{0}.exe'.format(PROJECT_NAME)) else: electron = os.path.join(app_path, PROJECT_NAME) subprocess.check_call([electron, test_path]) print('ok successfully used custom snapshot.') except subprocess.CalledProcessError as e: print('not ok an error was encountered while testing mksnapshot.') print(e) returncode = e.returncode except KeyboardInterrupt: print('Other error') returncode = 0 print('Returning with error code: {0}'.format(returncode)) return returncode # Create copy of app to install custom snapshot def create_app_copy(initial_app_path): print('Creating copy of app for testing') app_path = os.path.join(os.path.dirname(initial_app_path), os.path.basename(initial_app_path) + '-mksnapshot-test') rm_rf(app_path) shutil.copytree(initial_app_path, app_path, symlinks=True) return app_path def get_binary_path(binary_name, root_path): if sys.platform == 'win32': binary_path = os.path.join(root_path, '{0}.exe'.format(binary_name)) else: binary_path = os.path.join(root_path, binary_name) return binary_path def parse_args(): parser = argparse.ArgumentParser(description='Test mksnapshot') parser.add_argument('-b', '--build-dir', help='Path to an Electron build folder. \ Relative to the --source-root.', default=None, required=True) parser.add_argument('--create-snapshot-only', help='Just create snapshot files, but do not run test', action='store_true') parser.add_argument('--snapshot-files-dir', help='Directory containing snapshot files to use \ for testing', default=None, required=False) parser.add_argument('--source-root', default=SOURCE_ROOT, required=False) return parser.parse_args() if __name__ == '__main__': sys.exit(main())<|fim▁end|>
context_snapshot = 'v8_context_snapshot.bin' if platform.system() == 'Darwin':
<|file_name|>LayerObjectFieldTemplate.js<|end_file_name|><|fim▁begin|>import React from "react"; import PropTypes from "prop-types"; import Box from "grommet/components/Box"; import Paragraph from "grommet/components/Paragraph"; import Label from "grommet/components/Label"; import FormLayer from "../components/FormLayer"; class LayerObjectFieldTemplate extends React.Component { constructor(props) { super(props); this.state = { layerActive: false }; } _onClick() { this.setState({ layerActive: true }); } render() { if (this.props.idSchema["$id"] == "root") { return <Box>{this.props.properties.map(prop => prop.content)}</Box>; } else { return ( <Box className="grommetux-form-field" direction="row" wrap={false}> { <FormLayer layerActive={this.state.layerActive} onClose={(() => { this.setState({ layerActive: false }); }).bind(this)} properties={this.props.properties.map(prop => prop.content)} /> } <Box flex={true}> <Box align="center"> <Label size="small" strong="none" uppercase={true}> {this.props.title} </Label> </Box><|fim▁hole|> <Paragraph size="small">{this.props.description}</Paragraph> ) : null} </Box> </Box> ); } } } LayerObjectFieldTemplate.propTypes = { title: PropTypes.string, description: PropTypes.string, required: PropTypes.bool, idSchema: PropTypes.object, uiSchema: PropTypes.object, properties: PropTypes.object }; export default LayerObjectFieldTemplate;<|fim▁end|>
{this.props.description ? (
<|file_name|>edit-rule.component.ts<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> * See the License for the specific language governing permissions and * limitations under the License. */ import {AfterViewInit, ElementRef, EventEmitter, Injector, OnDestroy, OnInit, Output} from '@angular/core'; import {AbstractComponent} from '@common/component/abstract.component'; import {Field, Rule} from '@domain/data-preparation/pr-dataset'; export abstract class EditRuleComponent extends AbstractComponent implements OnInit, AfterViewInit, OnDestroy { /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Private Variables |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Protected Variables |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Public Variables |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ public isShow: boolean = false; public mode: string = 'APPEND'; public ruleVO: Rule; public colDescs: any; public fields: Field[]; public selectedFields: Field[] = []; public forceFormula: string = ''; public forceCondition: string = ''; @Output() public onEvent: EventEmitter<any> = new EventEmitter(); /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Constructor |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ // 생성자 protected constructor( protected elementRef: ElementRef, protected injector: Injector) { super(elementRef, injector); } /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Override Method 
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /** * 컴포넌트 초기 실행 */ public ngOnInit() { super.ngOnInit(); } // function - ngOnInit /** * 화면 초기화 */ public ngAfterViewInit() { super.ngAfterViewInit(); } // function - ngAfterViewInit /** * 컴포넌트 제거 */ public ngOnDestroy() { super.ngOnDestroy(); } // function - ngOnDestroy /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Public Method - API |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ public init(fields: Field[], selectedFields: Field[], data?: { ruleString?: string, jsonRuleString: any }) { this.fields = fields; this.selectedFields = selectedFields; if (!this.isNullOrUndefined(data)) { this.parsingRuleString(data); } this.beforeShowComp(); this.isShow = true; this.safelyDetectChanges(); this.afterShowComp(); this.safelyDetectChanges(); } // function - init public setValue(key: string, value: any) { Object.keys(this).some(item => { if (key === item && 'function' !== typeof this[key]) { this[key] = value; return true; } else { return false; } }); this.safelyDetectChanges(); } // function - setValue /** * Apply formula using Advanced formula popup * @param {{command: string, formula: string}} data */ public doneInputFormula(data: { command: string, formula: string }) { if (data.command === 'setCondition') { this.setValue('forceCondition', data.formula); } else { this.setValue('forceFormula', data.formula); } } /** * Returns value of variable name equals the key * @param {string} key * @returns {string} */ public getValue(key: string): string { let returnValue: string = undefined; if (!this.isNullOrUndefined(this[key])) { returnValue = this[key]; } this.safelyDetectChanges(); return returnValue; } // function - setValue /** * Rule 형식 정의 및 반환 */ public abstract getRuleData(); /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Public Method |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Protected Method 
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ /** * 컴포넌트 표시 전 실행 * @protected */ protected abstract beforeShowComp(); /** * 컴포넌트 표시 후 실행 * @protected */ protected abstract afterShowComp(); /** * rule string 을 분석한다. * @param ruleString * @protected */ protected abstract parsingRuleString(ruleString: any); protected getColumnNamesInArray(fields: Field[], isWrap: boolean = false): string[] { return fields.map((item) => { if (isWrap) { return '`' + item.name + '`' } else { return item.name } }); } /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= | Private Method |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/ }<|fim▁end|>
<|file_name|>test_cluster_runner_config.py<|end_file_name|><|fim▁begin|>from genty import genty, genty_dataset import sys from app.master.atomizer import Atomizer from app.master.job_config import ConfigValidationError from app.master.cluster_runner_config import ClusterRunnerConfig, ConfigParseError from test.framework.base_unit_test_case import BaseUnitTestCase @genty class TestClusterRunnerConfig(BaseUnitTestCase): _COMPLETE_VALID_CONFIG = """ Best Job Ever: max_executors: 21 setup_build: - echo "This is setup! Woo!" # no semicolons in this section - sleep 1 commands: - echo "Now I'm doing $THE_THING!"; # semicolons in this section - echo "Semicolons are fun." > /tmp/my_hard_work.txt; atomizers: - THE_THING: printf 'something with a number %d\\n' {1..50} """ _MULTI_JOB_CONFIG = """ First Job: commands: - echo "go" atomizers: - ENV_VAR: echo "atom" Second Job: commands: - echo "go" atomizers: - ENV_VAR: echo "atom" """ _FREEFORM_ATOMIZER = """ PHPUnit: commands: - echo "go" atomizers: - "export VARNAME='asdf'" """ _MINIMAL_CONFIG = """ PHPUnit: commands: - echo "go" atomizers: - ENV_VAR: find . -name "*.php" """ _EMPTY_CONFIG = """ PHPUnit: """ _VALID_CONFIG_WITH_EMPTY_COMMANDS = """ PHPUnit: commands: - echo "first"<|fim▁hole|> - echo "last" atomizers: - ENV_VAR: echo "atom" """ _NO_COMMAND_INVALID_CONFIG = """ PHPUnit: max_executors: 5 setup_build: - echo "I don't know what I'm doing." atomizers: - VARNAME: sleep 123 """ _INVALID_CONFIG_WITH_EMPTY_COMMANDS = """ PHPUnit: commands: - atomizers: - ENV_VAR: echo "atom" """ _BACKGROUND_TASK_CONFIG = """ PHPUnit: max_executors: 5 setup_build: - echo "in the background" & - echo "in the foreground" ; - echo "another thing" atomizers: - VARNAME: sleep1 commands: - echo "go" """ @genty_dataset( complete_valid_config=(_COMPLETE_VALID_CONFIG, { 'name': 'Best Job Ever', 'max_executors': 21, 'setup_build': 'echo "This is setup! Woo!" && sleep 1', 'command': 'echo "Now I\'m doing $THE_THING!" && echo "Semicolons are fun." 
> /tmp/my_hard_work.txt', 'atomizer': [{'THE_THING': 'printf \'something with a number %d\\n\' {1..50}'}], }), valid_config_with_empty_command=(_VALID_CONFIG_WITH_EMPTY_COMMANDS, { 'command': 'echo "first" && echo "last"', 'atomizer': [{'ENV_VAR': 'echo "atom"'}], }), ) def test_valid_conf_properties_are_correctly_parsed(self, config_string, expected_loaded_config): config = ClusterRunnerConfig(config_string) job_config = config.get_job_config() for method_name, expected_value in expected_loaded_config.items(): actual_value = getattr(job_config, method_name) if isinstance(actual_value, Atomizer): actual_value = actual_value._atomizer_dicts # special case comparison for atomizer self.assertEqual(actual_value, expected_value, 'The output of {}() should match the expected value.'.format(method_name)) @genty_dataset( ('max_executors', sys.maxsize), ('setup_build', None), ) def test_undefined_conf_properties_return_default_values(self, conf_method_name, expected_value): config = ClusterRunnerConfig(self._MINIMAL_CONFIG) job_config = config.get_job_config() actual_value = getattr(job_config, conf_method_name) self.assertEqual(actual_value, expected_value, 'The default output of {}() should match the expected value.'.format(conf_method_name)) @genty_dataset( valid_config=(_COMPLETE_VALID_CONFIG, True), empty_config=(_EMPTY_CONFIG, False), invalid_config=(_NO_COMMAND_INVALID_CONFIG, False), invalid_config_with_empty_commands=(_INVALID_CONFIG_WITH_EMPTY_COMMANDS, False), ) def test_valid_configs_are_detected(self, config_contents, is_expected_valid): config = ClusterRunnerConfig(config_contents) try: config.get_job_config() except (ConfigParseError, ConfigValidationError) as e: self.assertFalse(is_expected_valid, 'Config is valid, but threw {}'.format(type(e))) return self.assertTrue(is_expected_valid, 'Config is not valid, but parsed without error') @genty_dataset( freeform_atomizer=(_FREEFORM_ATOMIZER,), ) def test_incorrect_atomizer_type_raises_exception(self, 
config_contents): config = ClusterRunnerConfig(config_contents) with self.assertRaises(ConfigValidationError): config.get_job_config() def test_get_specific_job_config(self): config = ClusterRunnerConfig(self._MULTI_JOB_CONFIG) job_config = config.get_job_config('Second Job') self.assertEqual('Second Job', job_config.name, '') job_config = config.get_job_config('First Job') self.assertEqual('First Job', job_config.name, '') def test_config_with_background_task(self): config = ClusterRunnerConfig(self._BACKGROUND_TASK_CONFIG) job_config = config.get_job_config() self.assertEqual(job_config.setup_build, 'echo "in the background" & echo "in the foreground" && echo "another thing"')<|fim▁end|>
- - # some YAML comment -
<|file_name|>edicao-novo.resolver.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import { of } from 'rxjs'; import { Resolve, ActivatedRouteSnapshot } from '@angular/router'; import { catchError } from 'rxjs/operators';<|fim▁hole|> @Injectable() export class EdicaoNovoResolverService implements Resolve<any> { constructor(private catalogoService: CatalogoService) {} resolve(snapshot: ActivatedRouteSnapshot) { const params = snapshot.queryParams; return this.catalogoService.getModelo(params['id']) .pipe(catchError((error) => of({error: error}))); } }<|fim▁end|>
import { CatalogoService } from './catalogo.service';
<|file_name|>audio_settings_page.py<|end_file_name|><|fim▁begin|>import fsui<|fim▁hole|>from launcher.settings.settings_page import SettingsPage from system.prefs.components.notworking import PrefsNotWorkingWarningPanel class AudioSettingsPage(SettingsPage): def __init__(self, parent: Widget) -> None: super().__init__(parent) PrefsNotWorkingWarningPanel(parent=self) self.layout.add_spacer(20) icon = fsui.Icon("audio-settings", "pkg:workspace") gettext("Audio Settings") title = gettext("Audio") subtitle = "" self.add_header(icon, title, subtitle) self.add_option("volume") self.add_option("stereo_separation") self.add_section(gettext("Floppy Drive Sound Emulation")) self.add_option("floppy_drive_volume") self.add_option(Option.FLOPPY_DRIVE_VOLUME_EMPTY) self.add_section(gettext("Advanced Audio Options")) self.add_option("audio_frequency") self.add_option("audio_buffer_target_size")<|fim▁end|>
from fswidgets.widget import Widget from launcher.i18n import gettext from launcher.option import Option
<|file_name|>data.js<|end_file_name|><|fim▁begin|>// Get User's Coordinate from their Browser window.onload = function() { // HTML5/W3C Geolocation if (navigator.geolocation) { navigator.geolocation.getCurrentPosition(UserLocation); } // Default to Washington, DC else NearestCity(38.8951, -77.0367); } // Callback function for asynchronous call to HTML5 geolocation function UserLocation(position) { NearestCity(position.coords.latitude, position.coords.longitude); } // Convert Degress to Radians function Deg2Rad(deg) { return deg * Math.PI / 180; } function PythagorasEquirectangular(lat1, lon1, lat2, lon2) { lat1 = Deg2Rad(lat1); lat2 = Deg2Rad(lat2); lon1 = Deg2Rad(lon1); lon2 = Deg2Rad(lon2); var R = 6371; // km var x = (lon2 - lon1) * Math.cos((lat1 + lat2) / 2); var y = (lat2 - lat1); var d = Math.sqrt(x * x + y * y) * R; return d; } var lat = 20; // user's latitude var lon = 40; // user's longitude var cities = [ ["3","Aachen","50.782659","6.094087","202","Nordrhein-Westfalen"], ["44","Großenkneten","52.933541","8.236997","43.5","Niedersachsen"], ["73","Aldersbach-Kriestorf","48.615935","13.050595","340","Bayern"], ["78","Alfhausen","52.485314","7.912553","65","Niedersachsen"], ["91","Alsfeld-Eifa","50.744591","9.344972","300","Hessen"], ["142","Altomünster-Maisbrunn","48.406038","11.311716","510","Bayern"], ["150","Alzey","49.7273","8.116356","215","Rheinland-Pfalz"], ["151","Amberg-Unterammersricht","49.469064","11.854641","383","Bayern"], ["164","Angermünde","53.031601","13.99066","54","Brandenburg"], ["183","Arkona","54.67916","13.434252","42","Mecklenburg-Vorpommern"], ["198","Artern","51.374461","11.291977","164","Thüringen"], ["217","Attenkam","47.877407","11.364245","672","Bayern"], ["232","Augsburg","48.425393","10.942011","461.4","Bayern"], ["243","Aurich","53.462052","7.466971","4","Niedersachsen"], ["257","Baden-Baden-Geroldsau","48.72696","8.245757","240","Baden-Württemberg"], ["282","Bamberg","49.874176","10.920581","240","Bayern"], 
["298","Barth","54.340582","12.710828","3","Mecklenburg-Vorpommern"], ["320","Heinersreuth-Vollhof","49.96667","11.519692","350","Bayern"], ["330","Beerfelden","49.561729","8.967329","450","Hessen"], ["377","BadBergzabern","49.107025","7.996749","252","Rheinland-Pfalz"], ["400","Berlin-Buch","52.630941","13.50215","60","Berlin"], ["403","Berlin-Dahlem(FU)","52.453711","13.301731","51","Berlin"], ["427","Berlin-Schönefeld","52.380698","13.530609","46","Brandenburg"], ["430","Berlin-Tegel","52.564412","13.308848","36","Berlin"], ["433","Berlin-Tempelhof","52.467488","13.402115","48","Berlin"], ["435","Berlin-Zehlendorf","52.428902","13.232686","45","Berlin"], ["450","Bernkastel-Kues","49.9186","7.0664","120","Rheinland-Pfalz"], ["502","Bischbrunn","49.874669","9.48829","412","Bayern"], ["591","Boizenburg","53.391084","10.687761","45","Mecklenburg-Vorpommern"], ["596","Boltenhagen","54.002806","11.190772","15","Mecklenburg-Vorpommern"], ["614","Borgentreich-Bühne","51.570874","9.311999","240","Nordrhein-Westfalen"], ["648","Brande-Hörnerkirchen","53.855114","9.71521","9","Schleswig-Holstein"], ["662","Braunschweig","52.291443","10.446456","81.2","Niedersachsen"], ["691","Bremen","53.045015","8.797904","4.1","Bremen"], ["701","Bremerhaven","53.533162","8.576083","7","Bremen"], ["722","Brocken","51.79862","10.618265","1133.9","Sachsen-Anhalt"], ["755","Buchen; Kr.Neckar-Odenwald","49.518196","9.32127","340","Baden-Württemberg"], ["807","Schlüsselfeld(Kläranlage)","49.74499","10.643423","290","Bayern"], ["817","Burgwald-Bottendorf","51.030634","8.814579","293","Hessen"], ["850","Celle","52.595933","10.029573","39","Niedersachsen"], ["853","Chemnitz","50.791286","12.871977","418","Sachsen"], ["863","Clausthal-Zellerfeld","51.790354","10.347039","585","Niedersachsen"], ["880","Cottbus","51.775983","14.316811","69","Brandenburg"], ["891","Cuxhaven","53.871254","8.705821","5","Niedersachsen"], ["963","Diepholz","52.588112","8.342405","37.7","Niedersachsen"], 
["979","Dillenburg","50.736371","8.267238","314","Hessen"], ["982","Dillingen/Donau","48.570123","10.498459","420","Bayern"], ["1001","Doberlug-Kirchhain","51.645104","13.574676","96.8","Brandenburg"], ["1048","Dresden-Klotzsche","51.127955","13.754338","227","Sachsen"], ["1078","Düsseldorf","51.295952","6.768648","36.6","Nordrhein-Westfalen"], ["1103","Ebersberg-Halbing","48.100186","11.987154","592","Bayern"], ["1107","Ebrach","49.851977","10.499043","346","Bayern"], ["1197","Ellwangen-Rindelbach","48.989498","10.131234","460","Baden-Württemberg"], ["1224","Emmendingen-Mundingen","48.137756","7.835089","201","Baden-Württemberg"], ["1266","Erfde","54.299212","9.316185","18","Schleswig-Holstein"], ["1270","Erfurt-Weimar","50.982859","10.960808","316","Thüringen"], ["1279","Möhrendorf-Kleinseebach","49.649744","11.007445","268","Bayern"], ["1292","Eschenbach/Oberpfalz","49.752196","11.822151","470","Bayern"], ["1297","Eschwege","51.204132","10.013767","156","Hessen"], ["1303","Essen-Bredeney","51.404085","6.967741","150","Nordrhein-Westfalen"], ["1327","Weilerswist-Lommersum","50.711886","6.790489","147","Nordrhein-Westfalen"], ["1332","Falkenberg; Kr.Rottal-Inn","48.48315","12.724108","472","Bayern"], ["1346","Feldberg/Schwarzwald","47.874893","8.003817","1489.6","Baden-Württemberg"], ["1357","Fichtelberg/Oberfranken-Hüttstadl","49.98069","11.837637","654","Bayern"], ["1358","Fichtelberg","50.428346","12.953506","1213","Sachsen"], ["1420","Frankfurt/Main","50.025911","8.521294","99.7","Hessen"], ["1443","Freiburg","48.023276","7.834441","236.3","Baden-Württemberg"], ["1451","Freiburg/Elbe","53.827696","9.249276","2","Niedersachsen"], ["1468","Freudenstadt","48.453728","8.409057","796.5","Baden-Württemberg"], ["1503","Friesoythe-Altenoythe","53.064299","7.902205","5.7","Niedersachsen"], ["1526","Fulda-Horas","50.566806","9.65325","242","Hessen"], ["1544","Gardelegen","52.512914","11.394131","47","Sachsen-Anhalt"], 
["1550","Garmisch-Partenkirchen","47.48305","11.062293","719","Bayern"], ["1580","Geisenheim","49.985931","7.954853","110.2","Hessen"], ["1590","Geldern-Walbeck","51.494168","6.246343","37","Nordrhein-Westfalen"], ["1612","Gera-Leumnitz","50.881268","12.128858","311","Thüringen"], ["1619","Gernsheim","49.761079","8.487637","90","Hessen"], ["1639","Gießen/Wettenberg","50.601706","8.643902","202.7","Hessen"], ["1645","Gilserberg-Moischeid","50.965565","9.050014","340","Hessen"], ["1667","Glückstadt","53.79702","9.429456","1","Schleswig-Holstein"], ["1684","Görlitz","51.162151","14.950565","238","Sachsen"], ["1691","Göttingen","51.500331","9.950566","167","Niedersachsen"], ["1735","Grainet-Rehberg","48.789321","13.62911","628","Bayern"], ["1757","Greifswald","54.096736","13.405576","2","Mecklenburg-Vorpommern"], ["1803","GroßLüsewitz","54.071372","12.32378","34","Mecklenburg-Vorpommern"], ["1832","GroßerArber","49.113037","13.134204","1436","Bayern"], ["1920","Hagen-Fley","51.412887","7.489477","100","Nordrhein-Westfalen"], ["1975","Hamburg-Fuhlsbüttel","53.633187","9.988085","11","Hamburg"], ["1981","Hamburg-Neuwiedenthal","53.477658","9.895686","3","Hamburg"], ["1990","Hamburg-Wandsbek","53.585347","10.129348","18","Hamburg"], ["1993","Hameln","52.084784","9.389576","68","Niedersachsen"], ["2014","Hannover","52.464425","9.677917","55","Niedersachsen"], ["2074","Hechingen","48.375139","8.980076","522","Baden-Württemberg"], ["2080","Heidelberg","49.420618","8.667613","110","Baden-Württemberg"], ["2110","Heinsberg-Schleiden","51.041072","6.104239","57","Nordrhein-Westfalen"], ["2115","Helgoland","54.174957","7.891954","4","Schleswig-Holstein"], ["2120","Helmstedt","52.216409","11.02193","140","Niedersachsen"], ["2147","Herford","52.126408","8.68649","77","Nordrhein-Westfalen"], ["2167","Niederwörresbach","49.767176","7.335605","302","Rheinland-Pfalz"], ["2171","BadHersfeld","50.851917","9.737819","272.2","Hessen"], 
["2173","Herten","51.588967","7.154827","60","Nordrhein-Westfalen"], ["2206","Hildesheim","52.140812","9.883806","117","Niedersachsen"], ["2211","Hilgenroth","50.737065","7.652755","295","Rheinland-Pfalz"], ["2250","Höllenstein(Kraftwerk)","49.127875","12.864512","403","Bayern"], ["2260","Hof(Stadt)","50.323032","11.907729","474","Bayern"], ["2261","Hof","50.312236","11.876052","565.1","Bayern"], ["2268","Hofheim","50.135244","10.515636","263","Bayern"], ["2290","Hohenpeißenberg","47.800864","11.010754","977","Bayern"], ["2303","Hohn","54.314596","9.538997","10","Schleswig-Holstein"], ["2306","Hohwacht","54.319391","10.673193","8","Schleswig-Holstein"],<|fim▁hole|>["2483","KahlerAsten","51.180254","8.489068","839","Nordrhein-Westfalen"], ["2497","Kall-Sistig","50.50141","6.526408","505","Nordrhein-Westfalen"], ["2522","Karlsruhe","49.038161","8.36406","111.6","Baden-Württemberg"], ["2532","Kassel","51.296303","9.442424","231","Hessen"], ["2542","Kaufbeuren","47.865204","10.600653","716","Bayern"], ["2543","Kaufering","48.09158","10.860913","585","Bayern"], ["2559","Kempten","47.723259","10.334797","705.2","Bayern"], ["2597","BadKissingen","50.224063","10.079213","281.8","Bayern"], ["2600","Kitzingen","49.736304","10.178117","188","Bayern"], ["2601","KleinerFeldberg/Taunus","50.221815","8.446877","825.6","Hessen"], ["2629","Kleve","51.761242","6.095381","46","Nordrhein-Westfalen"], ["2638","Klippeneck","48.105371","8.754878","973.4","Baden-Württemberg"], ["2657","Koblenz-Horchheim","50.336911","7.599165","85","Rheinland-Pfalz"], ["2667","Köln-Bonn","50.864559","7.157488","92","Nordrhein-Westfalen"], ["2680","BadKönigshofen","50284","10.4456","288","Bayern"], ["2700","Kösching","48.830189","11.487243","416","Bayern"], ["2712","Konstanz","47.677419","9.190052","442.5","Baden-Württemberg"], ["2750","Kronach","50.252336","11.32093","312","Bayern"], ["2812","Lahr","48.364697","7.828016","155","Baden-Württemberg"], 
["2814","Merklingen","48.512126","9.764464","685","Baden-Württemberg"], ["2925","Leinefelde","51.393291","10.312345","356","Thüringen"], ["2928","Leipzig-Holzhausen","51.315067","12.446226","138","Sachsen"], ["2932","Leipzig/Halle","51.43479","12.239622","131","Sachsen"], ["2947","Lennestadt-Theten","51.133253","8.034831","286","Nordrhein-Westfalen"], ["2950","Lensahn","54.218979","10.884417","14","Schleswig-Holstein"], ["2968","Köln-Stammheim","50.989428","6.977688","43","Nordrhein-Westfalen"], ["3015","Lindenberg","52.208491","14.117973","98","Brandenburg"], ["3018","Lindenfels-Winterkasten","49.711685","8.780395","445","Hessen"], ["3023","Lingen","52.518093","7.308057","22","Niedersachsen"], ["3028","BadLippspringe","51.785459","8.838777","157","Nordrhein-Westfalen"], ["3031","Lippstadt-Bökenförde","51.633617","8.39445","92","Nordrhein-Westfalen"], ["3032","ListaufSylt","55.010989","8.412531","24.7","Schleswig-Holstein"], ["3093","Lüchow","52.972375","11.137388","17","Niedersachsen"], ["3126","Magdeburg","52.102889","11.582678","76","Sachsen-Anhalt"], ["3137","Mainz-Lerchenberg(ZDF)","49.965563","8.213852","195","Rheinland-Pfalz"], ["3155","Manderscheid-Sonnenhof","50.101542","6.800909","413","Rheinland-Pfalz"], ["3167","BadMarienberg","50.662025","7.960193","546.6","Rheinland-Pfalz"], ["3196","Marnitz","53.322288","11.931949","81","Mecklenburg-Vorpommern"], ["3231","Meiningen","50.561159","10.377105","450","Thüringen"], ["3244","Memmingen","47.982038","10.138397","615","Bayern"], ["3257","Mergentheim; Bad-Neunkirchen","49.477317","9.762223","250","Baden-Württemberg"], ["3271","Metten","48.854761","12.918851","313","Bayern"], ["3307","Mittenwald-Buckelwiesen","47.477882","11.265305","981","Bayern"], ["3366","Mühldorf","48.279069","12.502379","405.6","Bayern"], ["3376","Müncheberg","52.517588","14.123226","63","Brandenburg"], ["3379","München-Stadt","48.163142","11.542922","515.2","Bayern"], 
["3402","Münsingen-Apfelstetten","48.385066","9.483693","750","Baden-Württemberg"], ["3490","Neuenahr; Bad-Ahrweiler","50.534561","7.085337","111","Rheinland-Pfalz"], ["3509","Menz","53.101974","13.042072","77","Brandenburg"], ["3527","Neukirchen-Hauptschwenda","50.89228","9.40498","500","Hessen"], ["3537","Neumünster","54.087289","9.979065","26","Schleswig-Holstein"], ["3552","Neuruppin","52.903704","12.807205","38","Brandenburg"], ["3578","Regensburg-Burgweinting","48.983186","12.144318","341","Bayern"], ["3612","Nienburg","52.671083","9.22291","25","Niedersachsen"], ["3621","Reimlingen","48.825254","10.506667","435","Bayern"], ["3631","Norderney","53.712302","7.151921","11","Niedersachsen"], ["3640","Nordhorn-Blanke","52.412368","7.063994","24","Niedersachsen"], ["3667","Nürnberg-Netzstall","49.425781","11.253831","368","Bayern"], ["3668","Nürnberg","49.503031","11.054923","314","Bayern"], ["3730","Oberstdorf","47.398578","10.275988","806","Bayern"], ["3739","Oberviechtach","49.452098","12.436557","596","Bayern"], ["3761","Öhringen","49.207046","9.517492","275.9","Baden-Württemberg"], ["3811","Oschatz","51.295965","13.092837","150","Sachsen"], ["3875","Parsberg/Oberpfalz-Eglwang","49.15102","11.689638","549","Bayern"], ["3927","Pfullendorf","47.934445","9.28694","630","Baden-Württemberg"], ["3939","Pirmasens","49.191191","7.5879","385","Rheinland-Pfalz"], ["3987","Potsdam","52.381287","13.062229","81","Brandenburg"], ["4063","Rahden-Kleinendorf","52.446122","8.590574","40.5","Nordrhein-Westfalen"], ["4104","Regensburg","49.042357","12.102053","365.4","Bayern"], ["4169","Rheinau-Memprechtshofen","48.67025","7.993875","131","Baden-Württemberg"], ["4175","Rheinfelden","47.558997","7.772105","282","Baden-Württemberg"], ["4261","Rosenheim","47.87535","12.127954","444","Bayern"], ["4271","Rostock-Warnemünde","54.180279","12.080806","4","Mecklenburg-Vorpommern"], ["4278","RothbeiNürnberg","49.251149","11.093364","340","Bayern"], 
["4287","RothenburgobderTauber","49.384842","10.173229","415","Bayern"], ["4318","Ruhpolding","47.731033","12.660966","692","Bayern"], ["4323","Ruppertsecken","49.646838","7.883741","461","Rheinland-Pfalz"], ["4336","Saarbrücken-Ensheim","49.212803","7.107712","320","Saarland"], ["4350","BadSäckingen","47.561928","7.939928","339","Baden-Württemberg"], ["4371","BadSalzuflen","52.104211","8.75208","134.6","Nordrhein-Westfalen"], ["4377","Sandberg","50.351741","10.003394","510","Bayern"], ["4393","SanktPeter-Ording","54.327918","8.602987","5","Schleswig-Holstein"], ["4411","Schaafheim-Schlierbach","49.919516","8.967138","155","Hessen"], ["4442","Schieder","51.911935","9.15332","155","Nordrhein-Westfalen"], ["4466","Schleswig","54.527539","9.548666","42.7","Schleswig-Holstein"], ["4501","Schmücke","50.654562","10.769332","937","Thüringen"], ["4508","Schneifelforsthaus","50.296848","6.419387","649","Rheinland-Pfalz"], ["4549","Schönwald/Schwarzwald","48.100168","8.196889","1021","Baden-Württemberg"], ["4560","Schotten","50.492508","9.122558","265","Hessen"], ["4592","Schwandorf","49.327832","12.087041","356","Bayern"], ["4597","Schwangau-Horn","47.576937","10.71814","792","Bayern"], ["4625","Schwerin","53.642521","11.387203","59","Mecklenburg-Vorpommern"], ["4642","Seehausen","52.891136","11.729697","21","Sachsen-Anhalt"], ["4651","Seesen","51.903973","10.188523","186","Niedersachsen"], ["4692","Siegen(Kläranlage)","50.853381","7.996614","229","Nordrhein-Westfalen"], ["4703","Sigmaringen-Laiz","48.071874","9.194248","580","Baden-Württemberg"], ["4706","Simbach/Inn","48.271874","13.027305","360","Bayern"], ["4745","Soltau","52.9604","9.79306","75.6","Niedersachsen"], ["4887","Stötten","48.665709","9.864648","733.8","Baden-Württemberg"], ["4896","Wagersrott","54.665383","9.805022","40","Schleswig-Holstein"], ["4926","Stuttgart(Neckartal)","48.789592","9.216739","224","Baden-Württemberg"], 
["4928","Stuttgart(Schnarrenberg)","48.828188","9.200041","314.3","Baden-Württemberg"], ["4931","Stuttgart-Echterdingen","48.688307","9.223535","371","Baden-Württemberg"], ["5014","Worpswede-Hüttenbusch","53.275827","8.985687","7","Niedersachsen"], ["5017","Teuschnitz","50.400219","11.388904","633","Bayern"], ["5029","Tholey","49.473764","7.038578","385.9","Saarland"], ["5064","Tönisvorst","51.289722","6.443651","37","Nordrhein-Westfalen"], ["5100","Trier-Petrisberg","49.747889","6.658227","265","Rheinland-Pfalz"], ["5111","Trostberg","48.03111","12.53957","559","Bayern"], ["5142","Ueckermünde","53.744431","14.069699","1.2","Mecklenburg-Vorpommern"], ["5155","Ulm","48.383656","9.952422","566.8","Baden-Württemberg"], ["5165","Unterlüß","52.849932","10.289833","95","Niedersachsen"], ["5185","Uttenreuth","49.593309","11.070363","291","Bayern"], ["5229","Villingen-Schwenningen","48.045281","8.460835","720","Baden-Württemberg"], ["5279","Wahlsburg-Lippoldsberg","51.61941","9.57491","176","Hessen"], ["5280","Wittenborn","53.922412","10.226738","35","Schleswig-Holstein"], ["5361","Wartenberg-Angersbach","50.627083","9.441989","270","Hessen"], ["5371","Wasserkuppe","50.497345","9.942797","921","Hessen"], ["5397","Weiden","49.666262","12.184464","439.6","Bayern"], ["5426","Weinbiet","49.375835","8.121278","553","Rheinland-Pfalz"], ["5433","Weiskirchen/Saar","49.553365","6.811951","380","Saarland"], ["5440","Weißenburg-Emetzheim","49.011554","10.93081","439.3","Bayern"], ["5467","Wendelstein","47.703531","12.011857","1832","Bayern"], ["5540","Wiesbaden(Süd)","50.068132","8.260327","147","Hessen"], ["5610","Winterberg","51.196815","8.526821","681","Nordrhein-Westfalen"], ["5629","Wittenberg","51.889183","12.644523","105","Sachsen-Anhalt"], ["5654","Wörnitz-Bottenweiler","49.217848","10.229589","464","Bayern"], ["5664","Wolfach","48.295289","8.239094","291","Baden-Württemberg"], ["5676","Wolfsburg(Südwest)","52.396186","10.689225","82","Niedersachsen"], 
["5692","Worms","49.605078","8.365906","88","Rheinland-Pfalz"], ["5705","Würzburg","49.770283","9.957723","268","Bayern"], ["5717","Wuppertal-Buchenhofen","51.224808","7.105335","130","Nordrhein-Westfalen"], ["5731","Wutöschingen-Ofteringen","47.678257","8.380129","398","Baden-Württemberg"], ["5745","Zehdenick","52.966353","13.326781","51","Brandenburg"], ["5779","Zinnwald-Georgenfeld","50.731376","13.751594","877","Sachsen"], ["5792","Zugspitze","47.420868","10.984724","2964","Bayern"], ["5906","Mannheim","49.509028","8.554076","96.1","Baden-Württemberg"], ["6159","Dörpen","52.954182","7.319582","8","Niedersachsen"], ["14311","Hersdorf-Weißenseifen","50.150531","6.55262","530","Rheinland-Pfalz"] ]; weather = [ ["3",["1981-2010","3.8","3.1",".1","0","0","0","0","0","0","0",".4","2.6","10"]], ["44",["1981-2010","5.5","4.5",".5","0","0","0","0","0","0","0",".7","4.8","16"]], ["73",["1981-2010","11.1","5.4",".6","0","0","0","0","0","0","0","1.8","7.3","26.2"]], ["78",["1981-2010","4.8","3.8",".5","0","0","0","0","0","0","0",".6","4.1","13.8"]], ["91",["1981-2010","8.2","6.3",".8","0","0","0","0","0","0","0","1.4","6.2","22.9"]], ["142",["1981-2010","10.7","6.2","1","0","0","0","0","0","0","0","2.1","8.4","28.4"]], ["150",["1981-2010","6.3","3.9",".1","0","0","0","0","0","0","0","1.1","4.5","15.8"]], ["151",["1981-2010","10.3","5.9","1","0","0","0","0","0","0","0","1.6","8.1","26.9"]], ["164",["1981-2010","9","5.7","1.1","0","0","0","0","0","0","0","1.5","7.5","24.8"]], ["183",["1981-2010","6.1","5.6","1.4","0","0","0","0","0","0","0",".4","2.8","16.3"]], ["198",["1981-2010","8.1","6",".9","0","0","0","0","0","0","0","1.4","6.7","23.2"]], ["217",["1981-2010","10.4","7.5","2",".1","0","0","0","0","0","0","3.1","8.8","32"]], ["232",["1981-2010","10.7","6.9","1.4",".1","0","0","0","0","0","0","2.1","8.4","29.5"]], ["243",["1981-2010","4.4","3.1",".5","0","0","0","0","0","0","0",".5","3.4","12"]], 
["257",["1981-2010","5.8","2.7",".2","0","0","0","0","0","0","0",".8","3.2","12.7"]], ["282",["1981-2010","8","4.3",".5","0","0","0","0","0","0","0","1.1","5.1","19"]], ["298",["1981-2010","7.7","5.8","1.3","0","0","0","0","0","0","0",".9","5.9","21.5"]], ["320",["1981-2010","9.4","5.3",".8","0","0","0","0","0","0","0","1.2","6.3","23.1"]], ["330",["1981-2010","9.3","6.6","1.1",".1","0","0","0","0","0","0","2","7.3","26.4"]], ["377",["1981-2010","6.2","3.7",".3","0","0","0","0","0","0","0","1.1","3.7","15"]], ["400",["1981-2010","8","4",".4","0","0","0","0","0","0","0","1.3","6.8","20.5"]], ["403",["1981-2010","8","4.6",".5","0","0","0","0","0","0","0","1.2","6.6","20.8"]], ["427",["1981-2010","8.3","5",".7","0","0","0","0","0","0","0","1.3","7","22.2"]], ["430",["1981-2010","7.4","4.7",".6","0","0","0","0","0","0","0","1.1","6.2","20"]], ["433",["1981-2010","7.8","4.8",".6","0","0","0","0","0","0","0","1.1","6.3","20.7"]], ["435",["1981-2010","6.9","4.5",".4","0","0","0","0","0","0","0","1","5.6","18.4"]], ["450",["1981-2010","4.4","2.1","0","0","0","0","0","0","0","0",".7","2.9","10.2"]], ["502",["1981-2010","10.2","7.4","1",".1","0","0","0","0","0","0","2","7.8","28.4"]], ["591",["1981-2010","8.9","5.4","1","0","0","0","0","0","0","0","1","7.1","23.4"]], ["596",["1981-2010","6.4","4.8","1.4","0","0","0","0","0","0","0",".5","4.4","17.5"]], ["614",["1981-2010","6.6","5.2",".6","0","0","0","0","0","0","0","1","4.6","18"]], ["648",["1981-2010","5.8","3.7",".5","0","0","0","0","0","0","0",".6","5.1","15.8"]], ["662",["1981-2010","7","5.3",".7","0","0","0","0","0","0","0","1.2","5.9","20.2"]], ["691",["1981-2010","5.6","3.7",".6","0","0","0","0","0","0","0",".5","4.5","14.9"]], ["701",["1981-2010","5.3","3.3",".5","0","0","0","0","0","0","0",".5","4.1","13.6"]], ["722",["1981-2010","18.3","17.7","13.9","5.8",".4","0","0","0","0","2.6","9.9","16.4","85"]], ["755",["1981-2010","8.6","5.7",".8","0","0","0","0","0","0","0","1.6","6.2","22.9"]], 
["807",["1981-2010","8.2","4.5",".5","0","0","0","0","0","0","0","1.4","5.6","20.3"]], ["817",["1981-2010","7.6","4.6",".5","0","0","0","0","0","0","0","1.3","6.2","20.3"]], ["850",["1981-2010","6","4.7",".6","0","0","0","0","0","0","0",".8","5","17.2"]], ["853",["1981-2010","9.4","8.1","2.3",".2","0","0","0","0","0","0","2.3","7.9","30.2"]], ["863",["1981-2010","11.6","11","4",".4","0","0","0","0","0",".1","3.2","9.8","40.2"]], ["880",["1981-2010","7.4","5.1",".7","0","0","0","0","0","0","0","1.2","6.3","20.7"]], ["891",["1981-2010","5.4","3.5",".6","0","0","0","0","0","0","0",".5","3.7","13.7"]], ["963",["1981-2010","5.6","3.7",".5","0","0","0","0","0","0","0",".5","4.8","15.1"]], ["979",["1981-2010","6.6","3.5",".3","0","0","0","0","0","0","0",".9","5","16.4"]], ["982",["1981-2010","10.2","6.4",".7","0","0","0","0","0","0","0","2.2","7.2","26.8"]], ["1001",["1981-2010","8.3","5.4",".9","0","0","0","0","0","0","0","1.4","6.4","22.4"]], ["1048",["1981-2010","9","6.6","1.2","0","0","0","0","0","0","0","1.5","6.3","24.7"]], ["1078",["1981-2010","3.1","1.9",".1","0","0","0","0","0","0","0",".4","2","7.5"]], ["1103",["1981-2010","10.5","7.3","1.4","0","0","0","0","0","0","0","2.8","9.6","31.6"]], ["1107",["1981-2010","9.1","5.1",".6","0","0","0","0","0","0","0","1.7","6.6","23.2"]], ["1197",["1981-2010","9.1","6.3","1",".1","0","0","0","0","0","0","1.8","6.9","25.1"]], ["1224",["1981-2010","5.9","2.5",".2","0","0","0","0","0","0","0",".9","2.9","12.4"]], ["1266",["1981-2010","6.6","4",".6","0","0","0","0","0","0","0",".7","5.1","17.1"]], ["1270",["1981-2010","9.6","8","1.7",".1","0","0","0","0","0","0","2.1","8.1","29.6"]], ["1279",["1981-2010","7.7","3.9",".5","0","0","0","0","0","0","0","1","5","18.1"]], ["1292",["1981-2010","12","7.9","1",".1","0","0","0","0","0","0","2.6","9.7","33.3"]], ["1297",["1981-2010","7","4.9",".6","0","0","0","0","0","0","0","1","5.2","18.7"]], ["1303",["1981-2010","4.4","3.4",".2","0","0","0","0","0","0","0",".6","2.9","11.6"]], 
["1327",["1981-2010","4.3","3.2",".1","0","0","0","0","0","0","0",".6","2.5","10.6"]], ["1332",["1981-2010","13","7.4","1.4","0","0","0","0","0","0","0","2.2","9.7","33.7"]], ["1346",["1981-2010","15","14.6","13","6.5","1","0","0","0","0","2.3","9.2","13","74.6"]], ["1357",["1981-2010","15.1","11.3","3.6",".4","0","0","0","0","0",".1","4.8","12.8","48"]], ["1358",["1981-2010","19.8","18.3","14.5","4.9",".2","0","0","0","0","3.1","11.1","18.3","90.1"]], ["1420",["1981-2010","5.9","2.8",".1","0","0","0","0","0","0","0",".8","3.8","13.3"]], ["1443",["1981-2010","5.3","2.4",".1","0","0","0","0","0","0","0",".9","3.1","11.9"]], ["1451",["1981-2010","6.2","3.9",".5","0","0","0","0","0","0","0",".7","5.3","16.5"]], ["1468",["1981-2010","10.5","8.9","2.9",".3","0","0","0","0","0","0","3.9","9.7","36.3"]], ["1503",["1981-2010","4.3","3.6",".5","0","0","0","0","0","0","0",".5","3.8","12.7"]], ["1526",["1981-2010","7.3","4.6",".5","0","0","0","0","0","0","0","1.3","5","18.7"]], ["1544",["1981-2010","7.7","4.4",".7","0","0","0","0","0","0","0",".9","6.7","20.4"]], ["1550",["1981-2010","9.4","5.4","1.4",".1","0","0","0","0","0","0","2.8","8.6","27.6"]], ["1580",["1981-2010","5.1","2.4",".1","0","0","0","0","0","0","0",".9","3.2","11.7"]], ["1590",["1981-2010","3.6","2.2",".1","0","0","0","0","0","0","0",".4","2.1","8.4"]], ["1612",["1981-2010","8.9","7.5","1.5",".1","0","0","0","0","0","0","1.8","7.2","27.1"]], ["1619",["1981-2010","5.2","2.2","0","0","0","0","0","0","0","0",".7","2.8","11"]], ["1639",["1981-2010","7.2","3.8",".2","0","0","0","0","0","0","0","1","5","17.2"]], ["1645",["1981-2010","9.5","7.3","1.2","0","0","0","0","0","0","0","1.9","7.4","27.3"]], ["1667",["1981-2010","5.9","3.8",".7","0","0","0","0","0","0","0",".6","4.7","15.6"]], ["1684",["1981-2010","9.9","7.5","1.5",".1","0","0","0","0","0","0","2.1","8.2","29.3"]], ["1691",["1981-2010","6.5","4.7",".6","0","0","0","0","0","0","0","1","4.9","17.6"]], 
["1735",["1981-2010","12.4","8","2.2","0","0","0","0","0","0","0","3.3","10.3","36.3"]], ["1757",["1981-2010","7.4","5.3","1","0","0","0","0","0","0","0","1.1","5.7","20.4"]], ["1803",["1981-2010","7.6","5.6","1.1","0","0","0","0","0","0","0","1.3","6.2","21.8"]], ["1832",["1981-2010","19.5","18.1","14.5","6.4",".5","0","0","0",".1","3","11.6","17.1","90.8"]], ["1920",["1981-2010","3.5","2.7",".2","0","0","0","0","0","0","0",".5","2.1","8.9"]], ["1975",["1981-2010","6.1","3.7",".6","0","0","0","0","0","0","0",".7","5.2","16.4"]], ["1981",["1981-2010","5.9","3.7",".5","0","0","0","0","0","0","0",".6","4.9","15.5"]], ["1990",["1981-2010","5.6","3.4",".4","0","0","0","0","0","0","0",".6","4.9","15"]], ["1993",["1981-2010","5.2","4",".5","0","0","0","0","0","0","0",".7","3.5","13.8"]], ["2014",["1981-2010","6.3","4.7",".6","0","0","0","0","0","0","0",".9","5.3","17.8"]], ["2074",["1981-2010","7.4","5.8","1",".1","0","0","0","0","0","0","1.7","5.5","21.5"]], ["2080",["1981-2010","5.3","2.1",".1","0","0","0","0","0","0","0",".7","2.7","10.8"]], ["2110",["1981-2010","3.9","2.8","0","0","0","0","0","0","0","0",".5","2.2","9.4"]], ["2115",["1981-2010","3","2.8",".5","0","0","0","0","0","0","0",".1","1.1","7.5"]], ["2120",["1981-2010","6.8","5.7",".8","0","0","0","0","0","0","0","1.2","5.7","20.1"]], ["2147",["1981-2010","4.6","3.4",".4","0","0","0","0","0","0","0",".7","3","12.1"]], ["2167",["1981-2010","6.3","4.1",".3","0","0","0","0","0","0","0","1.1","4.6","16.4"]], ["2171",["1981-2010","7.8","5.2",".8","0","0","0","0","0","0","0","1.2","6.1","21.1"]], ["2173",["1981-2010","3.5","2.4",".1","0","0","0","0","0","0","0",".4","2","8.4"]], ["2206",["1981-2010","5.4","4.9",".8","0","0","0","0","0","0","0",".9","4.3","16.2"]], ["2211",["1981-2010","6.6","4.1",".3","0","0","0","0","0","0","0","1","4.4","16.4"]], ["2250",["1981-2010","12.8","8.1","1.2","0","0","0","0","0","0","0","2.7","10","34.8"]], 
["2260",["1981-2010","11.2","8.4","2.1",".2","0","0","0","0","0","0","2.5","8.3","32.7"]], ["2261",["1981-2010","13.8","10","2.9",".3","0","0","0","0","0","0","3.8","12","42.8"]], ["2268",["1981-2010","8.3","5",".5","0","0","0","0","0","0","0","1.3","4.9","20.1"]], ["2290",["1981-2010","11.2","10.5","5",".7","0","0","0","0","0",".3","5.8","10.3","43.8"]], ["2303",["1981-2010","5.9","4.1",".7","0","0","0","0","0","0","0",".6","4.1","15.4"]], ["2306",["1981-2010","6.4","4.6",".8","0","0","0","0","0","0","0",".4","4.1","16.2"]], ["2319",["1981-2010","10","7.3","1.8",".1","0","0","0","0","0",".1","3.2","9","31.4"]], ["2324",["1981-2010","10","7.9","1.9",".1","0","0","0","0","0","0","2","7.8","29.6"]], ["2480",["1981-2010","5.5","2.7",".1","0","0","0","0","0","0","0",".8","3","12"]], ["2483",["1981-2010","15","13.4","7.4","1.2","0","0","0","0","0",".2","6.2","13.7","57"]], ["2497",["1981-2010","6.6","6.1","1.3",".1","0","0","0","0","0","0","1.6","5.6","21.3"]], ["2522",["1981-2010","5.4","2.5",".1","0","0","0","0","0","0","0",".7","2.4","11.1"]], ["2532",["1981-2010","7.9","5.7",".6","0","0","0","0","0","0","0","1.5","6","21.7"]], ["2542",["1981-2010","10","8.3","2.2",".1","0","0","0","0","0","0","3.1","8.6","32.4"]], ["2543",["1981-2010","10.2","8.3","1.3","0","0","0","0","0","0","0","2.9","9.1","31.9"]], ["2559",["1981-2010","8.5","7","1.9",".1","0","0","0","0","0","0","2.6","7.7","27.8"]], ["2597",["1981-2010","8.8","4.1",".5","0","0","0","0","0","0","0","1.2","5.8","20.4"]], ["2600",["1981-2010","7.6","3.9",".3","0","0","0","0","0","0","0","1.1","4.3","17.3"]], ["2601",["1981-2010","14.6","12.2","5.2",".6","0","0","0","0","0",".1","5.7","12.3","50.6"]], ["2629",["1981-2010","4.1","2.3",".1","0","0","0","0","0","0","0",".6","2.9","10.1"]], ["2638",["1981-2010","10.4","10.6","4.6",".5","0","0","0","0","0","0","5.6","10.4","42.2"]], ["2657",["1981-2010","3.8","1.9","0","0","0","0","0","0","0","0",".6","1.9","8.2"]], 
["2667",["1981-2010","3.6","1.9",".1","0","0","0","0","0","0","0",".4","2","7.9"]], ["2680",["1981-2010","9.1","5",".6","0","0","0","0","0","0","0","1.3","6.1","22"]], ["2700",["1981-2010","12.4","7.3","1.1","0","0","0","0","0","0","0","2.1","9.1","32.1"]], ["2712",["1981-2010","7.8","4",".4","0","0","0","0","0","0","0","1.4","4.9","18.5"]], ["2750",["1981-2010","9.2","5.3","1","0","0","0","0","0","0","0","1.3","6.3","23.2"]], ["2812",["1981-2010","7","2.6","0","0","0","0","0","0","0","0",".6","3.8","14.1"]], ["2814",["1981-2010","11.5","9.3","2.6",".3","0","0","0","0","0",".1","4.3","10.1","38.1"]], ["2925",["1981-2010","9.6","8","1.9",".1","0","0","0","0","0","0","2.1","7.8","29.4"]], ["2928",["1981-2010","7.4","5.8","1","0","0","0","0","0","0","0","1.3","5.9","21.3"]], ["2932",["1981-2010","7.6","6","1","0","0","0","0","0","0","0","1.3","6.3","22.1"]], ["2947",["1981-2010","5.7","3.6",".4","0","0","0","0","0","0","0","1","4.2","14.9"]], ["2950",["1981-2010","6","4.3",".7","0","0","0","0","0","0","0",".7","4.1","15.7"]], ["2968",["1981-2010","2.5","1.6",".1","0","0","0","0","0","0","0",".2","1.2","5.6"]], ["3015",["1981-2010","9","5.8",".7","0","0","0","0","0","0","0","1.4","7.6","24.5"]], ["3018",["1981-2010","7.9","5.7",".6","0","0","0","0","0","0","0","1.8","6.2","22.2"]], ["3023",["1981-2010","4.3","3",".3","0","0","0","0","0","0","0",".5","3.7","11.8"]], ["3028",["1981-2010","5.5","4.6",".6","0","0","0","0","0","0","0",".8","3.7","15.2"]], ["3031",["1981-2010","4.2","3.4",".4","0","0","0","0","0","0","0",".6","2.8","11.4"]], ["3032",["1981-2010","5.4","4.1",".6","0","0","0","0","0","0","0",".3","2.9","13.3"]], ["3093",["1981-2010","6.8","4.7",".5","0","0","0","0","0","0","0","1.1","7","20"]], ["3126",["1981-2010","6.9","4.8",".5","0","0","0","0","0","0","0","1","6","19.2"]], ["3137",["1981-2010","5.7","2.3",".1","0","0","0","0","0","0","0",".7","3.5","12.2"]], ["3155",["1981-2010","7","4.9",".7","0","0","0","0","0","0","0","1.5","5","19.2"]], 
["3167",["1981-2010","11.5","8.5","2.3",".1","0","0","0","0","0","0","2.7","9.2","34.2"]], ["3196",["1981-2010","8.7","5.6",".8","0","0","0","0","0","0","0","1.2","7.7","24.1"]], ["3231",["1981-2010","12.3","8.4","1.7",".1","0","0","0","0","0","0","2.8","10","35.3"]], ["3244",["1981-2010","10.3","7.6","1.4","0","0","0","0","0","0","0","2.6","8.6","30.5"]], ["3257",["1981-2010","7.3","4.5",".4","0","0","0","0","0","0","0","1.3","4.8","18.3"]], ["3271",["1981-2010","10.9","5.8",".7","0","0","0","0","0","0","0","1.7","7.6","26.6"]], ["3307",["1981-2010","7.4","6.4","2.1",".2","0","0","0","0","0","0","2.8","7.2","26.1"]], ["3366",["1981-2010","11.8","6.5","1","0","0","0","0","0","0","0","1.9","8.9","30.1"]], ["3376",["1981-2010","9.4","5.7","1.1","0","0","0","0","0","0","0","1.8","7.6","25.6"]], ["3379",["1981-2010","8.3","5.7","1","0","0","0","0","0","0","0","1.5","6.2","22.8"]], ["3402",["1981-2010","11","8.9","2.1",".3","0","0","0","0","0","0","3.9","10","36.1"]], ["3490",["1981-2010","3.7","2.4","0","0","0","0","0","0","0","0",".5","1.9","8.6"]], ["3509",["1981-2010","8.9","5.3","1","0","0","0","0","0","0","0","1.4","8.2","24.8"]], ["3527",["1981-2010","11.5","8.3","1.9",".1","0","0","0","0","0","0","2.7","9.1","33.5"]], ["3537",["1981-2010","6.3","3.8",".6","0","0","0","0","0","0","0",".5","4.7","15.9"]], ["3552",["1981-2010","8.1","4.9",".6","0","0","0","0","0","0","0","1.1","6.3","20.9"]], ["3578",["1981-2010","10.8","6",".6","0","0","0","0","0","0","0","1.7","7.2","26.3"]], ["3612",["1981-2010","5.5","3.8",".4","0","0","0","0","0","0","0",".6","4.3","14.7"]], ["3621",["1981-2010","10.3","6.6","1",".1","0","0","0","0","0","0","1.8","7.6","27.3"]], ["3631",["1981-2010","4.3","3.1",".4","0","0","0","0","0","0","0",".5","2.7","11"]], ["3640",["1981-2010","3.8","2.9",".3","0","0","0","0","0","0","0",".4","2.9","10.4"]], ["3667",["1981-2010","8.7","5.1",".7","0","0","0","0","0","0","0","1.4","6","21.9"]], 
["3668",["1981-2010","8.2","5",".7","0","0","0","0","0","0","0","1.3","5.9","21.1"]], ["3730",["1981-2010","8.6","5.9","1.8",".1","0","0","0","0","0","0","2.9","7.4","26.8"]], ["3739",["1981-2010","14.2","9.8","2.1",".1","0","0","0","0","0","0","3.7","11.3","41.1"]], ["3761",["1981-2010","6.6","3.9",".3","0","0","0","0","0","0","0","1.3","4.4","16.5"]], ["3811",["1981-2010","7.2","5.7","1","0","0","0","0","0","0","0","1.3","5.9","21"]], ["3875",["1981-2010","13.4","7.9","1.7","0","0","0","0","0","0","0","3","11","36.9"]], ["3927",["1981-2010","12","8.4","1.6",".1","0","0","0","0","0","0","3.2","10.2","35.6"]], ["3939",["1981-2010","7.1","4.1",".4","0","0","0","0","0","0","0","1.3","4","17"]], ["3987",["1981-2010","8.2","4.8",".5","0","0","0","0","0","0","0","1.4","7.1","22"]], ["4063",["1981-2010","5.4","4.3",".6","0","0","0","0","0","0","0",".7","4.6","15.7"]], ["4104",["1981-2010","11.9","6.3",".7","0","0","0","0","0","0","0","1.6","8.4","28.9"]], ["4169",["1981-2010","6.3","2.8",".1","0","0","0","0","0","0","0",".8","3.6","13.6"]], ["4175",["1981-2010","4.7","2",".1","0","0","0","0","0","0","0",".5","2.1","9.5"]], ["4261",["1981-2010","8.5","5.4",".7","0","0","0","0","0","0","0","1.9","7.4","23.8"]], ["4271",["1981-2010","6.1","4.1",".8","0","0","0","0","0","0","0",".6","4.3","15.9"]], ["4278",["1981-2010","8.4","4.6",".5","0","0","0","0","0","0","0","1.4","5.9","20.7"]], ["4287",["1981-2010","9.5","6.3",".9",".1","0","0","0","0","0","0","2","6.8","25.6"]], ["4318",["1981-2010","10.1","8","2.7",".2","0","0","0","0","0","0","3.4","9","33.3"]], ["4323",["1981-2010","9.3","6.7","1.1",".1","0","0","0","0","0","0","2.1","7.8","27.2"]], ["4336",["1981-2010","6.8","4.2",".3","0","0","0","0","0","0","0","1.2","4.9","17.4"]], ["4350",["1981-2010","6.7","3.2",".3","0","0","0","0","0","0","0","1.2","3.9","15.3"]], ["4371",["1981-2010","5.5","3.8",".5","0","0","0","0","0","0","0",".8","4.2","14.8"]], 
["4377",["1981-2010","11.7","8.2","1.8",".1","0","0","0","0","0","0","2.4","9.9","34"]], ["4393",["1981-2010","5.2","3.6",".6","0","0","0","0","0","0","0",".4","3.7","13.6"]], ["4411",["1981-2010","6.1","3.6",".2","0","0","0","0","0","0","0","1","3.6","14.4"]], ["4442",["1981-2010","5.6","4.3",".5","0","0","0","0","0","0","0",".7","3.8","14.9"]], ["4466",["1981-2010","7","4.5",".8","0","0","0","0","0","0","0",".5","5","17.8"]], ["4501",["1981-2010","17.8","15.3","9","2","0","0","0","0","0",".6","7.9","17","69.5"]], ["4508",["1981-2010","9.9","7.4","2.5",".1","0","0","0","0","0","0","3","8.9","31.8"]], ["4549",["1981-2010","10.2","10.3","5",".8","0","0","0","0","0",".2","4.3","9.6","40.5"]], ["4560",["1981-2010","7.7","5.1",".5","0","0","0","0","0","0","0","1.3","5.4","20"]], ["4592",["1981-2010","11.6","5.5",".6","0","0","0","0","0","0","0","1.8","8.5","28"]], ["4597",["1981-2010","8.3","7.3","1.7",".2","0","0","0","0","0","0","3","7","27.5"]], ["4625",["1981-2010","7.5","4.8",".6","0","0","0","0","0","0","0",".8","6.2","19.9"]], ["4642",["1981-2010","7.6","4.9",".7","0","0","0","0","0","0","0","1.1","6.5","20.8"]], ["4651",["1981-2010","6.9","5.9",".9","0","0","0","0","0","0","0","1.3","5.5","20.4"]], ["4692",["1981-2010","5.6","3.1",".2","0","0","0","0","0","0","0",".7","4.1","13.6"]], ["4703",["1981-2010","10.3","7","1.2",".1","0","0","0","0","0","0","2.4","8.6","29.7"]], ["4706",["1981-2010","12","5.9","1.1","0","0","0","0","0","0","0","1.7","8.2","28.8"]], ["4745",["1981-2010","6.6","4.3",".3","0","0","0","0","0","0","0","1","6.8","19.2"]], ["4887",["1981-2010","12.6","10.5","3.4",".3","0","0","0","0","0",".1","5.2","11.7","43.9"]], ["4896",["1981-2010","6.8","5.5","1.2","0","0","0","0","0","0","0",".4","4.2","18"]], ["4926",["1981-2010","4.6","2.8",".2","0","0","0","0","0","0","0",".7","2.3","10.5"]], ["4928",["1981-2010","6.2","4.1",".3","0","0","0","0","0","0","0","1.4","4.2","16.2"]], 
["4931",["1981-2010","6.9","4.7",".8",".1","0","0","0","0","0","0","1.3","4.9","18.6"]], ["5014",["1981-2010","5.9","3.9",".6","0","0","0","0","0","0","0",".8","5.2","16.4"]], ["5017",["1981-2010","13.9","10.1","2.9",".2","0","0","0","0","0","0","4.1","11.9","43.1"]], ["5029",["1981-2010","7.4","5.5",".4","0","0","0","0","0","0","0","1.8","6.8","21.8"]], ["5064",["1981-2010","3.7","1.8","0","0","0","0","0","0","0","0",".4","2.1","8"]], ["5100",["1981-2010","5.8","3.4",".2","0","0","0","0","0","0","0","1.1","4","14.5"]], ["5111",["1981-2010","9.9","6.3","1.2",".1","0","0","0","0","0","0","2","7.9","27.5"]], ["5142",["1981-2010","8.5","5.3","1.3","0","0","0","0","0","0","0","1.4","6","22.6"]], ["5155",["1981-2010","11.9","6.9","1","0","0","0","0","0","0","0","2.6","9.2","31.6"]], ["5165",["1981-2010","6.8","4.9",".7","0","0","0","0","0","0","0","1.1","5.6","19.1"]], ["5185",["1981-2010","7.3","4",".4","0","0","0","0","0","0","0","1.1","4.9","17.7"]], ["5229",["1981-2010","9.3","6.6","1.3",".1","0","0","0","0","0","0","2.8","7.9","28.1"]], ["5279",["1981-2010","7.5","5.5",".7","0","0","0","0","0","0","0","1.3","5.6","20.6"]], ["5280",["1981-2010","6.6","4.1",".7","0","0","0","0","0","0","0",".8","5.3","17.5"]], ["5361",["1981-2010","6.5","4.6",".6","0","0","0","0","0","0","0","1.3","4.8","17.8"]], ["5371",["1981-2010","15.9","13.8","7.6","1.3","0","0","0","0","0",".3","6.8","14.3","60"]], ["5397",["1981-2010","12.3","7.7","1.4",".1","0","0","0","0","0","0","2.1","9.4","32.9"]], ["5426",["1981-2010","10.5","7.7","1.7",".2","0","0","0","0","0","0","2.7","9","31.8"]], ["5433",["1981-2010","6.5","4.1",".3","0","0","0","0","0","0","0","1.1","4.2","16.3"]], ["5440",["1981-2010","9.2","6.1",".8",".1","0","0","0","0","0","0","1.6","6.7","24.4"]], ["5467",["1981-2010","15.7","16.1","14.1","8.4","1.6",".2","0","0",".4","2.9","10.7","14.3","84.4"]], ["5540",["1981-2010","5.2","2.3",".1","0","0","0","0","0","0","0",".7","2.7","10.9"]], 
["5610",["1981-2010","11.8","11.1","4.7",".5","0","0","0","0","0","0","4.1","10","42.3"]], ["5629",["1981-2010","7.9","5.1",".5","0","0","0","0","0","0","0","1.4","6.9","21.8"]], ["5654",["1981-2010","10.3","7.8","1.6","0","0","0","0","0","0","0","2.5","7.9","30.1"]], ["5664",["1981-2010","4.9","2.2",".1","0","0","0","0","0","0","0","1","2.7","10.9"]], ["5676",["1981-2010","6.6","4.5",".5","0","0","0","0","0","0","0","1.1","5.2","17.9"]], ["5692",["1981-2010","5.4","2.6","0","0","0","0","0","0","0","0",".9","3.2","12.1"]], ["5705",["1981-2010","8.5","4.5",".5","0","0","0","0","0","0","0","1.3","5.7","20.4"]], ["5717",["1981-2010","3.2","2.2",".1","0","0","0","0","0","0","0",".3","1.9","7.8"]], ["5731",["1981-2010","7.6","3.8",".3","0","0","0","0","0","0","0","1.6","4.8","18.1"]], ["5745",["1981-2010","8.9","5.3",".6","0","0","0","0","0","0","0","1.5","6.8","23.1"]], ["5779",["1981-2010","18.9","17.1","10.2","1.6","0","0","0","0","0","1","9.2","18.5","76.5"]], ["5792",["1981-2010","29.6","26.3","29.1","24.1","12.6","6.8","2.9","2.2","7.9","12.9","23.6","29","207"]], ["5906",["1981-2010","5.5","2.1",".1","0","0","0","0","0","0","0",".7","3","11.3"]], ["6159",["1981-2010","4.6","3.3",".5","0","0","0","0","0","0","0",".5","4.1","13"]], ["14311",["1981-2010","8.8","6.2","1",".1","0","0","0","0","0","0","2.1","7.2","25.3"]] ]; temperature = [ [1881,7.6,7.5,7.7,6.6,7.5,7.0,7.5,7.5,8.1,8.0,7.1,8.3,6.7,7.5,7.1,6.7,7.3], [1882,9.0,9.0,8.1,7.3,8.2,8.5,8.9,8.9,9.0,8.6,8.8,8.8,8.1,8.8,8.4,7.8,8.3], [1883,8.4,8.4,7.8,6.8,8.0,7.9,8.4,8.4,8.7,8.3,8.2,8.5,7.5,8.3,7.9,7.3,7.9], [1884,9.1,9.1,8.4,7.5,8.6,8.7,9.1,9.1,9.4,8.9,8.9,9.2,8.2,8.9,8.5,7.9,8.6], [1885,8.4,8.4,7.8,7.0,7.7,7.7,7.9,7.9,8.3,8.0,7.6,8.3,7.7,8.1,7.7,7.2,7.7], [1886,8.5,8.5,8.1,7.3,8.1,7.9,8.2,8.2,8.7,8.4,7.8,8.7,7.8,8.4,7.9,7.4,8.0], [1887,7.8,7.7,6.7,5.9,6.8,7.4,7.4,7.4,7.6,7.1,7.3,7.3,6.7,7.5,6.9,6.2,7.0], [1888,7.4,7.4,7.0,6.2,6.8,6.7,7.1,7.1,7.4,7.1,6.7,7.3,6.7,7.3,6.8,6.2,6.9], 
[1889,8.1,8.1,7.1,6.4,7.3,7.6,8.0,8.0,8.1,7.5,7.7,7.6,7.1,8.0,7.4,6.7,7.4], [1890,8.1,8.1,7.0,6.5,7.3,7.6,7.7,7.7,7.8,7.4,7.6,7.6,7.2,7.8,7.3,6.7,7.3], [1891,8.2,8.2,7.2,6.5,7.4,7.6,7.8,7.8,8.1,7.5,7.6,7.7,7.4,8.0,7.5,6.8,7.4], [1892,8.0,7.9,7.8,6.9,7.5,7.2,7.6,7.6,8.2,7.8,7.1,8.1,7.4,7.9,7.5,6.9,7.5], [1893,8.3,8.3,8.1,7.0,8.0,7.6,8.3,8.3,8.8,8.4,7.9,8.7,7.6,8.3,7.9,7.3,7.9], [1894,8.7,8.7,8.0,7.2,8.1,8.2,8.6,8.6,8.9,8.3,8.4,8.5,7.9,8.6,8.1,7.5,8.1], [1895,8.0,8.0,7.3,6.4,7.2,7.4,7.6,7.6,7.9,7.5,7.3,7.8,7.3,7.8,7.3,6.7,7.3], [1896,8.3,8.3,7.4,6.4,7.6,7.9,8.2,8.2,8.3,7.8,8.1,8.0,7.4,8.1,7.5,6.8,7.6], [1897,8.3,8.3,8.2,7.1,7.9,7.9,8.2,8.2,8.6,8.3,8.0,8.6,7.8,8.3,7.9,7.3,7.9], [1898,9.1,9.0,8.6,7.6,8.5,8.4,8.9,8.9,9.1,8.7,8.5,8.9,8.6,9.0,8.5,7.9,8.5], [1899,8.6,8.6,8.3,7.0,8.1,8.1,8.5,8.6,8.8,8.4,8.4,8.7,7.9,8.6,8.0,7.4,8.1], [1900,8.8,8.8,8.5,7.5,8.4,8.2,8.7,8.8,8.9,8.7,8.3,9.0,8.3,8.8,8.3,7.7,8.4], [1901,8.3,8.3,7.4,6.8,7.5,7.8,8.1,8.1,8.3,7.9,7.9,8.1,7.5,8.1,7.5,6.7,7.6], [1902,7.3,7.2,7.5,6.8,7.3,6.6,7.3,7.3,7.7,7.7,7.0,7.9,6.9,7.3,6.9,6.4,7.2], [1903,9.0,9.0,8.1,7.6,8.3,8.2,8.8,8.8,8.9,8.5,8.4,8.6,8.4,9.0,8.5,7.8,8.4], [1904,8.8,8.8,8.4,7.8,8.3,8.0,8.6,8.6,8.8,8.6,8.2,8.7,8.3,8.8,8.4,7.8,8.4], [1905,8.6,8.6,7.8,7.2,7.9,7.9,8.3,8.3,8.5,8.2,8.1,8.3,7.8,8.4,7.9,7.3,8.0], [1906,9.1,9.1,8.0,7.4,8.2,8.4,8.8,8.8,8.8,8.4,8.5,8.5,8.2,9.0,8.4,7.7,8.3], [1907,8.2,8.2,7.9,7.2,7.8,7.5,8.1,8.1,8.4,8.1,7.6,8.3,7.7,8.1,7.7,7.2,7.8], [1908,8.0,8.0,7.3,6.6,7.3,7.6,8.0,8.0,8.1,7.6,7.8,7.8,7.4,7.9,7.4,6.8,7.5], [1909,7.9,7.8,7.2,6.6,7.4,7.2,7.7,7.7,7.9,7.7,7.3,7.9,7.3,7.8,7.3,6.8,7.4], [1910,9.0,9.0,8.0,7.5,8.4,8.6,9.0,9.0,9.1,8.5,8.8,8.7,8.4,8.9,8.4,7.8,8.4], [1911,9.7,9.6,8.8,8.2,9.1,9.0,9.5,9.5,9.7,9.3,9.1,9.5,9.0,9.7,9.1,8.5,9.0], [1912,8.2,8.2,7.7,7.0,7.9,7.7,8.3,8.3,8.8,8.3,8.0,8.5,7.7,8.2,7.8,7.3,7.9], [1913,9.2,9.2,8.2,7.6,8.4,8.8,9.0,9.0,9.2,8.7,8.8,9.0,8.4,9.1,8.5,7.9,8.5], [1914,9.3,9.3,7.9,7.3,8.4,9.2,9.2,9.2,9.2,8.6,9.2,8.8,8.4,9.1,8.5,7.8,8.5], 
[1915,8.3,8.3,7.9,7.4,8.1,7.6,8.1,8.1,8.5,8.5,7.6,8.7,7.7,8.2,7.8,7.3,7.9], [1916,9.0,9.0,8.3,8.0,8.4,8.3,8.6,8.6,8.9,8.8,8.2,8.9,8.4,8.8,8.4,7.8,8.4], [1917,8.2,8.2,7.3,6.9,7.5,7.6,7.9,7.9,8.0,7.8,7.7,7.8,7.4,8.0,7.5,6.8,7.5], [1918,9.2,9.1,8.2,7.8,8.5,8.5,8.9,8.9,9.1,8.9,8.4,9.0,8.5,9.0,8.5,8.0,8.5], [1919,7.7,7.7,7.3,6.8,7.3,7.3,7.5,7.5,7.9,7.8,7.2,7.9,7.1,7.7,7.2,6.6,7.3], [1920,9.0,9.0,8.5,8.1,8.5,8.5,9.0,9.0,9.2,8.8,8.6,8.9,8.5,9.0,8.6,8.0,8.6], [1921,9.4,9.4,8.8,8.3,9.0,9.0,9.3,9.3,9.6,9.4,9.1,9.5,8.8,9.4,8.9,8.2,9.0], [1922,7.4,7.4,7.3,6.7,7.1,7.1,7.4,7.4,7.8,7.7,7.1,7.8,6.9,7.4,7.0,6.4,7.2], [1923,8.2,8.2,8.2,7.6,8.0,7.6,8.0,8.0,8.5,8.6,7.6,8.6,7.9,8.2,7.9,7.4,8.0], [1924,8.0,8.0,7.4,6.7,7.4,7.5,8.0,8.0,8.2,7.8,7.6,7.9,7.5,8.0,7.6,7.0,7.5], [1925,9.0,9.0,7.9,7.4,8.2,8.6,8.8,8.8,8.8,8.5,8.5,8.5,8.3,8.9,8.4,7.8,8.3], [1926,9.3,9.2,8.6,8.0,8.7,8.7,9.1,9.1,9.3,9.0,8.7,9.1,8.6,9.2,8.7,8.2,8.7], [1927,8.4,8.3,8.0,7.5,8.0,7.9,8.3,8.3,8.6,8.4,8.0,8.5,7.8,8.4,8.0,7.4,8.0], [1928,8.6,8.6,8.6,7.9,8.3,8.0,8.5,8.5,8.9,8.8,8.1,9.0,8.0,8.6,8.2,7.6,8.3], [1929,7.6,7.6,7.5,6.7,7.6,7.0,7.5,7.5,8.2,8.0,7.1,8.1,7.1,7.6,7.3,6.8,7.4], [1930,9.2,9.2,8.7,8.1,8.8,8.6,9.0,9.0,9.4,9.1,8.7,9.3,8.6,9.2,8.7,8.2,8.8], [1931,8.1,8.1,7.2,6.6,7.6,7.6,8.1,8.1,8.3,7.9,7.7,8.0,7.4,8.2,7.7,7.0,7.6], [1932,9.0,8.9,7.9,7.4,8.2,8.6,8.9,8.9,9.0,8.5,8.7,8.7,8.1,8.9,8.4,7.7,8.3], [1933,7.9,7.9,7.4,6.6,7.7,7.8,8.2,8.2,8.4,8.1,8.3,8.4,7.1,8.0,7.5,6.9,7.6], [1934,10.4,10.4,9.0,8.7,9.5,9.7,10.0,10.0,10.1,9.7,9.7,9.8,9.6,10.3,9.8,9.1,9.5], [1935,8.9,8.9,8.0,7.5,8.4,8.5,8.9,8.9,9.1,8.8,8.6,8.9,8.2,9.0,8.5,7.8,8.4], [1936,8.9,8.9,8.2,7.7,8.4,8.3,8.8,8.8,9.0,8.7,8.4,8.8,8.3,8.9,8.4,7.8,8.4], [1937,8.9,8.9,8.6,7.9,8.7,8.4,8.9,8.8,9.2,9.0,8.4,9.3,8.4,9.0,8.6,8.1,8.6], [1938,9.3,9.3,8.1,7.6,8.5,9.0,9.2,9.2,9.2,8.7,9.1,9.0,8.5,9.2,8.7,7.9,8.6], [1939,8.9,8.9,7.9,7.3,8.2,8.5,8.9,8.9,9.1,8.6,8.6,8.8,8.1,8.9,8.3,7.7,8.3], [1940,6.7,6.7,6.8,6.0,6.7,6.2,6.9,6.9,7.5,7.3,6.5,7.6,6.2,6.7,6.4,5.9,6.6], 
[1941,7.3,7.3,7.1,6.4,7.3,6.9,7.6,7.6,8.1,7.7,7.1,8.0,6.7,7.4,7.0,6.5,7.2], [1942,7.6,7.6,7.4,6.7,7.3,6.9,7.5,7.5,8.0,7.8,7.0,8.1,7.0,7.6,7.2,6.7,7.3], [1943,9.3,9.3,8.8,8.1,8.8,8.9,9.2,9.2,9.5,9.1,9.0,9.4,8.8,9.3,8.9,8.4,8.9], [1944,9.0,9.0,7.8,7.3,8.3,8.6,8.9,8.9,8.9,8.4,8.6,8.6,8.1,9.0,8.4,7.7,8.3], [1945,9.5,9.5,8.7,8.1,9.0,9.3,9.4,9.4,9.6,9.3,9.1,9.5,8.9,9.2,8.9,8.5,9.0], [1946,8.9,8.8,8.2,7.8,8.3,8.8,8.6,8.6,8.9,8.5,8.2,8.7,8.2,8.8,8.3,7.8,8.4], [1947,8.4,8.4,8.9,8.2,8.8,7.7,8.6,8.5,9.3,9.3,8.0,9.5,8.2,8.6,8.3,7.9,8.5], [1948,9.6,9.6,8.7,8.3,8.9,9.0,9.5,9.5,9.7,9.2,9.1,9.5,9.1,9.7,9.2,8.5,9.0], [1949,9.6,9.6,8.9,8.4,9.1,9.2,9.5,9.5,9.8,9.5,9.3,9.9,8.9,9.7,9.2,8.6,9.1], [1950,9.0,9.0,8.5,8.1,8.5,8.5,8.9,8.9,9.2,8.8,8.6,9.1,8.5,9.0,8.5,8.0,8.6], [1951,9.2,9.2,8.5,8.1,8.7,8.6,9.0,9.0,9.3,8.9,8.7,9.2,8.9,9.2,8.8,8.3,8.7], [1952,8.2,8.2,8.1,7.4,8.0,7.6,8.1,8.1,8.5,8.4,7.7,8.7,7.8,8.2,7.9,7.4,7.9], [1953,9.6,9.6,8.4,8.0,8.9,9.1,9.4,9.4,9.5,9.1,9.2,9.5,9.1,9.6,9.2,8.6,8.9], [1954,8.0,8.0,7.5,6.9,7.8,7.5,8.1,8.1,8.5,8.1,7.9,8.3,7.5,8.1,7.7,7.2,7.7], [1955,7.9,7.9,7.5,6.8,7.4,7.6,8.0,8.0,8.3,7.9,7.8,8.3,7.2,7.8,7.4,6.8,7.5], [1956,7.2,7.2,6.7,6.0,6.9,6.9,7.3,7.3,7.6,7.2,7.2,7.4,6.4,7.1,6.7,6.2,6.8], [1957,9.0,9.0,8.3,7.8,8.6,8.5,9.0,9.0,9.4,8.9,8.7,9.3,8.5,9.0,8.6,8.1,8.6], [1958,8.5,8.5,8.1,7.5,8.3,8.0,8.6,8.6,8.9,8.6,8.1,8.9,8.2,8.6,8.2,7.7,8.2], [1959,9.3,9.3,8.8,8.1,9.2,8.9,9.4,9.4,9.9,9.6,9.1,10.1,8.8,9.4,9.0,8.6,9.0], [1960,8.7,8.7,8.3,7.7,8.5,8.2,8.8,8.8,9.2,8.8,8.3,9.1,8.2,8.8,8.4,7.8,8.4], [1961,9.3,9.3,9.0,8.2,8.9,8.8,9.2,9.2,9.6,9.3,8.9,9.7,8.9,9.3,8.9,8.4,8.9], [1962,7.7,7.7,7.1,6.4,7.0,7.2,7.4,7.4,7.6,7.5,7.4,7.9,7.1,7.5,7.1,6.5,7.1], [1963,7.7,7.7,7.0,6.4,7.0,7.1,7.3,7.3,7.7,7.3,7.1,7.7,7.2,7.5,7.1,6.5,7.1], [1964,8.4,8.4,8.2,7.4,8.2,8.0,8.4,8.4,8.9,8.6,8.0,9.0,8.0,8.4,8.0,7.6,8.1], [1965,7.9,7.8,7.3,6.7,7.5,7.3,7.9,7.9,8.2,7.8,7.5,8.1,7.4,7.9,7.5,6.9,7.5], [1966,8.8,8.8,8.5,7.9,8.6,8.2,8.7,8.7,9.2,8.9,8.2,9.2,8.5,8.8,8.5,8.0,8.5], 
[1967,9.6,9.6,8.5,8.0,8.9,9.1,9.4,9.4,9.5,9.0,9.1,9.2,8.9,9.5,9.0,8.4,8.9], [1968,8.7,8.7,7.9,7.3,8.1,8.3,8.6,8.6,8.8,8.3,8.4,8.6,8.0,8.6,8.1,7.5,8.1], [1969,7.8,7.8,7.6,7.1,7.9,7.3,8.2,8.2,8.7,8.2,7.8,8.5,7.5,8.0,7.6,7.1,7.8], [1970,7.9,7.9,7.8,7.1,7.8,7.3,8.1,8.1,8.6,8.2,7.6,8.5,7.4,8.0,7.6,7.1,7.7], [1971,9.0,9.0,8.1,7.5,8.4,8.6,8.9,8.9,9.2,8.7,8.6,8.9,8.3,8.9,8.4,7.8,8.4], [1972,8.3,8.3,7.5,7.0,7.7,7.9,8.2,8.2,8.5,8.0,8.0,8.3,7.8,8.2,7.8,7.2,7.8], [1973,8.7,8.7,7.8,7.2,8.3,8.4,8.8,8.8,9.0,8.6,8.6,8.9,8.0,8.7,8.2,7.5,8.2], [1974,9.3,9.3,8.6,8.0,8.8,8.8,9.3,9.3,9.4,9.1,8.9,9.4,8.7,9.4,8.8,8.2,8.8], [1975,9.6,9.6,8.5,8.0,8.9,9.0,9.5,9.5,9.6,9.1,9.2,9.4,8.9,9.5,9.0,8.3,8.9], [1976,8.6,8.6,8.4,7.7,8.7,8.0,8.8,8.8,9.4,9.2,8.4,9.6,8.1,8.8,8.4,7.9,8.5], [1977,9.1,9.1,8.6,8.0,8.7,8.5,9.1,9.1,9.3,8.9,8.6,9.1,8.5,9.1,8.6,8.1,8.7], [1978,8.4,8.4,7.5,6.9,7.7,7.8,8.3,8.3,8.5,8.0,8.0,8.1,7.6,8.3,7.8,7.2,7.8], [1979,8.0,8.0,8.0,7.4,7.6,7.3,7.8,7.8,8.2,8.1,7.2,8.4,7.7,8.0,7.6,7.1,7.7], [1980,7.7,7.7,7.5,6.9,7.7,7.3,8.2,8.2,8.6,8.1,7.7,8.3,7.2,7.9,7.5,7.0,7.6], [1981,8.7,8.6,8.1,7.5,8.1,8.1,8.5,8.5,8.9,8.5,8.0,8.9,8.0,8.6,8.1,7.5,8.2], [1982,9.5,9.4,8.6,8.1,8.8,8.7,9.3,9.3,9.7,9.1,8.7,9.5,9.0,9.6,9.1,8.5,8.9], [1983,9.7,9.7,8.8,8.2,8.9,9.1,9.5,9.5,9.7,9.2,9.1,9.5,9.0,9.6,9.1,8.4,9.0], [1984,8.4,8.4,7.7,7.1,7.9,8.0,8.5,8.5,8.9,8.3,8.2,8.7,7.7,8.4,7.9,7.3,8.0], [1985,7.9,7.9,7.4,6.7,7.3,7.2,7.8,7.8,8.0,7.7,7.4,8.1,7.3,7.9,7.4,6.8,7.4], [1986,8.3,8.2,7.9,7.3,7.9,7.7,8.2,8.2,8.6,8.3,7.8,8.6,7.8,8.2,7.8,7.3,7.9], [1987,7.5,7.5,7.7,7.0,7.4,7.1,7.7,7.7,8.2,7.9,7.3,8.3,7.0,7.5,7.1,6.6,7.4], [1988,9.5,9.5,8.9,8.3,9.0,9.0,9.5,9.5,9.7,9.4,9.1,9.6,8.8,9.6,9.1,8.5,9.1], [1989,10.2,10.1,9.0,8.4,9.4,9.7,10.0,10.0,10.3,9.7,9.6,9.9,9.5,10.1,9.6,8.9,9.5], [1990,10.1,10.1,9.1,8.4,9.4,9.8,10.1,10.1,10.2,9.7,9.7,10.0,9.4,10.1,9.5,8.8,9.5], [1991,8.9,8.9,8.2,7.4,8.3,8.4,8.8,8.8,9.0,8.7,8.5,9.1,8.2,8.8,8.3,7.7,8.3], [1992,9.9,9.9,9.1,8.6,9.2,9.3,9.9,9.9,10.0,9.5,9.5,9.8,9.3,9.9,9.4,8.7,9.4], 
[1993,8.8,8.8,8.6,8.0,8.5,8.2,8.6,8.6,9.1,8.9,8.2,9.3,8.3,8.7,8.3,7.8,8.5], [1994,9.9,9.9,9.9,9.4,9.7,9.2,9.8,9.8,10.3,10.1,9.3,10.5,9.4,10.0,9.6,9.1,9.7], [1995,9.2,9.2,8.8,8.1,8.9,8.7,9.3,9.3,9.8,9.4,8.9,9.8,8.5,9.3,8.8,8.2,8.9], [1996,7.4,7.3,7.4,6.7,7.2,6.9,7.5,7.5,7.9,7.8,7.2,8.3,6.6,7.3,6.9,6.4,7.2], [1997,9.2,9.2,8.8,8.1,8.9,8.8,9.4,9.4,9.7,9.3,9.0,9.7,8.5,9.3,8.8,8.3,8.9], [1998,9.5,9.5,8.9,8.4,9.0,8.9,9.5,9.5,9.7,9.3,9.0,9.5,8.9,9.6,9.1,8.5,9.1], [1999,10.1,10.0,9.1,8.5,9.5,9.5,10.2,10.2,10.3,9.8,9.7,9.9,9.2,10.1,9.6,8.9,9.5], [2000,10.4,10.4,9.7,9.1,9.8,9.7,10.3,10.3,10.5,10.1,9.8,10.3,9.8,10.4,9.9,9.4,9.9], [2001,9.2,9.2,9.0,8.3,9.1,8.8,9.4,9.4,9.8,9.5,8.9,9.7,8.6,9.3,9.0,8.5,9.0], [2002,9.8,9.7,9.5,9.0,9.6,9.4,9.9,9.9,10.3,10.0,9.6,10.3,9.2,9.7,9.3,8.8,9.6], [2003,9.5,9.5,9.4,8.8,9.5,9.0,9.7,9.7,10.1,10.1,9.2,10.6,9.0,9.6,9.2,8.7,9.4], [2004,9.3,9.3,8.7,8.2,8.9,9.0,9.5,9.5,9.6,9.2,9.1,9.5,8.6,9.3,8.9,8.3,8.9], [2005,9.3,9.3,8.6,8.0,9.1,9.1,9.6,9.6,9.9,9.5,9.2,9.9,8.6,9.4,9.0,8.4,9.0], [2006,10.0,10.0,9.2,8.5,9.5,9.7,10.2,10.2,10.3,9.9,9.9,10.3,9.2,10.0,9.5,8.9,9.5], [2007,10.4,10.3,9.5,9.1,9.8,10.0,10.4,10.4,10.5,10.1,10.0,10.3,9.7,10.3,9.9,9.3,9.9], [2008,10.1,10.1,9.1,8.7,9.3,9.7,10.0,10.0,9.9,9.6,9.7,9.7,9.4,10.0,9.5,8.9,9.5], [2009,9.5,9.5,9.0,8.5,9.1,9.1,9.7,9.7,9.8,9.6,9.3,9.8,8.9,9.5,9.1,8.6,9.2], [2010,8.1,8.1,7.9,7.3,7.9,7.7,8.1,8.1,8.4,8.4,7.7,8.7,7.5,8.0,7.6,7.2,7.8], [2011,9.9,9.9,9.6,8.9,9.7,9.4,10.0,10.0,10.4,10.2,9.4,10.5,9.4,10.0,9.6,9.1,9.6], [2012,9.4,9.3,9.1,8.5,9.1,8.8,9.5,9.5,9.7,9.5,8.8,9.7,8.9,9.4,9.1,8.6,9.1], [2013,9.2,9.2,8.6,8.1,8.7,8.9,9.1,9.1,9.2,9.0,8.8,9.2,8.4,9.1,8.6,8.1,8.7], [2014,10.7,10.7,10.1,9.6,10.3,10.2,10.8,10.8,11.0,10.7,10.5,10.9,10.1,10.7,10.3,9.8,10.3], [2015,10.4,10.3,9.9,9.4,9.9,9.8,10.2,10.2,10.4,10.2,9.7,10.5,9.9,10.3,10.0,9.5,9.9], [2016,10.0,10.0,9.3,8.9,9.4,9.6,9.9,9.9,10.1,9.8,9.6,9.9,9.4,10.1,9.6,9.0,9.5] ]; function NearestCity(latitude, longitude) { var mindif = 99999; var closest; 
window.lat=latitude; window.long=longitude; for (index = 0; index < cities.length; ++index) { var dif = PythagorasEquirectangular(latitude, longitude, cities[index][2], cities[index][3]); if (dif < mindif) { closest = index; mindif = dif; } } city = cities[closest]; for (index = 0; index < weather.length; ++index) { if (weather[index][0] == city[0]) { tmp = weather[index][1]; jan = tmp[1]; feb = tmp[2]; mar = tmp[3]; apr = tmp[4]; mai = tmp[5]; jun = tmp[6]; jul = tmp[7]; aug = tmp[8]; sep = tmp[9]; okt = tmp[10]; nov = tmp[11]; dec = tmp[12]; } } // echo the nearest city frame = document.getElementById('frame'); number = 16; if (city[5] == "Berlin") {number=1;} if (city[5] == "Brandenburg") {number=2;} if (city[5] == "Baden-Württemberg") {number=3;} if (city[5] == "Hessen") {number=5;} if (city[5] == "Mecklenburg-Vorpommern") {number=6;} if (city[5] == "Niedersachsen") {number=7;} if (city[5] == "Hamburg") {number=8;} if (city[5] == "Nordrhein-Westfalen") {number=9;} if (city[5] == "Rheinland-Pfalz") {number=10;} if (city[5] == "Schleswig-Holstein") {number=11;} if (city[5] == "Saarland") {number=12;} if (city[5] == "Sachsen") {number=13;} if (city[5] == "Sachsen-Anhalt") {number=14;} if (city[5] == "Thüringen") {number=15;} if (city[5] == "Berlin") {number=1;} html = '<h3>Your next weather centre is '+city[1]+" in "+city[5]+". It's located "+city[4]+" meters above normal.</h3>"; html = html+'<img style="max-width:80%;height:auto;" src="https://maps.googleapis.com/maps/api/staticmap?size=500x400&markers=color:blue|'+city[2]+', '+city[3]+'|'+window.lat+', '+window.long+'"><h4>Statistics of frozen days per month. 
(Average of 1881 to 2010)</h4>'; html = html+'<table>'; html = html+'<tr><td style="font-weight:bold;width:30%;">jan</td><td><div style="width:'+jan*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+jan+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">feb</td><td><div style="width:'+feb*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+feb+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">mar</td><td><div style="width:'+mar*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+mar+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">apr</td><td><div style="width:'+apr*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+apr+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">mai</td><td><div style="width:'+mai*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+mai+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">jun</td><td><div style="width:'+jun*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+jun+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">jul</td><td><div style="width:'+jul*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+jul+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">aug</td><td><div style="width:'+aug*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+aug+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">sep</td><td><div 
style="width:'+sep*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+sep+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">okt</td><td><div style="width:'+okt*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+okt+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">nov</td><td><div style="width:'+nov*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+nov+'</span></td></tr>'; html = html+'<tr><td style="font-weight:bold;width:30%;">dec</td><td><div style="width:'+dec*10+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+dec+'</span></td></tr>'; html = html+'</table>'; html = html+'<h4>Average temperature in '+city[5]+' (1881 to 2016)</h4>'; html = html+'<table>'; html = html+'<h4>The current average temperature is '+Math.round((temperature[temperature.length-1][0]/temperature[0][0])*10000)/100+'% of '+temperature[0][0]+'.</h4>'; for (index = temperature.length-1; index >= 0; index=index-1) { html = html+'<tr><td style="font-weight:bold;width:30%;width:30%">'+temperature[index][0]+'</td><td><div style="width:'+temperature[index][number]*8+'%;height:30px;margin:-4px;margin-right:4px;margin-top:-2px;float:left;background:#FF3529;"></div><span>'+temperature[index][number]+'</span></td></tr>'; } html = html+'</table>'; frame.innerHTML = html; }<|fim▁end|>
["2319","Holzkirchen","47.882278","11.69615","685","Bayern"], ["2324","Holzminden-Silberborn","51.765853","9.544662","440","Niedersachsen"], ["2480","Kahl/Main","50.064313","8993","107","Bayern"],
<|file_name|>binary_frame.go<|end_file_name|><|fim▁begin|>// binary_frame.go /* Binary Frame Tool. Version: 0.1.1. Date of Creation: 2018-01-28. Author: McArcher. This is a simple Tool which draws a binary Frame around the Content. A Frame consists of logical Ones (1) and has a Spacer of Zeroes (0). So, --- XXX XXX --- Becomes something with a binary Frame with a Spacer: ------- 1111111 1000001 10XXX01 10XXX01 1000001 1111111 ------- This Technique is a great Thing to enclose Dimensions of the "X" into the File. It may be useful for Transmission of Signals in Space or other Places with a great Chances for Signal Corruption. Even when damaged, a Frame or Remains of a Frame may help a lot in the Process of Data Recovery. This Frame may be used as a Transport Package when there is no Package of the Transmission Protocol, and may be used in a universal Range. To comply with this-day Computers, the Algorithm makes Sizes compatible with 8-Bit Bytes, but it is also able to use any Size you want. 
*/ //============================================================================== package main import ( "flag" "fmt" "io/ioutil" "log" "math" "os" "strconv" ) //============================================================================== type bit bool type row_of_bits []bit type field_of_bits []row_of_bits //============================================================================== const bits_in_byte = 8 const FILLED = true const EMPTY = false const ACTION_NONE = 0 const ACTION_ENCODE_F1 = 1 const ACTION_ENCODE_F2 = 2 const ACTION_DECODE_F1 = 3 const ACTION_DECODE_F2 = 4 const ERROR_1 = 1 //============================================================================== var cla_file_in *string var cla_file_out *string var cla_action_type *string var cla_x *string var cla_y *string var action_type uint8 var file_input_path string var file_output_path string var file_input_content []byte var file_output_content []byte var file_input_x uint64 var file_input_y uint64 var file_input_size uint64 // Number of Bytes. var input_field_size uint64 // Number of Bits. var output_field_size uint64 // Number of Bits. var file_input_cols uint64 var file_input_rows uint64 var file_output_cols uint64 var file_output_rows uint64 var field field_of_bits var field_framed field_of_bits //============================================================================== func main() { var err error var err_code uint8 var ok bool // Command Line Arguments. read_cla() // Read Input File. file_input_content, err = ioutil.ReadFile(file_input_path) check_error(err) file_input_size = uint64(len(file_input_content)) input_field_size = file_input_size * bits_in_byte // Check X & Y. if file_input_x <= 0 { fmt.Println("Bad X Size.") os.Exit(ERROR_1) } file_input_cols = file_input_x if (input_field_size % file_input_x) != 0 { fmt.Println("Bad X Size.") os.Exit(ERROR_1) } file_input_rows = input_field_size / file_input_x // Do Action. 
if action_type == ACTION_ENCODE_F1 { fmt.Println("Encoding (F1) \"" + file_input_path + "\"...") // // Output Size. file_output_cols = file_input_cols + 4 file_output_rows = file_input_rows + 4 output_field_size = file_output_cols * file_output_rows // Report. fmt.Println("Input Data (WxH):", file_input_cols, "x", file_input_rows, ".") /// fmt.Println("Output Data (WxH):", file_output_cols, "x", file_output_rows, ".") /// // Bytes -> Field. field, ok = bytes_to_field(input_field_size, file_input_cols, file_input_rows, file_input_content) check_ok(ok) // Field -> Frame. field_framed, err_code = pack_data_f1(input_field_size, file_input_cols, file_input_rows, field) check_err_code(err_code) // Frame -> Bytes. file_output_content, ok = field_to_bytes(output_field_size, file_output_cols, file_output_rows, field_framed) check_ok(ok) // Bytes -> File. fmt.Println("Writing \"" + file_output_path + "\"...") // err = ioutil.WriteFile(file_output_path, file_output_content, 0644) check_error(err) } if action_type == ACTION_ENCODE_F2 { fmt.Println("Encoding (F2) \"" + file_input_path + "\"...") // // Output Size. file_output_cols = file_input_cols + 8 file_output_rows = file_input_rows + 8 output_field_size = file_output_cols * file_output_rows // Report. fmt.Println("Input Data (WxH):", file_input_cols, "x", file_input_rows, ".") /// fmt.Println("Output Data (WxH):", file_output_cols, "x", file_output_rows, ".") /// // Bytes -> Field. field, ok = bytes_to_field(input_field_size, file_input_cols, file_input_rows, file_input_content) check_ok(ok) // Field -> Frame. field_framed, err_code = pack_data_f2(input_field_size, file_input_cols, file_input_rows, field) check_err_code(err_code) // Frame -> Bytes. file_output_content, ok = field_to_bytes(output_field_size, file_output_cols, file_output_rows, field_framed) check_ok(ok) // Bytes -> File. 
fmt.Println("Writing \"" + file_output_path + "\"...") // err = ioutil.WriteFile(file_output_path, file_output_content, 0644) check_error(err) } if action_type == ACTION_DECODE_F1 { fmt.Println("Decoding (F1) \"" + file_input_path + "\"...") // // Output Size. file_output_cols = file_input_cols - 4 file_output_rows = file_input_rows - 4 output_field_size = file_output_cols * file_output_rows // Report. fmt.Println("Input Data (WxH):", file_input_cols, "x", file_input_rows, ".") /// fmt.Println("Output Data (WxH):", file_output_cols, "x", file_output_rows, ".") /// // Bytes -> Frame. field_framed, ok = bytes_to_field(input_field_size, file_input_cols, file_input_rows, file_input_content) check_ok(ok) // Frame -> Field. field, ok = get_data_f1(input_field_size, file_input_cols, file_input_rows, field_framed) check_ok(ok) // Field -> Bytes. file_output_content, ok = field_to_bytes(output_field_size, file_output_cols, file_output_rows, field) check_ok(ok) // Bytes -> File. fmt.Println("Writing \"" + file_output_path + "\"...") // err = ioutil.WriteFile(file_output_path, file_output_content, 0644) check_error(err) } if action_type == ACTION_DECODE_F2 { fmt.Println("Decoding (F2) \"" + file_input_path + "\"...") // // Output Size. file_output_cols = file_input_cols - 8 file_output_rows = file_input_rows - 8 output_field_size = file_output_cols * file_output_rows // Report. fmt.Println("Input Data (WxH):", file_input_cols, "x", file_input_rows, ".") /// fmt.Println("Output Data (WxH):", file_output_cols, "x", file_output_rows, ".") /// // Bytes -> Frame. field_framed, ok = bytes_to_field(input_field_size, file_input_cols, file_input_rows, file_input_content) check_ok(ok) // Frame -> Field. field, ok = get_data_f2(input_field_size, file_input_cols, file_input_rows, field_framed) check_ok(ok) // Field -> Bytes. file_output_content, ok = field_to_bytes(output_field_size, file_output_cols, file_output_rows, field) check_ok(ok) // Bytes -> File. 
fmt.Println("Writing \"" + file_output_path + "\"...") // err = ioutil.WriteFile(file_output_path, file_output_content, 0644) check_error(err) } if action_type == ACTION_NONE { fmt.Println("Idle...") // } } //============================================================================== // Packs useful Data into Message and surrounds it with a Frame I. func pack_data_f1( data_bits_count uint64, data_columns_count uint64, data_rows_count uint64, data field_of_bits) (field_of_bits, uint8) { const DS = 4 const DO = DS / 2 const data_columns_count_limit = math.MaxUint64 - DS const data_rows_count_limit = math.MaxUint64 - DS const ERROR_ALL_CLEAR = 0 // No Error. const ERROR_BAD_SIZE = 1 // (Colums * Rows) ≠ (Bit Count). const ERROR_COLUMNS_ERROR = 2 // Too many Columns in Data. const ERROR_ROWS_ERROR = 3 // Too many Rows in Data. var data_bits_count_required uint64 var result field_of_bits // Cursors in Result. var i uint64 // Current Row #. var i_max uint64 // var i_min uint64 // var j uint64 // Current Column #. var j_max uint64 // var j_min uint64 // // Cursors in Data. var y uint64 // Current Row #. var x uint64 // Current Column #. var result_columns_count uint64 var result_rows_count uint64 var data_first_column_index uint64 var data_first_row_index uint64 //var data_last_column_index uint64 //var data_last_row_index uint64 var result_first_column_index uint64 var result_first_row_index uint64 var result_last_column_index uint64 var result_last_row_index uint64 // Check Input Data. data_bits_count_required = data_columns_count * data_rows_count if data_bits_count != data_bits_count_required { return nil, ERROR_BAD_SIZE } if data_columns_count > data_columns_count_limit { return nil, ERROR_COLUMNS_ERROR } if data_rows_count > data_rows_count_limit { return nil, ERROR_ROWS_ERROR } // Indices & Sizes. 
result_columns_count = data_columns_count + DS result_rows_count = data_rows_count + DS data_first_column_index = 0 data_first_row_index = 0 //data_last_column_index = data_columns_count - 1 //data_last_row_index = data_rows_count - 1 result_first_column_index = 0 result_first_row_index = 0 result_last_column_index = result_columns_count - 1 result_last_row_index = result_rows_count - 1 // Create an empty Field. result = make(field_of_bits, result_rows_count) for i = result_first_row_index; i <= result_last_row_index; i++ { result[i] = make(row_of_bits, result_columns_count) for j = result_first_column_index; j <= result_last_column_index; j++ { result[i][j] = EMPTY } } // Draw the Frame I. for j = result_first_column_index; j <= result_last_column_index; j++ { result[result_first_row_index][j] = FILLED result[result_last_row_index][j] = FILLED } for i = result_first_row_index; i <= result_last_row_index; i++ { result[i][result_first_column_index] = FILLED result[i][result_last_column_index] = FILLED } // Draw Frame's Spacer. i_min = result_first_row_index + 1 i_max = result_last_row_index - 1 j_min = result_first_column_index + 1 j_max = result_last_column_index - 1 for j = j_min; j <= j_max; j++ { result[i_min][j] = EMPTY result[i_max][j] = EMPTY } for i = i_min; i <= i_max; i++ { result[i][j_min] = EMPTY result[i][j_max] = EMPTY } // Draw Data. i_min = result_first_row_index + DO i_max = result_last_row_index - DO j_min = result_first_column_index + DO j_max = result_last_column_index - DO y = data_first_row_index for i = i_min; i <= i_max; i++ { x = data_first_column_index for j = j_min; j <= j_max; j++ { result[i][j] = data[y][x] x++ } y++ } return result, ERROR_ALL_CLEAR } //============================================================================== // Packs useful Data into Message and surrounds it with a Frame II. 
func pack_data_f2( data_bits_count uint64, data_columns_count uint64, data_rows_count uint64, data field_of_bits) (field_of_bits, uint8) { const DS = 8 const DO = DS / 2 const data_columns_count_limit = math.MaxUint64 - DS const data_rows_count_limit = math.MaxUint64 - DS const ERROR_ALL_CLEAR = 0 // No Error. const ERROR_BAD_SIZE = 1 // (Colums * Rows) ≠ (Bit Count). const ERROR_COLUMNS_ERROR = 2 // Too many Columns in Data. const ERROR_ROWS_ERROR = 3 // Too many Rows in Data. var data_bits_count_required uint64 var result field_of_bits // Cursors in Result. var i uint64 // Current Row #. var i_max uint64 // var i_min uint64 // var j uint64 // Current Column #. var j_max uint64 // var j_min uint64 // // Cursors in Data. var y uint64 // Current Row #. var x uint64 // Current Column #. var result_columns_count uint64 var result_rows_count uint64 var data_first_column_index uint64 var data_first_row_index uint64 //var data_last_column_index uint64 //var data_last_row_index uint64 var result_first_column_index uint64 var result_first_row_index uint64 var result_last_column_index uint64 var result_last_row_index uint64 // Check Input Data. data_bits_count_required = data_columns_count * data_rows_count if data_bits_count != data_bits_count_required { return nil, ERROR_BAD_SIZE } if data_columns_count > data_columns_count_limit { return nil, ERROR_COLUMNS_ERROR } if data_rows_count > data_rows_count_limit { return nil, ERROR_ROWS_ERROR } // Indices & Sizes. result_columns_count = data_columns_count + DS result_rows_count = data_rows_count + DS data_first_column_index = 0 data_first_row_index = 0 //data_last_column_index = data_columns_count - 1 //data_last_row_index = data_rows_count - 1 result_first_column_index = 0 result_first_row_index = 0 result_last_column_index = result_columns_count - 1 result_last_row_index = result_rows_count - 1 // Create an empty Field. 
result = make(field_of_bits, result_rows_count) for i = result_first_row_index; i <= result_last_row_index; i++ { result[i] = make(row_of_bits, result_columns_count) for j = result_first_column_index; j <= result_last_column_index; j++ { result[i][j] = EMPTY } } // Draw the Frame I. for j = result_first_column_index; j <= result_last_column_index; j++ { result[result_first_row_index][j] = FILLED result[result_last_row_index][j] = FILLED } for i = result_first_row_index; i <= result_last_row_index; i++ { result[i][result_first_column_index] = FILLED result[i][result_last_column_index] = FILLED } // Draw Frame's Spacer. i_min = result_first_row_index + 1 i_max = result_last_row_index - 1 j_min = result_first_column_index + 1 j_max = result_last_column_index - 1 for j = j_min; j <= j_max; j++ { result[i_min][j] = EMPTY result[i_max][j] = EMPTY } for i = i_min; i <= i_max; i++ { result[i][j_min] = EMPTY result[i][j_max] = EMPTY } // Draw the Frame II. i_min = result_first_row_index + 2 i_max = result_last_row_index - 2 j_min = result_first_column_index + 2 j_max = result_last_column_index - 2 for j = j_min; j <= j_max; j++ { result[i_min][j] = FILLED result[i_max][j] = FILLED } for i = i_min; i <= i_max; i++ { result[i][j_min] = FILLED result[i][j_max] = FILLED } // Draw Frame's Spacer. i_min = result_first_row_index + 3 i_max = result_last_row_index - 3 j_min = result_first_column_index + 3 j_max = result_last_column_index - 3 for j = j_min; j <= j_max; j++ { result[i_min][j] = EMPTY result[i_max][j] = EMPTY } for i = i_min; i <= i_max; i++ { result[i][j_min] = EMPTY result[i][j_max] = EMPTY } // Draw Data. 
i_min = result_first_row_index + DO i_max = result_last_row_index - DO j_min = result_first_column_index + DO j_max = result_last_column_index - DO y = data_first_row_index for i = i_min; i <= i_max; i++ { x = data_first_column_index for j = j_min; j <= j_max; j++ { result[i][j] = data[y][x] x++ } y++ } return result, ERROR_ALL_CLEAR } //============================================================================== // Checks Integrity of a Frame I of the Message. func check_frame_f1( message_bits_count uint64, message_columns_count uint64, message_rows_count uint64, message field_of_bits) bool { const message_columns_count_limit = math.MaxUint64 const message_rows_count_limit = math.MaxUint64 const message_rows_count_min = 4 + 1 // Rows in empty Message. const message_columns_count_min = 4 + 1 // Columns in empty Message. const ERROR_ALL_CLEAR = true // No Error. const ERROR = false var data_bits_count_required uint64 // Cursors in Message. var i uint64 // Current Row #. var i_max uint64 // var i_min uint64 // var j uint64 // Current Column #. var j_max uint64 // var j_min uint64 // // Check Input Data. data_bits_count_required = message_columns_count * message_rows_count if message_bits_count != data_bits_count_required { return ERROR } if message_columns_count > message_columns_count_limit { return ERROR } if message_rows_count > message_rows_count_limit { return ERROR } // Check Minimum Sizes. if message_rows_count < message_rows_count_min { return ERROR } if message_columns_count < message_columns_count_min { return ERROR } // Check Dimensions of Array. if uint64(len(message)) != message_rows_count { return ERROR } i_min = 0 i_max = message_rows_count - 1 for i = i_min; i <= i_max; i++ { if uint64(len(message[i])) != message_columns_count { return ERROR } } // Check Frame I. 
j_min = 0 j_max = message_columns_count - 1 for j = j_min; j <= j_max; j++ { if message[i_min][j] != FILLED { return ERROR } if message[i_max][j] != FILLED { return ERROR } } for i = i_min; i <= i_max; i++ { if message[i][j_min] != FILLED { return ERROR } if message[i][j_max] != FILLED { return ERROR } } // Check Frame's Spacer. i_min = 1 i_max = message_rows_count - 2 j_min = 1 j_max = message_columns_count - 2 for j = j_min; j <= j_max; j++ { if message[i_min][j] != EMPTY { return ERROR } if message[i_max][j] != EMPTY { return ERROR } } for i = i_min; i <= i_max; i++ { if message[i][j_min] != EMPTY { return ERROR } if message[i][j_max] != EMPTY { return ERROR } } return ERROR_ALL_CLEAR } //============================================================================== // Checks Integrity of a Frame II of the Message. func check_frame_f2( message_bits_count uint64, message_columns_count uint64, message_rows_count uint64, message field_of_bits) bool { const message_columns_count_limit = math.MaxUint64 const message_rows_count_limit = math.MaxUint64 const message_rows_count_min = 8 + 1 // Rows in empty Message. const message_columns_count_min = 8 + 1 // Columns in empty Message. const ERROR_ALL_CLEAR = true // No Error. const ERROR = false var data_bits_count_required uint64 // Cursors in Message. var i uint64 // Current Row #. var i_max uint64 // var i_min uint64 // var j uint64 // Current Column #. var j_max uint64 // var j_min uint64 // // Check Input Data. data_bits_count_required = message_columns_count * message_rows_count if message_bits_count != data_bits_count_required { return ERROR } if message_columns_count > message_columns_count_limit { return ERROR } if message_rows_count > message_rows_count_limit { return ERROR } // Check Minimum Sizes. if message_rows_count < message_rows_count_min { return ERROR } if message_columns_count < message_columns_count_min { return ERROR } // Check Dimensions of Array. 
if uint64(len(message)) != message_rows_count { return ERROR } i_min = 0 i_max = message_rows_count - 1 for i = i_min; i <= i_max; i++ { if uint64(len(message[i])) != message_columns_count { return ERROR } } // Check Frame I. j_min = 0 j_max = message_columns_count - 1 for j = j_min; j <= j_max; j++ { if message[i_min][j] != FILLED { return ERROR } if message[i_max][j] != FILLED { return ERROR } } for i = i_min; i <= i_max; i++ { if message[i][j_min] != FILLED { return ERROR } if message[i][j_max] != FILLED { return ERROR } } // Check Frame's Spacer. i_min = 1 i_max = message_rows_count - 2 j_min = 1 j_max = message_columns_count - 2 for j = j_min; j <= j_max; j++ { if message[i_min][j] != EMPTY { return ERROR } if message[i_max][j] != EMPTY { return ERROR } } for i = i_min; i <= i_max; i++ { if message[i][j_min] != EMPTY { return ERROR } if message[i][j_max] != EMPTY { return ERROR } } // Check Frame II. i_min = 2 i_max = message_rows_count - 3 j_min = 2 j_max = message_columns_count - 3 for j = j_min; j <= j_max; j++ { if message[i_min][j] != FILLED { return ERROR } if message[i_max][j] != FILLED { return ERROR } } for i = i_min; i <= i_max; i++ { if message[i][j_min] != FILLED { return ERROR } if message[i][j_max] != FILLED { return ERROR } } // Check Frame's Spacer. i_min = 3 i_max = message_rows_count - 4 j_min = 3 j_max = message_columns_count - 4 for j = j_min; j <= j_max; j++ { if message[i_min][j] != EMPTY { return ERROR } if message[i_max][j] != EMPTY { return ERROR } } for i = i_min; i <= i_max; i++ { if message[i][j_min] != EMPTY { return ERROR } if message[i][j_max] != EMPTY { return ERROR } } return ERROR_ALL_CLEAR } //============================================================================== // Gets Data from Message with Frame I. 
func get_data_f1( message_bits_count uint64, message_columns_count uint64, message_rows_count uint64, message field_of_bits) (field_of_bits, bool) { const DS = 4 const DO = DS / 2 const ERROR_ALL_CLEAR = true // No Error. const ERROR = false var data field_of_bits var data_rows_count uint64 var data_columns_count uint64 var cf bool // Result of Frame Check. // Cursors in Message. var i uint64 // Current Row #. var i_min uint64 // var j uint64 // Current Column #. var j_min uint64 // // Cursors in Data. var y uint64 // Current Row #. var x uint64 // Current Column #. // Check Frame. cf = check_frame_f1(message_bits_count, message_columns_count, message_rows_count, message) if cf == ERROR { return nil, ERROR } // Prepare Data. data_rows_count = message_rows_count - DS data_columns_count = message_columns_count - DS // data = make(field_of_bits, data_rows_count) for y = 0; y < data_rows_count; y++ { data[y] = make(row_of_bits, data_columns_count) for x = 0; x < data_columns_count; x++ { data[y][x] = EMPTY } } // Get Data. i_min = DO j_min = DO i = i_min for y = 0; y < data_rows_count; y++ { j = j_min for x = 0; x < data_columns_count; x++ { data[y][x] = message[i][j] j++ } i++ } return data, ERROR_ALL_CLEAR } //============================================================================== // Gets Data from Message with Frame II. func get_data_f2( message_bits_count uint64, message_columns_count uint64, message_rows_count uint64, message field_of_bits) (field_of_bits, bool) { const DS = 8 const DO = DS / 2 const ERROR_ALL_CLEAR = true // No Error. const ERROR = false var data field_of_bits var data_rows_count uint64 var data_columns_count uint64 var cf bool // Result of Frame Check. // Cursors in Message. var i uint64 // Current Row #. var i_min uint64 // var j uint64 // Current Column #. var j_min uint64 // // Cursors in Data. var y uint64 // Current Row #. var x uint64 // Current Column #. // Check Frame. 
cf = check_frame_f2(message_bits_count, message_columns_count, message_rows_count, message) if cf == ERROR { return nil, ERROR } // Prepare Data. data_rows_count = message_rows_count - DS data_columns_count = message_columns_count - DS // data = make(field_of_bits, data_rows_count) for y = 0; y < data_rows_count; y++ { data[y] = make(row_of_bits, data_columns_count) for x = 0; x < data_columns_count; x++ { data[y][x] = EMPTY } } // Get Data. i_min = DO j_min = DO i = i_min for y = 0; y < data_rows_count; y++ { j = j_min for x = 0; x < data_columns_count; x++ { data[y][x] = message[i][j] j++<|fim▁hole|> } return data, ERROR_ALL_CLEAR } //============================================================================== // Converts Field into Array of Bytes. func field_to_bytes( field_bits_count uint64, field_columns_count uint64, field_rows_count uint64, field field_of_bits) ([]byte, bool) { const field_columns_count_limit = math.MaxUint64 const field_rows_count_limit = math.MaxUint64 const ERROR_ALL_CLEAR = true // No Error. const ERROR = false const MSG_1 = "Warning ! The Size of the Output Data can not be stored " + "using 8-Bit Bytes ! The Size is not a Multiple of 8 !" var i uint64 var j uint64 // Cursors in Field. var y uint64 var x uint64 var array []byte var current_bit bit var current_byte byte var bytes_count uint64 var field_bits_count_required uint64 var field_column_first uint64 var field_column_last uint64 field_column_first = 0 field_column_last = field_columns_count - 1 // Check Input Data. field_bits_count_required = field_columns_count * field_rows_count if field_bits_count != field_bits_count_required { log.Println("1") return nil, ERROR } if field_columns_count > field_columns_count_limit { log.Println("2") return nil, ERROR } if field_rows_count > field_rows_count_limit { log.Println("3") return nil, ERROR } // Can be converted to Bytes ? 
if (field_bits_count % bits_in_byte) != 0 { fmt.Println(MSG_1) return nil, ERROR } bytes_count = field_bits_count / bits_in_byte array = make([]byte, bytes_count) x = 0 y = 0 for i = 0; i < bytes_count; i++ { current_byte = 0 // Read 8 Bits. for j = 0; j < bits_in_byte; j++ { current_bit = field[y][x] // Save Bit in Byte. if current_bit == FILLED { current_byte = current_byte | (128 >> j) } // Next Element in Field. if x == field_column_last { y++ x = field_column_first } else { x++ } } // Save to Array. array[i] = current_byte } return array, ERROR_ALL_CLEAR } //============================================================================== // Converts Array of Bytes into Field. func bytes_to_field( field_bits_count uint64, field_columns_count uint64, field_rows_count uint64, array []byte) (field_of_bits, bool) { const field_columns_count_limit = math.MaxUint64 const field_rows_count_limit = math.MaxUint64 const ERROR_ALL_CLEAR = true // No Error. const ERROR = false var i uint64 var j uint64 // Cursors in Field. var y uint64 var x uint64 var field field_of_bits var current_bit bit var current_byte byte var current_byte_tmp byte var bytes_count uint64 var field_bits_count_required uint64 var field_column_first uint64 var field_column_last uint64 field_column_first = 0 field_column_last = field_columns_count - 1 // Check Input Data. field_bits_count_required = field_columns_count * field_rows_count if field_bits_count != field_bits_count_required { return nil, ERROR } if field_columns_count > field_columns_count_limit { return nil, ERROR } if field_rows_count > field_rows_count_limit { return nil, ERROR } // Can be converted to Bytes ? if (field_bits_count % bits_in_byte) != 0 { return nil, ERROR } bytes_count = uint64(len(array)) if bytes_count*bits_in_byte != field_bits_count { return nil, ERROR } // Create an empty Field. 
field = make(field_of_bits, field_rows_count) for y = 0; y < field_rows_count; y++ { field[y] = make(row_of_bits, field_columns_count) for x = 0; x < field_columns_count; x++ { field[y][x] = EMPTY } } x = 0 y = 0 for i = 0; i < bytes_count; i++ { current_byte = array[i] // Read 8 Bits. for j = 0; j < bits_in_byte; j++ { current_byte_tmp = (current_byte >> (7 - j)) & 1 if current_byte_tmp == 1 { current_bit = FILLED } else { current_bit = EMPTY } // Save Bit in Field. field[y][x] = current_bit // Next Element in Field. if x == field_column_last { y++ x = field_column_first } else { x++ } } } return field, ERROR_ALL_CLEAR } //============================================================================== // Read Command Line Arguments (Keys, Flags, Switches). func read_cla() { var err error // Set Rules. cla_file_in = flag.String("fi", "input", "File Input.") cla_file_out = flag.String("fo", "output", "File Output.") cla_action_type = flag.String("a", "", "Action Type.") cla_x = flag.String("x", "0", "Columns.") cla_y = flag.String("y", "0", "Rows.") // Read C.L.A. flag.Parse() // Files. file_input_path = *cla_file_in file_output_path = *cla_file_out // Action Type. if *cla_action_type == "e1" { action_type = ACTION_ENCODE_F1 } else if *cla_action_type == "e2" { action_type = ACTION_ENCODE_F2 } else if *cla_action_type == "d1" { action_type = ACTION_DECODE_F1 } else if *cla_action_type == "d2" { action_type = ACTION_DECODE_F2 } else { action_type = ACTION_NONE } // X, Y. 
file_input_x, err = strconv.ParseUint(*cla_x, 10, 64) check_error(err) file_input_y, err = strconv.ParseUint(*cla_y, 10, 64) check_error(err) } //============================================================================== func check_error(err error) { if err != nil { log.Println(err) os.Exit(ERROR_1) } } //============================================================================== func check_ok(ok bool) { if !ok { log.Println("Error.") os.Exit(ERROR_1) } else { //fmt.Println("OK.") } } //============================================================================== func check_err_code(err_code uint8) { if err_code == 0 { //fmt.Println("OK.") } else { log.Println("Error.") os.Exit(ERROR_1) } } //==============================================================================<|fim▁end|>
} i++
<|file_name|>take.ts<|end_file_name|><|fim▁begin|>import { Operator } from '../Operator'; import { Subscriber } from '../Subscriber'; import { ArgumentOutOfRangeError } from '../util/ArgumentOutOfRangeError'; import { empty } from '../observable/empty'; import { Observable } from '../Observable'; import { MonoTypeOperatorFunction, TeardownLogic } from '../types'; /** * Emits only the first `count` values emitted by the source Observable. * * <span class="informal">Takes the first `count` values from the source, then * completes.</span> * * ![](take.png) * * `take` returns an Observable that emits only the first `count` values emitted * by the source Observable. If the source emits fewer than `count` values then * all of its values are emitted. After that, it completes, regardless if the * source completes. * * ## Example * Take the first 5 seconds of an infinite 1-second interval Observable * ```ts * import { interval } from 'rxjs'; * import { take } from 'rxjs/operators'; * * const intervalCount = interval(1000); * const takeFive = intervalCount.pipe(take(5)); * takeFive.subscribe(x => console.log(x)); * * // Logs: * // 0 * // 1 * // 2 * // 3 * // 4 * ``` * * @see {@link takeLast} * @see {@link takeUntil} * @see {@link takeWhile} * @see {@link skip} * * @throws {ArgumentOutOfRangeError} When using `take(i)`, it delivers an * ArgumentOutOrRangeError to the Observer's `error` callback if `i < 0`. * * @param {number} count The maximum number of `next` values to emit. * @return {Observable<T>} An Observable that emits only the first `count` * values emitted by the source Observable, or all of the values from the source * if the source emits fewer than `count` values. 
* @method take * @owner Observable */ export function take<T>(count: number): MonoTypeOperatorFunction<T> { return (source: Observable<T>) => { if (count === 0) { return empty(); } else { return source.lift(new TakeOperator(count)); } }; } class TakeOperator<T> implements Operator<T, T> { constructor(private total: number) { if (this.total < 0) { throw new ArgumentOutOfRangeError; } }<|fim▁hole|> call(subscriber: Subscriber<T>, source: any): TeardownLogic { return source.subscribe(new TakeSubscriber(subscriber, this.total)); } } /** * We need this JSDoc comment for affecting ESDoc. * @ignore * @extends {Ignored} */ class TakeSubscriber<T> extends Subscriber<T> { private count: number = 0; constructor(destination: Subscriber<T>, private total: number) { super(destination); } protected _next(value: T): void { const total = this.total; const count = ++this.count; if (count <= total) { this.destination.next(value); if (count === total) { this.destination.complete(); this.unsubscribe(); } } } }<|fim▁end|>
<|file_name|>display-main-image.pipe.ts<|end_file_name|><|fim▁begin|>import {Pipe, PipeTransform} from '@angular/core';<|fim▁hole|>import { Images } from '../../../../both/collections/images.collection'; import { Track } from '../../../../both/models/track.model'; @Pipe({ name: 'displayMainImage' }) export class DisplayMainImagePipe implements PipeTransform { transform(track: Track) { if (!track) { return; } let imageUrl: string; let imageId: string = (track.images || [])[0]; const found = Images.findOne(imageId); if (found) { imageUrl = found.url; } return imageUrl; } }<|fim▁end|>
<|file_name|>variant.rs<|end_file_name|><|fim▁begin|>use vtable::VTable; use variant_ref::VariantRef; use variant_ref_mut::VariantRefMut; use std::any::{Any, TypeId}; use std::fmt::{Debug, Display, Error as FmtError, Formatter}; use std::ops::Deref; pub struct Variant<'a> { pub data: *mut (), pub vtable: &'a VTable, } impl<'a> Variant<'a> { pub fn new<T: Any>(value: T, vtable: &'a VTable) -> Self { Variant { data: Box::into_raw(Box::new(value)) as *mut (), vtable: vtable, } } #[inline] pub fn is<T: Any>(&self) -> bool { self.vtable.id == TypeId::of::<T>() } #[inline] pub fn downcast_ref<T: Any>(&self) -> Option<&T> { if self.is::<T>() { unsafe { Some(&*(self.data as *const T)) } } else { None } } #[inline] pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> { if self.is::<T>() { unsafe { Some(&mut *(self.data as *mut T)) } } else { None } } #[inline] pub unsafe fn downcast_ref_unchecked<T: Any>(&self) -> &T { debug_assert!(self.is::<T>()); &*(self.data as *const T) } #[inline] pub unsafe fn downcast_mut_unchecked<T: Any>(&mut self) -> &mut T { debug_assert!(self.is::<T>()); &mut *(self.data as *mut T) } } impl<'a> Deref for Variant<'a> { type Target = VariantRef<'a>; fn deref(&self) -> &VariantRef<'a> { self.as_ref() } } impl<'a> AsRef<VariantRef<'a>> for Variant<'a> { fn as_ref(&self) -> &VariantRef<'a> { unsafe { &*(self as *const _ as *const VariantRef<'a>) } } } <|fim▁hole|>impl<'a> AsMut<VariantRefMut<'a>> for Variant<'a> { fn as_mut(&mut self) -> &mut VariantRefMut<'a> { unsafe { &mut *(self as *mut _ as *mut VariantRefMut<'a>) } } } impl<'a> Clone for Variant<'a> { fn clone(&self) -> Self { (self.vtable.clone)(self.as_ref()) } } impl<'a> Drop for Variant<'a> { fn drop(&mut self) { (self.vtable.drop)(self) } } impl<'a> Display for Variant<'a> { fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> { (self.vtable.display)(self.as_ref(), f) } } impl<'a> Debug for Variant<'a> { fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> { 
(self.vtable.debug)(self.as_ref(), f) } }<|fim▁end|>
<|file_name|>sqlite_filesystem.cpp<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2014-present, The osquery authors * * This source code is licensed as defined by the LICENSE file found in the * root directory of this source tree. * * SPDX-License-Identifier: (Apache-2.0 OR GPL-2.0-only) */ #include <string> #include <boost/filesystem.hpp> #include <boost/optional.hpp> #include <sqlite3.h> namespace osquery { static boost::optional<std::string> findExistingProgramPathFromCommand( const char* path, char escape_symbol, bool allow_quoting, bool shortest) { size_t length = strlen(path); std::string result; size_t pos = 0; // Skip spaces for (; pos < length; ++pos) { if (!isspace(path[pos])) { break; } } std::string temp_string; bool is_quoted = false; bool is_escaped = false; for (; pos < length; ++pos) { if (is_escaped) { temp_string += path[pos]; is_escaped = false; continue; } if (allow_quoting && path[pos] == '"') { is_quoted = !is_quoted; continue; } if (path[pos] == escape_symbol) { is_escaped = true; continue; } if (!is_quoted && isspace(path[pos])) { // validate temp string boost::filesystem::path test_path = temp_string; auto status = boost::filesystem::status(test_path); if (boost::filesystem::exists(status) && !boost::filesystem::is_directory(status)) { result = temp_string; if (shortest) { break; } } } temp_string += path[pos]; } if (result.length() == 0 || !shortest) { boost::filesystem::path test_path = temp_string; auto status = boost::filesystem::status(test_path); if (boost::filesystem::exists(status) && !boost::filesystem::is_directory(status)) { result = temp_string; } } return result; } static boost::optional<std::string> findExistingProgramPathFromCommandSqlArgs( int argc, sqlite3_value** argv, bool shortest) { if (argc == 0) { return boost::none; } // NULLs are not allowed for (int i = 0; i < argc; i++) { if (SQLITE_NULL == sqlite3_value_type(argv[i])) { return boost::none; } } const char* path = reinterpret_cast<const 
char*>(sqlite3_value_text(argv[0])); bool allow_quoting = false; if (argc > 1) { allow_quoting = sqlite3_value_int(argv[1]) != 0 ? true : false; } #ifdef WIN32 char escape_symbol = '^'; #else char escape_symbol = '\\'; #endif<|fim▁hole|> std::strlen(escape_symbol_string) != 1) { return boost::none; } escape_symbol = escape_symbol_string[0]; } return findExistingProgramPathFromCommand( path, escape_symbol, allow_quoting, shortest); } static void findFilePathInLaunchCommand(sqlite3_context* context, int argc, sqlite3_value** argv) { auto result = findExistingProgramPathFromCommandSqlArgs(argc, argv, true); if (result) { sqlite3_result_text(context, result->c_str(), static_cast<int>(result->size()), SQLITE_TRANSIENT); } else { sqlite3_result_error( context, "Invalid inputs to find_binary_path_from_cmd", -1); } } static void isPathDeterministic(sqlite3_context* context, int argc, sqlite3_value** argv) { auto shortest = findExistingProgramPathFromCommandSqlArgs(argc, argv, true); if (shortest) { const char* path = (const char*)sqlite3_value_text(argv[0]); if (shortest->length() == 0 || shortest->length() == strlen(path)) { // There are 2 cases: // 1 - empty string, all parts of path are invalid, // so path is deterministic // 2 - short == full, then there is only 1 valid path sqlite3_result_int(context, 1); return; } else { auto longest = findExistingProgramPathFromCommandSqlArgs(argc, argv, false); if (longest) { sqlite3_result_int(context, shortest->length() == longest->length() ? 
1 : 0); return; } } } sqlite3_result_error(context, "Invalid inputs to is_path_deterministic", -1); } static void getParentDirectory(sqlite3_context* context, int argc, sqlite3_value** argv) { if (sqlite3_value_type(argv[0]) != SQLITE_TEXT) { sqlite3_result_error( context, "Invalid inputs to parent_directory, TEXT was expected", -1); return; } const char* path = reinterpret_cast<const char*>(sqlite3_value_text(argv[0])); if (path == nullptr) { sqlite3_result_null(context); return; } int pos = 0; int last_slash_pos = -1; #if defined(OSQUERY_WINDOWS) char directory_symbol = '\\'; #elif defined(OSQUERY_POSIX) char directory_symbol = '/'; #else #error Unsupported platform #endif while (path[pos] != '\0') { if (path[pos] == directory_symbol) { last_slash_pos = pos; } pos++; } if (last_slash_pos < 0) { // No parent directory sqlite3_result_null(context); return; } char* result = reinterpret_cast<char*>(malloc(last_slash_pos)); memcpy(result, path, last_slash_pos); sqlite3_result_text(context, result, last_slash_pos, free); } void registerFilesystemExtensions(sqlite3* db) { sqlite3_create_function(db, "is_path_deterministic", -1, SQLITE_UTF8 | SQLITE_DETERMINISTIC, nullptr, isPathDeterministic, nullptr, nullptr); sqlite3_create_function(db, "find_file_path_in_cmd", -1, SQLITE_UTF8 | SQLITE_DETERMINISTIC, nullptr, findFilePathInLaunchCommand, nullptr, nullptr); sqlite3_create_function(db, "parent_directory", 1, SQLITE_UTF8 | SQLITE_DETERMINISTIC, nullptr, getParentDirectory, nullptr, nullptr); } } // namespace osquery<|fim▁end|>
if (argc > 2) { const char* escape_symbol_string = reinterpret_cast<const char*>(sqlite3_value_text(argv[2])); if (escape_symbol_string == NULL ||
<|file_name|>models.py<|end_file_name|><|fim▁begin|># quick_info/models.py # Brought to you by We Vote. Be good. # -*- coding: UTF-8 -*- # Diagrams here: https://docs.google.com/drawings/d/1fEs_f2-4Du9knJ8FXn6PQ2BcmXL4zSkMYh-cp75EeLE/edit from ballot.models import OFFICE, CANDIDATE, POLITICIAN, MEASURE, KIND_OF_BALLOT_ITEM_CHOICES from django.db import models from exception.models import handle_exception, handle_record_found_more_than_one_exception,\ handle_record_not_saved_exception import wevote_functions.admin from wevote_functions.functions import convert_to_int, positive_value_exists from wevote_settings.models import fetch_next_we_vote_id_quick_info_integer, \ fetch_next_we_vote_id_quick_info_master_integer, fetch_site_unique_id_prefix # Language Codes: http://www.mcanerin.com/en/articles/meta-language.asp # Country Codes: http://www.mcanerin.com/en/articles/ccTLD.asp SPANISH = 'es' ENGLISH = 'en' TAGALOG = 'tl' VIETNAMESE = 'vi' CHINESE = 'zh' LANGUAGE_CHOICES = ( (ENGLISH, 'English'), (SPANISH, 'Spanish'), (TAGALOG, 'Tagalog'), (VIETNAMESE, 'Vietnamese'), (CHINESE, 'Chinese'), ) NOT_SPECIFIED = 'not_specified' BALLOTPEDIA = 'ballotpedia' DIRECT_ENTRY = 'direct' WIKIPEDIA = 'wikipedia' SOURCE_SITE_CHOICES = ( (NOT_SPECIFIED, 'Not Specified'), (BALLOTPEDIA, 'Ballotpedia'), (DIRECT_ENTRY, 'Direct Entry'), (WIKIPEDIA, 'Wikipedia'), ) logger = wevote_functions.admin.get_logger(__name__) class QuickInfo(models.Model): """ The information that shows when you click an info icon next to a ballot item """ # We are relying on built-in Python id field # The we_vote_id identifier is unique across all We Vote sites, and allows us to share our org info with other # organizations # It starts with "wv" then we add on a database specific identifier like "3v" (WeVoteSetting.site_unique_id_prefix) # then the string "info", and then a sequential integer like "123". 
# We keep the last value in WeVoteSetting.we_vote_id_last_quick_info_integer we_vote_id = models.CharField( verbose_name="we vote permanent id", max_length=255, default=None, null=True, blank=True, unique=True) # The language that this text is in language = models.CharField(max_length=5, choices=LANGUAGE_CHOICES, default=ENGLISH) info_text = models.TextField(null=True, blank=True) info_html = models.TextField(null=True, blank=True) ballot_item_display_name = models.CharField(verbose_name="text name for ballot item for quick display", max_length=255, null=True, blank=True) # See also more_info_credit_text more_info_credit = models.CharField(max_length=15, choices=SOURCE_SITE_CHOICES, default=NOT_SPECIFIED, null=True, blank=True) # A link to any location with more information about this quick information more_info_url = models.URLField(blank=True, null=True, verbose_name='url with more the full entry for this info') last_updated = models.DateTimeField(verbose_name='date entered', null=True, auto_now=True) # TODO Convert to date_last_changed # The unique id of the last person who edited this entry. last_editor_we_vote_id = models.CharField( verbose_name="last editor we vote id", max_length=255, null=True, blank=True, unique=False) # This is the office that the quick_info refers to. # Either contest_measure is filled, contest_office OR candidate, but not all three contest_office_we_vote_id = models.CharField( verbose_name="we vote permanent id for the contest_office", max_length=255, null=True, blank=True, unique=False) # This is the candidate/politician that the quick_info refers to. 
# Either candidate is filled, contest_office OR contest_measure, but not all three candidate_campaign_we_vote_id = models.CharField( verbose_name="we vote permanent id for the candidate", max_length=255, null=True, blank=True, unique=False) # Useful for queries based on Politicians politician_we_vote_id = models.CharField( verbose_name="we vote permanent id for politician", max_length=255, null=True, blank=True, unique=False) # This is the measure/initiative/proquick_info that the quick_info refers to. # Either contest_measure is filled, contest_office OR candidate, but not all three contest_measure_we_vote_id = models.CharField( verbose_name="we vote permanent id for the contest_measure", max_length=255, null=True, blank=True, unique=False) # There are many ballot items that don't have (or need) a custom quick_info entry, and can reference a general # entry. This field is the we_vote_id of the master quick_info entry that has the general text. quick_info_master_we_vote_id = models.CharField( verbose_name="we vote id of other entry which is the master", max_length=255, default=None, null=True, blank=True, unique=True) # The unique ID of the election containing this contest. (Provided by Google Civic) google_civic_election_id = models.PositiveIntegerField( verbose_name="google civic election id", default=0, null=True, blank=True) def __unicode__(self): return self.we_vote_id class Meta: ordering = ('last_updated',) # We override the save function so we can auto-generate we_vote_id def save(self, *args, **kwargs): # Even if this organization came from another source we still need a unique we_vote_id if self.we_vote_id: self.we_vote_id = self.we_vote_id.strip().lower() if self.we_vote_id == "" or self.we_vote_id is None: # If there isn't a value... 
# ...generate a new id site_unique_id_prefix = fetch_site_unique_id_prefix() next_local_integer = fetch_next_we_vote_id_quick_info_integer() # "wv" = We Vote # site_unique_id_prefix = a generated (or assigned) unique id for one server running We Vote # "info" = tells us this is a unique id for a quick_info entry # next_integer = a unique, sequential integer for this server - not necessarily tied to database id self.we_vote_id = "wv{site_unique_id_prefix}info{next_integer}".format( site_unique_id_prefix=site_unique_id_prefix, next_integer=next_local_integer, ) super(QuickInfo, self).save(*args, **kwargs) def is_english(self): if self.language == ENGLISH: return True return False def is_spanish(self): if self.language == SPANISH: return True return False def is_vietnamese(self): if self.language == VIETNAMESE: return True return False def is_chinese(self): if self.language == CHINESE: return True return False def is_tagalog(self): if self.language == TAGALOG: return True return False def get_kind_of_ballot_item(self): if positive_value_exists(self.contest_office_we_vote_id): return OFFICE elif positive_value_exists(self.candidate_campaign_we_vote_id): return CANDIDATE elif positive_value_exists(self.politician_we_vote_id): return POLITICIAN elif positive_value_exists(self.contest_measure_we_vote_id): return MEASURE return None def get_ballot_item_we_vote_id(self): if positive_value_exists(self.contest_office_we_vote_id): return self.contest_office_we_vote_id elif positive_value_exists(self.candidate_campaign_we_vote_id): return self.candidate_campaign_we_vote_id elif positive_value_exists(self.politician_we_vote_id): return self.politician_we_vote_id elif positive_value_exists(self.contest_measure_we_vote_id): return self.contest_measure_we_vote_id return None def more_info_credit_text(self): if self.more_info_credit == BALLOTPEDIA: return "Courtesy of Ballotpedia.org" if self.more_info_credit == WIKIPEDIA: return "Courtesy of Wikipedia.org" return "" class 
QuickInfoManager(models.Manager): def __unicode__(self): return "QuickInfoManager" def fetch_we_vote_id_from_local_id(self, quick_info_id): if positive_value_exists(quick_info_id): results = self.retrieve_quick_info_from_id(quick_info_id) if results['quick_info_found']: quick_info = results['quick_info'] return quick_info.we_vote_id else: return None else: return None def retrieve_contest_office_quick_info(self, contest_office_we_vote_id): quick_info_id = 0 quick_info_we_vote_id = None candidate_we_vote_id = None politician_we_vote_id = None contest_measure_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_candidate_quick_info(self, candidate_we_vote_id): quick_info_id = 0 quick_info_we_vote_id = None politician_we_vote_id = None contest_measure_we_vote_id = None contest_office_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_contest_measure_quick_info(self, contest_measure_we_vote_id): quick_info_id = 0 quick_info_we_vote_id = None candidate_we_vote_id = None politician_we_vote_id = None contest_office_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_quick_info_from_id(self, quick_info_id): quick_info_we_vote_id = None candidate_we_vote_id = None politician_we_vote_id = None contest_office_we_vote_id = None contest_measure_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, 
quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_quick_info_from_we_vote_id(self, quick_info_we_vote_id): quick_info_id = 0 candidate_we_vote_id = None politician_we_vote_id = None contest_office_we_vote_id = None contest_measure_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_quick_info(self, quick_info_id, quick_info_we_vote_id=None, contest_office_we_vote_id=None, candidate_we_vote_id=None, politician_we_vote_id=None, contest_measure_we_vote_id=None): error_result = False exception_does_not_exist = False exception_multiple_object_returned = False quick_info_on_stage = QuickInfo() success = False try: if positive_value_exists(quick_info_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_QUICK_INFO_ID" quick_info_on_stage = QuickInfo.objects.get(id=quick_info_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(quick_info_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get(we_vote_id=quick_info_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(contest_office_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_OFFICE_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( contest_office_we_vote_id=contest_office_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(candidate_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_CANDIDATE_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( candidate_campaign_we_vote_id=candidate_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(politician_we_vote_id): status = 
"RETRIEVE_QUICK_INFO_FOUND_WITH_POLITICIAN_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( politician_we_vote_id=politician_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(contest_measure_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_MEASURE_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( contest_measure_we_vote_id=contest_measure_we_vote_id) quick_info_id = quick_info_on_stage.id success = True else: status = "RETRIEVE_QUICK_INFO_INSUFFICIENT_VARIABLES" except QuickInfo.MultipleObjectsReturned as e:<|fim▁hole|> exception_multiple_object_returned = True success = False status = "RETRIEVE_QUICK_INFO_MULTIPLE_FOUND" except QuickInfo.DoesNotExist: error_result = False exception_does_not_exist = True success = True status = "RETRIEVE_QUICK_INFO_NONE_FOUND" results = { 'success': success, 'status': status, 'error_result': error_result, 'DoesNotExist': exception_does_not_exist, 'MultipleObjectsReturned': exception_multiple_object_returned, 'quick_info_found': True if quick_info_id > 0 else False, 'quick_info_id': quick_info_id, 'quick_info_we_vote_id': quick_info_on_stage.we_vote_id, 'quick_info': quick_info_on_stage, 'is_chinese': quick_info_on_stage.is_chinese(), 'is_english': quick_info_on_stage.is_english(), 'is_spanish': quick_info_on_stage.is_spanish(), 'is_tagalog': quick_info_on_stage.is_tagalog(), 'is_vietnamese': quick_info_on_stage.is_vietnamese(), } return results def retrieve_quick_info_list(self, google_civic_election_id, quick_info_search_str=''): google_civic_election_id = convert_to_int(google_civic_election_id) quick_info_list = [] quick_info_list_found = False try: quick_info_queryset = QuickInfo.objects.all() if positive_value_exists(quick_info_search_str): filters = [] # new_filter = Q(id__iexact=quick_info_search_str) # filters.append(new_filter) # # new_filter = Q(ballot_location_display_name__icontains=quick_info_search_str) # filters.append(new_filter) # Add the first query if 
len(filters): final_filters = filters.pop() # ...and "OR" the remaining items in the list for item in filters: final_filters |= item quick_info_queryset = quick_info_queryset.filter(final_filters) quick_info_queryset = quick_info_queryset.filter( google_civic_election_id=google_civic_election_id) # if positive_value_exists(state_code): # quick_info_queryset = quick_info_queryset.filter(normalized_state__iexact=state_code) quick_info_list = quick_info_queryset if len(quick_info_list): quick_info_list_found = True status = 'QUICK_INFO_LIST_FOUND' else: status = 'NO_QUICK_INFO_LIST_FOUND' except QuickInfo.DoesNotExist: status = 'NO_QUICK_INFO_LIST_FOUND_DOES_NOT_EXIST' quick_info_list = [] except Exception as e: handle_exception(e, logger=logger) status = 'FAILED retrieve_quick_info_list_for_election ' \ '{error} [type: {error_type}]'.format(error=e, error_type=type(e)) results = { 'success': True if quick_info_list_found else False, 'status': status, 'quick_info_list_found': quick_info_list_found, 'quick_info_list': quick_info_list, } return results def update_or_create_quick_info(self, quick_info_id, quick_info_we_vote_id, ballot_item_display_name, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id, info_html, info_text, language, last_editor_we_vote_id, quick_info_master_we_vote_id, more_info_url, more_info_credit, google_civic_election_id ): # Does a quick_info entry already exist? 
quick_info_manager = QuickInfoManager() results = quick_info_manager.retrieve_quick_info(quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id) quick_info_on_stage_found = False quick_info_on_stage_id = 0 quick_info_on_stage = QuickInfo() if results['quick_info_found']: quick_info_on_stage = results['quick_info'] # Update this quick_info entry with new values - we do not delete because we might be able to use # noinspection PyBroadException try: # Figure out if the update is a change to a master entry if positive_value_exists(quick_info_master_we_vote_id): uses_master_entry = True elif (info_html is not False) or (info_text is not False) or (more_info_url is not False): uses_master_entry = False elif positive_value_exists(quick_info_on_stage.info_textx) or \ positive_value_exists(quick_info_on_stage.info_html) or \ positive_value_exists(quick_info_on_stage.more_info_url): uses_master_entry = False elif positive_value_exists(quick_info_on_stage.quick_info_master_we_vote_id): uses_master_entry = True else: uses_master_entry = True if ballot_item_display_name is not False: quick_info_on_stage.ballot_item_display_name = ballot_item_display_name if language is not False: quick_info_on_stage.language = language if last_editor_we_vote_id is not False: quick_info_on_stage.last_editor_we_vote_id = last_editor_we_vote_id if contest_office_we_vote_id is not False: quick_info_on_stage.contest_office_we_vote_id = contest_office_we_vote_id if candidate_we_vote_id is not False: quick_info_on_stage.candidate_campaign_we_vote_id = candidate_we_vote_id if politician_we_vote_id is not False: quick_info_on_stage.politician_we_vote_id = politician_we_vote_id if contest_measure_we_vote_id is not False: quick_info_on_stage.contest_measure_we_vote_id = contest_measure_we_vote_id if google_civic_election_id is not False: quick_info_on_stage.google_civic_election_id = google_civic_election_id if 
uses_master_entry: if quick_info_master_we_vote_id is not False: quick_info_on_stage.quick_info_master_we_vote_id = quick_info_master_we_vote_id # Clear out unique entry values quick_info_on_stage.info_text = "" quick_info_on_stage.info_html = "" quick_info_on_stage.more_info_url = "" quick_info_on_stage.more_info_credit = NOT_SPECIFIED else: # If here, this is NOT a master entry if info_text is not False: quick_info_on_stage.info_text = info_text if info_html is not False: quick_info_on_stage.info_html = info_html if more_info_url is not False: quick_info_on_stage.more_info_url = more_info_url if more_info_credit is not False: quick_info_on_stage.more_info_credit = more_info_credit # Clear out master entry value quick_info_on_stage.quick_info_master_we_vote_id = "" if google_civic_election_id is not False: quick_info_on_stage.google_civic_election_id = google_civic_election_id # We don't need to update date_last_changed here because set set auto_now=True in the field quick_info_on_stage.save() quick_info_on_stage_id = quick_info_on_stage.id quick_info_on_stage_found = True status = 'QUICK_INFO_UPDATED' except Exception as e: status = 'FAILED_TO_UPDATE_QUICK_INFO' elif results['MultipleObjectsReturned']: status = 'QUICK_INFO MultipleObjectsReturned' elif results['DoesNotExist']: try: # Create new quick_info entry if ballot_item_display_name is False: ballot_item_display_name = "" if language is False: language = ENGLISH if last_editor_we_vote_id is False: last_editor_we_vote_id = "" if contest_office_we_vote_id is False: contest_office_we_vote_id = "" if candidate_we_vote_id is False: candidate_we_vote_id = "" if politician_we_vote_id is False: politician_we_vote_id = "" if contest_measure_we_vote_id is False: contest_measure_we_vote_id = "" if google_civic_election_id is False: google_civic_election_id = 0 # Master related data if quick_info_master_we_vote_id is False: quick_info_master_we_vote_id = "" # Unique related data if info_html is False: info_html = "" if 
info_text is False: info_text = "" if more_info_url is False: more_info_url = "" if more_info_credit is False: more_info_credit = None quick_info_on_stage = QuickInfo( ballot_item_display_name=ballot_item_display_name, contest_office_we_vote_id=contest_office_we_vote_id, candidate_campaign_we_vote_id=candidate_we_vote_id, politician_we_vote_id=politician_we_vote_id, contest_measure_we_vote_id=contest_measure_we_vote_id, info_html=info_html, info_text=info_text, language=language, last_editor_we_vote_id=last_editor_we_vote_id, quick_info_master_we_vote_id=quick_info_master_we_vote_id, more_info_url=more_info_url, more_info_credit=more_info_credit, google_civic_election_id=google_civic_election_id # We don't need to update last_updated here because set set auto_now=True in the field ) quick_info_on_stage.save() quick_info_on_stage_id = quick_info_on_stage.id quick_info_on_stage_found = True status = 'CREATED_QUICK_INFO' except Exception as e: status = 'FAILED_TO_CREATE_NEW_QUICK_INFO' handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status) else: status = results['status'] results = { 'success': True if quick_info_on_stage_found else False, 'status': status, 'quick_info_found': quick_info_on_stage_found, 'quick_info_id': quick_info_on_stage_id, 'quick_info': quick_info_on_stage, } return results def delete_quick_info(self, quick_info_id): quick_info_id = convert_to_int(quick_info_id) quick_info_deleted = False try: if quick_info_id: results = self.retrieve_quick_info(quick_info_id) if results['quick_info_found']: quick_info = results['quick_info'] quick_info_id = quick_info.id quick_info.delete() quick_info_deleted = True except Exception as e: handle_exception(e, logger=logger) results = { 'success': quick_info_deleted, 'quick_info_deleted': quick_info_deleted, 'quick_info_id': quick_info_id, } return results class QuickInfoMaster(models.Model): """ Master data that can be applied to multiple ballot items """ # We are relying on 
built-in Python id field # The we_vote_id identifier is unique across all We Vote sites, and allows us to share our org info with other # organizations # It starts with "wv" then we add on a database specific identifier like "3v" (WeVoteSetting.site_unique_id_prefix) # then the string "infom" (for "info master"), and then a sequential integer like "123". # We keep the last value in WeVoteSetting.we_vote_id_last_quick_info_master_integer we_vote_id = models.CharField( verbose_name="we vote permanent id", max_length=255, default=None, null=True, blank=True, unique=True) # What kind of ballot item is this a master entry for? Mostly used so we can organize these entries kind_of_ballot_item = models.CharField(max_length=10, choices=KIND_OF_BALLOT_ITEM_CHOICES, default=OFFICE) # The language that this text is in language = models.CharField(max_length=5, choices=LANGUAGE_CHOICES, default=ENGLISH) info_text = models.TextField(null=True, blank=True) info_html = models.TextField(null=True, blank=True) master_entry_name = models.CharField(verbose_name="text name for quick info master entry", max_length=255, null=True, blank=True) more_info_credit = models.CharField(max_length=15, choices=SOURCE_SITE_CHOICES, default=BALLOTPEDIA, null=True, blank=True) # A link to any location with more information about this quick information more_info_url = models.URLField(blank=True, null=True, verbose_name='url with more the full entry for this info') last_updated = models.DateTimeField(verbose_name='date entered', null=True, auto_now=True) # TODO convert to date_last_changed # The unique id of the last person who edited this entry. 
last_editor_we_vote_id = models.CharField( verbose_name="last editor we vote id", max_length=255, null=True, blank=True, unique=False) def __unicode__(self): return self.we_vote_id class Meta: ordering = ('last_updated',) # We override the save function so we can auto-generate we_vote_id def save(self, *args, **kwargs): # Even if this organization came from another source we still need a unique we_vote_id if self.we_vote_id: self.we_vote_id = self.we_vote_id.strip().lower() if self.we_vote_id == "" or self.we_vote_id is None: # If there isn't a value... # ...generate a new id site_unique_id_prefix = fetch_site_unique_id_prefix() next_local_integer = fetch_next_we_vote_id_quick_info_master_integer() # "wv" = We Vote # site_unique_id_prefix = a generated (or assigned) unique id for one server running We Vote # "infom" = tells us this is a unique id for a quick_info_master entry # next_integer = a unique, sequential integer for this server - not necessarily tied to database id self.we_vote_id = "wv{site_unique_id_prefix}infom{next_integer}".format( site_unique_id_prefix=site_unique_id_prefix, next_integer=next_local_integer, ) super(QuickInfoMaster, self).save(*args, **kwargs) def is_english(self): if self.language == ENGLISH: return True return False def is_spanish(self): if self.language == SPANISH: return True return False def is_vietnamese(self): if self.language == VIETNAMESE: return True return False def is_chinese(self): if self.language == CHINESE: return True return False def is_tagalog(self): if self.language == TAGALOG: return True return False def more_info_credit_text(self): if self.more_info_credit == BALLOTPEDIA: return "Courtesy of Ballotpedia.org" if self.more_info_credit == WIKIPEDIA: return "Courtesy of Wikipedia.org" return "" class QuickInfoMasterManager(models.Manager): def __unicode__(self): return "QuickInfoMasterManager" def fetch_we_vote_id_from_local_id(self, quick_info_master_id): if positive_value_exists(quick_info_master_id): results = 
self.retrieve_quick_info_master_from_id(quick_info_master_id) if results['quick_info_master_found']: quick_info_master = results['quick_info_master'] return quick_info_master.we_vote_id else: return None else: return None def retrieve_quick_info_master_from_id(self, quick_info_master_id): quick_info_master_we_vote_id = None quick_info_master_manager = QuickInfoMasterManager() return quick_info_master_manager.retrieve_quick_info_master(quick_info_master_id, quick_info_master_we_vote_id) def retrieve_quick_info_master_from_we_vote_id(self, quick_info_master_we_vote_id): quick_info_master_id = 0 quick_info_master_manager = QuickInfoMasterManager() return quick_info_master_manager.retrieve_quick_info_master(quick_info_master_id, quick_info_master_we_vote_id) def retrieve_quick_info_master(self, quick_info_master_id, quick_info_master_we_vote_id=None): error_result = False exception_does_not_exist = False exception_multiple_object_returned = False quick_info_master = QuickInfoMaster() success = False try: if positive_value_exists(quick_info_master_id): status = "RETRIEVE_QUICK_INFO_MASTER_FOUND_WITH_ID" quick_info_master = QuickInfoMaster.objects.get(id=quick_info_master_id) quick_info_master_id = quick_info_master.id success = True elif positive_value_exists(quick_info_master_we_vote_id): status = "RETRIEVE_QUICK_INFO_MASTER_FOUND_WITH_WE_VOTE_ID" quick_info_master = QuickInfoMaster.objects.get(we_vote_id=quick_info_master_we_vote_id) quick_info_master_id = quick_info_master.id success = True else: status = "RETRIEVE_QUICK_INFO_MASTER_INSUFFICIENT_VARIABLES" except QuickInfoMaster.MultipleObjectsReturned as e: handle_record_found_more_than_one_exception(e, logger=logger) error_result = True exception_multiple_object_returned = True success = False status = "RETRIEVE_QUICK_INFO_MASTER_MULTIPLE_FOUND" except QuickInfoMaster.DoesNotExist: error_result = False exception_does_not_exist = True success = True status = "RETRIEVE_QUICK_INFO_MASTER_NONE_FOUND" results = { 
'success': success, 'status': status, 'error_result': error_result, 'DoesNotExist': exception_does_not_exist, 'MultipleObjectsReturned': exception_multiple_object_returned, 'quick_info_master_found': True if quick_info_master_id > 0 else False, 'quick_info_master_id': quick_info_master_id, 'quick_info_master': quick_info_master, } return results def update_or_create_quick_info_master(self, quick_info_master_id, quick_info_master_we_vote_id, master_entry_name, info_html, info_text, language, kind_of_ballot_item, last_editor_we_vote_id, more_info_url, more_info_credit, ): # Does a quick_info_master entry already exist? quick_info_master_manager = QuickInfoMasterManager() if positive_value_exists(quick_info_master_id) or positive_value_exists(quick_info_master_we_vote_id): results = quick_info_master_manager.retrieve_quick_info_master(quick_info_master_id, quick_info_master_we_vote_id) quick_info_master_found = results['quick_info_master_found'] else: quick_info_master_found = False if quick_info_master_found: quick_info_master = results['quick_info_master'] # noinspection PyBroadException try: if master_entry_name is not False: quick_info_master.master_entry_name = master_entry_name if info_html is not False: quick_info_master.info_html = info_html if info_text is not False: quick_info_master.info_text = info_text if language is not False: quick_info_master.language = language if kind_of_ballot_item is not False: quick_info_master.kind_of_ballot_item = kind_of_ballot_item if last_editor_we_vote_id is not False: quick_info_master.last_editor_we_vote_id = last_editor_we_vote_id if more_info_url is not False: quick_info_master.more_info_url = more_info_url if more_info_credit is not False: quick_info_master.more_info_credit = more_info_credit # We don't need to update date_last_changed here because set set auto_now=True in the field quick_info_master.save() quick_info_master_id = quick_info_master.id quick_info_master_found = True status = 'QUICK_INFO_MASTER_UPDATED' 
except Exception as e: status = 'FAILED_TO_UPDATE_QUICK_INFO_MASTER' else: try: # Create new quick_info_master entry # Create new quick_info entry if master_entry_name is False: master_entry_name = None if info_html is False: info_html = None if info_text is False: info_text = None if language is False: language = ENGLISH if last_editor_we_vote_id is False: last_editor_we_vote_id = None if more_info_url is False: more_info_url = None if more_info_credit is False: more_info_credit = None quick_info_master = QuickInfoMaster( master_entry_name=master_entry_name, info_html=info_html, info_text=info_text, language=language, kind_of_ballot_item=kind_of_ballot_item, last_editor_we_vote_id=last_editor_we_vote_id, more_info_url=more_info_url, more_info_credit=more_info_credit, # We don't need to update last_updated here because set set auto_now=True in the field ) quick_info_master.save() quick_info_master_id = quick_info_master.id quick_info_master_found = True status = 'CREATED_QUICK_INFO_MASTER' except Exception as e: status = 'FAILED_TO_CREATE_NEW_QUICK_INFO_MASTER' handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status) results = { 'success': True if quick_info_master_found else False, 'status': status, 'quick_info_master_found': quick_info_master_found, 'quick_info_master_id': quick_info_master_id, 'quick_info_master': quick_info_master, } return results def delete_quick_info_master(self, quick_info_master_id): quick_info_master_id = convert_to_int(quick_info_master_id) quick_info_master_deleted = False try: if quick_info_master_id: results = self.retrieve_quick_info_master(quick_info_master_id) if results['quick_info_master_found']: quick_info_master = results['quick_info_master'] quick_info_master_id = quick_info_master.id quick_info_master.delete() quick_info_master_deleted = True except Exception as e: handle_exception(e, logger=logger) results = { 'success': quick_info_master_deleted, 'quick_info_master_deleted': 
quick_info_master_deleted, 'quick_info_master_id': quick_info_master_id, } return results<|fim▁end|>
handle_record_found_more_than_one_exception(e, logger=logger) error_result = True
<|file_name|>convertTupleWithoutParenthesesToList.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
xs = 1<caret>, 2
<|file_name|>platform.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import Types = require('vs/base/common/types'); import Assert = require('vs/base/common/assert'); import {IInstantiationService, IConstructorSignature0} from 'vs/platform/instantiation/common/instantiation'; export interface IRegistry { /** * Adds the extension functions and properties defined by data to the * platform. The provided id must be unique. * @param id a unique identifier * @param data a contribution */ add(id: string, data: any): void; /** * Returns true iff there is an extension with the provided id. * @param id an extension idenifier */ knows(id: string): boolean; /** * Returns the extension functions and properties defined by the specified key or null. * @param id an extension idenifier */ as(id: string): any; as<T>(id: string): T; } class RegistryImpl implements IRegistry { private data: { [id: string]: any; }; constructor() { this.data = {}; } public add(id: string, data: any): void { Assert.ok(Types.isString(id)); Assert.ok(Types.isObject(data)); Assert.ok(!this.data.hasOwnProperty(id), 'There is already an extension with this id'); this.data[id] = data; } public knows(id: string): boolean { return this.data.hasOwnProperty(id); } public as(id: string): any { return this.data[id] || null; } } export var Registry = <IRegistry>new RegistryImpl(); /** * A base class for registries that leverage the instantiation service to create instances. 
*/ export class BaseRegistry<T> { private toBeInstantiated: IConstructorSignature0<T>[] = []; private instances: T[] = []; private instantiationService: IInstantiationService; public setInstantiationService(service: IInstantiationService): void { this.instantiationService = service;<|fim▁hole|> let entry = this.toBeInstantiated.shift(); this.instantiate(entry); } } private instantiate(ctor: IConstructorSignature0<T>): void { let instance = this.instantiationService.createInstance(ctor); this.instances.push(instance); } _register(ctor: IConstructorSignature0<T>): void { if (this.instantiationService) { this.instantiate(ctor); } else { this.toBeInstantiated.push(ctor); } } _getInstances(): T[] { return this.instances.slice(0); } _setInstances(instances: T[]): void { this.instances = instances; } }<|fim▁end|>
while (this.toBeInstantiated.length > 0) {
<|file_name|>configurationGui.py<|end_file_name|><|fim▁begin|>''' ' configurationGui.py ' Author: Iker Pedrosa ' ' License: ' This file is part of orderedFileCopy. ' ' orderedFileCopy is free software: you can redistribute it and/or modify ' it under the terms of the GNU General Public License as published by ' the Free Software Foundation, either version 3 of the License, or ' (at your option) any later version. ' ' orderedFileCopy is distributed in the hope that it will be useful, ' but WITHOUT ANY WARRANTY; without even the implied warranty of ' MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ' GNU General Public License for more details. ' ' You should have received a copy of the GNU General Public License ' along with orderedFileCopy. If not, see <http://www.gnu.org/licenses/>. ' ''' #Imported modules from Tkinter import * from fileManager import * import tkFileDialog import globals #Global variables class configurationGUI: def __init__(self, master): master.grab_set() #The contrary is master.grab_release() #Window title self.master = master master.title("Configuration menu") #Window position and size windowWidth = 600 windowHeight = 150 screenWidth = master.winfo_screenwidth() screenHeight = master.winfo_screenheight() print("configurationGui: screenWidth %d" % screenWidth) print("configurationGui: screenHeight %d" % screenHeight) windowWidthPosition = (screenWidth - windowWidth) / 2 windowHeightPosition = ((screenHeight - windowHeight) / 2) - windowHeight print("configurationGui: windowWidthPosition %d" % windowWidthPosition) print("configurationGui: windowHeightPosition %d" % windowHeightPosition) master.geometry("%dx%d+%d+%d" % (windowWidth, windowHeight, windowWidthPosition, windowHeightPosition)) #Create layouts top_frame = Frame(master, width = 600, height = 50) centre_frame = Frame(master, width = 600, height = 50) below_frame = Frame(master, width = 600, height = 50) bottom_frame = Frame(master, width = 600, height = 50) 
top_frame.grid(row = 0) centre_frame.grid(row = 1) below_frame.grid(row = 2) bottom_frame.grid(row = 3) #Extension information self.labelExtension = Label(top_frame, height = 1, width = 30, font = ("Helvetica", 11), text = "File extension to copy:") self.labelExtension.grid(row = 0, column = 0) self.textExtension = Text(top_frame, height = 1, width = 5, font = ("Helvetica", 11)) self.textExtension.grid(row = 0, column = 1) self.textExtension.insert(END, globals.extension) #Default origin information globals.windowDefaultOrigin = StringVar() globals.windowDefaultOrigin.set(globals.selectedDefaultOrigin) self.textDefaultOriginPath = Entry(centre_frame, width = 55, font = ("Helvetica", 11), textvariable = globals.windowDefaultOrigin) self.textDefaultOriginPath.grid(row = 1, column = 0) self.buttonDefaultOriginPath = Button(centre_frame, text = "...", command = self.defaultOriginFileChooser) self.buttonDefaultOriginPath.grid(row = 1, column = 1, padx = 10) #Destination by USB information self.labelUsb = Label(below_frame, width = 15, font = ("Helvetica", 11), text = "Destination by USB") self.labelUsb.grid(row = 0, column = 0) self.localUsbState = IntVar() self.localUsbState.set(globals.selectedUsbState) self.checkboxUsb = Checkbutton(below_frame, command = self.activateUsbName, variable = self.localUsbState, onvalue=1, offvalue=0) self.checkboxUsb.grid(row = 0, column = 1) self.textUsb = Text(below_frame, height = 1, width = 25, font = ("Helvetica", 11), state = "disabled") self.textUsb.grid(row = 0, column = 2) if globals.selectedUsbState == 1: self.textUsb.configure(state = "normal") else: self.textUsb.configure(state = "disabled") self.textUsb.insert(END, globals.selectedUsbName) #Buttons self.buttonAccept = Button(bottom_frame, text = "Accept", command = self.accept) self.buttonAccept.grid(row = 2, column = 0, padx = 25, pady = 20) self.buttonCancel = Button(bottom_frame, text = "Cancel", command = self.cancel) self.buttonCancel.grid(row = 2, column = 1, padx = 
25, pady = 20) #Finished __init__ def defaultOriginFileChooser(self): resultPath = tkFileDialog.askdirectory(initialdir = globals.selectedDefaultOrigin) + "/" if resultPath != "/" and resultPath != "": globals.selectedDefaultOrigin = resultPath.encode("utf-8") globals.windowDefaultOrigin.set(globals.selectedDefaultOrigin) #Finished originFileChooser def accept(self): globals.extension = self.textExtension.get("1.0", "end-1c") globals.selectedUsbName = self.textUsb.get("1.0", "end-1c") writeConfiguration() print("accept: globals.selectedDefaultOrigin '%s'" % globals.selectedDefaultOrigin) print("accept: globals.extension '%s'" % globals.extension) self.master.destroy() #Finished accept def activateUsbName(self):<|fim▁hole|> globals.selectedUsbState = 1 self.textUsb.configure(state = "normal") self.textUsb.insert(END, globals.selectedUsbName) else: globals.selectedUsbState = 0 self.textUsb.delete("1.0", END) self.textUsb.configure(state = "disabled") #Finished activateUsbName def cancel(self): self.master.destroy() #Finished cancel #Finished configurationGUI<|fim▁end|>
if self.localUsbState.get() == 1:
<|file_name|>test.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var assert = require('assert') var d = new Deferred() var t = require('tap') t.match(d, { resolve: Function, reject: Function, promise: Object })<|fim▁end|>
var Deferred = require('./')
<|file_name|>ghdata-api-client.js<|end_file_name|><|fim▁begin|>/* SPDX-License-Identifier: MIT */ /** * Handles interaction with a GHData server. * @constructor */ function GHDataAPIClient (apiUrl, owner, repo, apiVersion) { this.owner = owner || ''; this.repo = repo || ''; this.url = apiUrl; this.apiversion = apiVersion || 'unstable'; } /* Request Handling * Create a friendly wrapper around XMLHttpRequest --------------------------------------------------------------*/ /** * Wraps XMLHttpRequest with many goodies. Credit to SomeKittens on StackOverflow. * @param {Object} opts - Stores the url (opts.url), method (opts.method), headers (opts.headers) and query parameters (opt.params). All optional. * @returns {Promise} Resolves with XMLHttpResponse.response */ GHDataAPIClient.prototype.request = function (opts) { // Use GHData by default opts.endpoint = opts.endpoint || ''; opts.url = opts.url || (this.url + this.apiversion + '/' + this.owner + '/' + this.repo + '/' + opts.endpoint); opts.method = opts.method || 'GET'; return new Promise(function (resolve, reject) { var xhr = new XMLHttpRequest(); xhr.open(opts.method, opts.url); xhr.onload = function () { if (this.status >= 200 && this.status < 300) { resolve(xhr.response); } else { reject({ status: this.status, statusText: xhr.statusText }); } }; xhr.onerror = function () { reject({ status: this.status, statusText: xhr.statusText }); }; if (opts.headers) { Object.keys(opts.headers).forEach(function (key) { xhr.setRequestHeader(key, opts.headers[key]); }); } var params = opts.params; // We'll need to stringify if we've been given an object // If we have a string, this is skipped. 
if (params && typeof params === 'object') { params = Object.keys(params).map(function (key) { return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]); }).join('&'); } xhr.send(params); }); }; /** * Wraps the GET requests with the correct options for most GHData calls * @param {String} endpoint - Endpoint to send the request to * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with Object created from the JSON returned by GHData */ GHDataAPIClient.prototype.get = function (endpoint, params) { var self = this; return new Promise(function (resolve, request) { self.request({ method: 'GET', endpoint: endpoint, params: params }).then(function (response) { // Lets make this thing JSON var result = JSON.parse(response); resolve(result); }); }); }; /* Endpoints * Wrap all the API endpoints to make it as simple as possible --------------------------------------------------------------*/ /** * Commits timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.commitsByWeek = function (params) { return this.get('timeseries/commits', params); }; /** * Forks timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with forks timeeseries object */ GHDataAPIClient.prototype.forksByWeek = function (params) { return this.get('timeseries/forks', params); }; /** * Stargazers timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.stargazersByWeek = function (params) { return this.get('timeseries/stargazers', params); }; /** * Issues timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.issuesByWeek = function (params) { return 
this.get('timeseries/issues', params); }; /** * Pull Requests timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.pullRequestsByWeek = function (params) { return this.get('timeseries/pulls', params); }; /** * Pull Requests timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.contributionsByWeek = function (params) {<|fim▁hole|> * How quickly after issues are made they are commented on * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.issuesResponseTime = function (params) { return this.get('timeseries/issues/response_time', params); }; /** * Contributions timeseries * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.contributors = function (params) { return this.get('timeseries/contributors', params); }; /** * Locations of the committers * @param {Object} params - Query string params to pass to the API * @returns {Promise} Resolves with commits timeseries object */ GHDataAPIClient.prototype.committerLocations = function (params) { return this.get('commits/locations', params); };<|fim▁end|>
return this.get('timeseries/contributions', params); }; /**
<|file_name|>step_interact.js<|end_file_name|><|fim▁begin|>function OpenReportOperations() { $("#report-operations-div").show(); $("#page-operations-div").hide(); $("#events-operations-div").hide(); $("#editandsave-operations-div").hide(); $("#report-operations-li").addClass('active'); $('#page-operations-li').removeClass('active'); $('#events-operations-li').removeClass('active'); $('#editandsave-operations-li').removeClass('active'); $("#report-operations-div .function-ul li.active").click() $("#selected-catogory-button").html("Report operations"); } function OpenPageOperations() { $("#page-operations-div").show(); $("#report-operations-div").hide(); $("#events-operations-div").hide(); $("#editandsave-operations-div").hide(); $("#page-operations-li").addClass('active'); $('#report-operations-li').removeClass('active'); $('#events-operations-li').removeClass('active'); $('#editandsave-operations-li').removeClass('active'); $("#page-operations-div .function-ul li.active").click();<|fim▁hole|>function OpenEventOperations() { $("#page-operations-div").hide(); $("#report-operations-div").hide(); $("#events-operations-div").show(); $("#editandsave-operations-div").hide(); $("#page-operations-li").removeClass('active'); $('#report-operations-li').removeClass('active'); $('#events-operations-li').addClass('active'); $('#editandsave-operations-li').removeClass('active'); $("#events-operations-div .function-ul li.active").click(); $("#selected-catogory-button").html("Events Listener"); } function OpenEditAndSaveOperations() { $("#page-operations-div").hide(); $("#report-operations-div").hide(); $("#events-operations-div").hide(); $("#editandsave-operations-div").show(); $("#page-operations-li").removeClass('active'); $('#report-operations-li').removeClass('active'); $('#events-operations-li').removeClass('active'); $('#editandsave-operations-li').addClass('active'); $("#editandsave-operations-div .function-ul li.active").click(); $("#selected-catogory-button").html("Edit 
and save operations"); } function SetToggleHandler(devId) { var selector = "#" + devId + " .function-ul li"; $(selector).each(function(index, li) { $(li).click(function() { $(selector).removeClass('active'); $(li).addClass('active'); }); }); }<|fim▁end|>
$("#selected-catogory-button").html("Page operations"); }
<|file_name|>test_checkout.py<|end_file_name|><|fim▁begin|>import pytest from django.conf import settings from django.contrib.auth.models import AnonymousUser from mock import MagicMock, Mock from prices import Price from saleor.checkout import views from saleor.checkout.core import STORAGE_SESSION_KEY, Checkout from saleor.shipping.models import ShippingMethodCountry from saleor.userprofile.models import Address def test_checkout_version(): checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') storage = checkout.for_storage() assert storage['version'] == Checkout.VERSION @pytest.mark.parametrize('storage_data, expected_storage', [ ({'version': Checkout.VERSION, 'new': 1}, {'version': Checkout.VERSION, 'new': 1}), ({'version': 'wrong', 'new': 1}, {'version': Checkout.VERSION}), ({'new': 1}, {'version': Checkout.VERSION}), ({}, {'version': Checkout.VERSION}), (None, {'version': Checkout.VERSION}), ]) def test_checkout_version_with_from_storage(storage_data, expected_storage): checkout = Checkout.from_storage( storage_data, Mock(), AnonymousUser(), 'tracking_code') storage = checkout.for_storage() assert storage == expected_storage def test_checkout_clear_storage(): checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') checkout.storage['new'] = 1 checkout.clear_storage() assert checkout.storage is None assert checkout.modified is True def test_checkout_is_shipping_required(): cart = Mock(is_shipping_required=Mock(return_value=True)) checkout = Checkout(cart, AnonymousUser(), 'tracking_code') assert checkout.is_shipping_required is True def test_checkout_deliveries(): partition = Mock( get_total=Mock(return_value=Price(10, currency=settings.DEFAULT_CURRENCY)), get_price_per_item=Mock(return_value=Price(10, currency=settings.DEFAULT_CURRENCY))) def f(): yield partition partition.__iter__ = Mock(return_value=f()) cart = Mock(partition=Mock(return_value=[partition]), currency=settings.DEFAULT_CURRENCY) checkout = Checkout( cart, AnonymousUser(), 
'tracking_code') deliveries = list(checkout.deliveries) assert deliveries[0][1] == Price(0, currency=settings.DEFAULT_CURRENCY) assert deliveries[0][2] == partition.get_total() assert deliveries[0][0][0][0] == partition def test_checkout_deliveries_with_shipping_method(monkeypatch): shipping_cost = 5 items_cost = 5 partition = Mock( is_shipping_required=MagicMock(return_value=True), get_total=Mock(return_value=Price(items_cost, currency=settings.DEFAULT_CURRENCY)), get_price_per_item=Mock(return_value=Price(items_cost, currency=settings.DEFAULT_CURRENCY))) def f(): yield partition partition.__iter__ = Mock(return_value=f()) cart = Mock(partition=Mock(return_value=[partition]), currency=settings.DEFAULT_CURRENCY) shipping_method_mock = Mock(get_total=Mock(return_value=Price(shipping_cost, currency=settings.DEFAULT_CURRENCY))) monkeypatch.setattr(Checkout, 'shipping_method', shipping_method_mock) checkout = Checkout( cart, AnonymousUser(), 'tracking_code') deliveries = list(checkout.deliveries) assert deliveries[0][1] == Price(shipping_cost, currency=settings.DEFAULT_CURRENCY) assert deliveries[0][2] == Price(items_cost + shipping_cost, currency=settings.DEFAULT_CURRENCY) assert deliveries[0][0][0][0] == partition @pytest.mark.parametrize('user, shipping', [ (Mock(default_shipping_address='user_shipping'), 'user_shipping'), (AnonymousUser(), None), ]) def test_checkout_shipping_address_with_anonymous_user(user, shipping): checkout = Checkout(Mock(), user, 'tracking_code') assert checkout.shipping_address == shipping @pytest.mark.parametrize('address_objects, shipping', [ (Mock(get=Mock(return_value='shipping')), 'shipping'), (Mock(get=Mock(side_effect=Address.DoesNotExist)), None), ]) def test_checkout_shipping_address_with_storage(address_objects, shipping, monkeypatch): monkeypatch.setattr('saleor.checkout.core.Address.objects', address_objects) checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') checkout.storage['shipping_address'] = {'id': 1} assert 
checkout.shipping_address == shipping def test_checkout_shipping_address_setter(): address = Address(first_name='Jan', last_name='Kowalski') checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') checkout.shipping_address = address assert checkout.storage['shipping_address'] == { 'city': u'', 'city_area': u'', 'company_name': u'', 'country': '', 'phone': u'', 'country_area': u'', 'first_name': 'Jan', 'id': None, 'last_name': 'Kowalski', 'postal_code': u'', 'street_address_1': u'', 'street_address_2': u''} @pytest.mark.parametrize('shipping_address, shipping_method, value', [ (Mock(country=Mock(code='PL')), Mock(country_code='PL', __eq__=lambda n, o: n.country_code == o.country_code), Mock(country_code='PL')), (Mock(country=Mock(code='DE')), Mock(country_code='PL'), None), (None, Mock(country_code='PL'), None), ]) def test_checkout_shipping_method(shipping_address, shipping_method, value, monkeypatch): queryset = Mock(get=Mock(return_value=shipping_method)) monkeypatch.setattr(Checkout, 'shipping_address', shipping_address) monkeypatch.setattr('saleor.checkout.core.ShippingMethodCountry.objects', queryset) checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') checkout.storage['shipping_method_country_id'] = 1 assert checkout.shipping_method == value def test_checkout_shipping_does_not_exists(monkeypatch): queryset = Mock(get=Mock(side_effect=ShippingMethodCountry.DoesNotExist)) monkeypatch.setattr('saleor.checkout.core.ShippingMethodCountry.objects', queryset) checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') checkout.storage['shipping_method_country_id'] = 1 assert checkout.shipping_method is None def test_checkout_shipping_method_setter(): shipping_method = Mock(id=1) checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code') assert checkout.modified is False checkout.shipping_method = shipping_method assert checkout.modified is True assert checkout.storage['shipping_method_country_id'] == 1 @pytest.mark.parametrize('user, address', [ 
(AnonymousUser(), None), (Mock(default_billing_address='billing_address', addresses=Mock(is_authenticated=Mock(return_value=True))), 'billing_address'), ]) def test_checkout_billing_address(user, address): checkout = Checkout(Mock(), user, 'tracking_code') assert checkout.billing_address == address @pytest.mark.parametrize('cart, status_code, url', [ (Mock(__len__=Mock(return_value=0)), 302, '/cart/'), (Mock(__len__=Mock(return_value=1), is_shipping_required=Mock(return_value=True)), 302, '/checkout/shipping-address/'), (Mock(__len__=Mock(return_value=1), is_shipping_required=Mock(return_value=False)), 302, '/checkout/summary/'), (Mock(__len__=Mock(return_value=0), is_shipping_required=Mock(return_value=False)), 302, '/cart/'), ]) def test_index_view(cart, status_code, url, rf): checkout = Checkout(cart, AnonymousUser(), 'tracking_code') request = rf.get('checkout:index') request.user = checkout.user request.session = {STORAGE_SESSION_KEY: checkout.for_storage()} request.discounts = [] response = views.index_view(request, checkout, checkout.cart)<|fim▁hole|><|fim▁end|>
assert response.status_code == status_code assert response.url == url
<|file_name|>service.rs<|end_file_name|><|fim▁begin|>// TODO (CM): Take another look at the public exports of this module // (specifically, `pub mod spec`, and the various `pub use` // statements. Playing fast-and-loose with our imports has led to code // that's more confusing that it probably needs to be. // TODO (CM): Take a deeper look at the direct consumption of // Prost-generated types (habitat_sup_protocol::types::*) in // here. Ideally, those would exist only at the periphery of the // system, and we'd use separate internal types for our core logic. mod context; mod health; mod hook_runner; mod hooks; #[cfg(windows)] mod pipe_hook_client; pub mod spec; mod supervisor; mod terminator; use self::{context::RenderContext, hook_runner::HookRunner, hooks::{HookCompileTable, HookTable}, supervisor::Supervisor}; pub use self::{health::{HealthCheckBundle, HealthCheckHookStatus, HealthCheckResult}, hooks::{HealthCheckHook, ProcessOutput, StandardStreams}, spec::{DesiredState, ServiceSpec}}; use crate::{census::{CensusGroup, CensusRing, ElectionStatus, ServiceFile}, error::{Error, Result}, manager::{event, sync::GatewayState, FsCfg, ServicePidSource, ShutdownConfig, Sys}}; use futures::future::{self, AbortHandle}; use habitat_butterfly::rumor::service::Service as ServiceRumor; #[cfg(windows)] use habitat_common::templating::package::DEFAULT_USER; pub use habitat_common::templating::{config::{Cfg, UserConfigPath}, package::{Env, Pkg, PkgProxy}}; use habitat_common::{outputln, templating::{config::CfgRenderer, hooks::Hook}, FeatureFlag}; #[cfg(windows)] use habitat_core::os::users; use habitat_core::{crypto::Blake2bHash, fs::{atomic_write, svc_hooks_path, SvcDir, FS_ROOT_PATH}, os::process::ShutdownTimeout, package::{metadata::Bind, PackageIdent, PackageInstall}, service::{ServiceBind, ServiceGroup}, ChannelIdent}; use habitat_launcher_client::LauncherCli; use habitat_sup_protocol::types::BindingMode; pub use habitat_sup_protocol::types::{ProcessState, Topology, 
UpdateCondition, UpdateStrategy}; use parking_lot::RwLock; use prometheus::{HistogramTimer, HistogramVec}; use serde::{ser::SerializeStruct, Serialize, Serializer}; use std::{self, collections::HashSet, fmt, fs, ops::Deref, path::{Path, PathBuf}, result, sync::{Arc, Mutex}, time::SystemTime}; static LOGKEY: &str = "SR"; #[cfg(not(windows))] pub const GOSSIP_FILE_PERMISSIONS: u32 = 0o640; lazy_static! { static ref HOOK_DURATION: HistogramVec = register_histogram_vec!("hab_sup_hook_duration_seconds", "The time it takes for a hook to run", &["hook"]).unwrap(); } /// When evaluating whether a particular service group can satisfy a /// bind of the Service, there are several states it can be /// in. Depending on which point in the lifecycle of the Service we /// are in, we may want to take different actions depending on the /// current status. enum BindStatus<'a> { /// The bound group is not present in the census NotPresent, /// The bound group is present in the census, but has no active /// members. Empty, /// The bound group is present in the census, has active members, /// but does not satisfy the contract of the bind; the set of /// unsatisfied exports is returned. Unsatisfied(HashSet<&'a String>), /// The bound group is present, has active members, and fully /// satisfies the contract of the bind. Satisfied, /// An error was encountered determining the status Unknown(Error), } /// Encapsulate changes to `/hooks` and `/config`. #[derive(Default)] struct TemplateUpdate { hooks: HookCompileTable, config_changed: bool, have_reconfigure_hook: bool, } impl TemplateUpdate { fn new(hooks: HookCompileTable, config_changed: bool, have_reconfigure_hook: bool) -> Self { Self { hooks, config_changed, have_reconfigure_hook } } /// Returns `true` if the service needs to be restarted. /// /// A restart is needed under the following conditions: /// 1. the `run` or `post-run` hooks have changed. 
A restart is limited to these hooks /// because they are the only hooks that can impact the execution of the service. /// 2. `/config` changed and there is no `reconfigure` hook fn needs_restart(&self) -> bool { self.hooks.run_changed() || self.hooks.post_run_changed() || (!self.have_reconfigure_hook && self.config_changed) } /// Returns `true` if the service needs to be reconfigured. /// /// A reconfigure is needed if `/config` or the `reconfigure` hook changed. fn needs_reconfigure(&self) -> bool { self.config_changed || self.hooks.reconfigure_changed() || self.hooks.reload_changed() } } #[derive(Clone, Debug, PartialEq, Eq)] enum InitializationState { Uninitialized, Initializing, InitializerFinished, Initialized, } #[derive(Debug)] pub struct Service { spec: ServiceSpec, pub service_group: ServiceGroup, // TODO: `spec_file` is only used for serialization; unsure if // that's even useful, given that it's always the same value for a // given service. spec_file: PathBuf, pub cfg: Cfg, pub pkg: Pkg, pub sys: Arc<Sys>, pub user_config_updated: bool, // TODO (DM): This flag is a temporary hack to signal to the `Manager` that this service needs // to be restarted. As we continue refactoring lifecycle hooks this flag should be removed. pub needs_restart: bool, // TODO (DM): The need to track initialization state across ticks would be removed if we // migrated away from the event loop architecture to an architecture that had a top level // `Service` future. See https://github.com/habitat-sh/habitat/issues/7112 initialization_state: Arc<RwLock<InitializationState>>, config_renderer: CfgRenderer, // Note: This field is really only needed for serializing a // Service in the gateway (see ServiceProxy's Serialize // implementation). Ideally, we could get rid of this, since we're // *also* storing the health check result directly (see // manager::GatewayState#health_check_data), but because of how // the data is currently rendered, this is a little complicated. 
// // In order to access this field in an asynchronous health check // hook, we need to wrap some Arc<Mutex<_>> protection around it // :( health_check_result: Arc<Mutex<HealthCheckResult>>, last_election_status: ElectionStatus, /// The binds that the current service package declares, both /// required and optional. We don't differentiate because this is /// used to validate the user-specified bindings against the /// current state of the census; once you get into the actual /// running of the service, the distinction is immaterial. all_pkg_binds: Vec<Bind>, /// Binds specified by the user that are currently mapped to /// service groups that do _not_ satisfy the bind's contract, as /// defined in the service's current package. /// /// They may not satisfy them because they do not have the /// requisite exports, because no live members of the group exist, /// or because the group itself does not exist in the census. /// /// We don't serialize because this is purely runtime information /// that should be reconciled against the current state of the /// census. unsatisfied_binds: HashSet<ServiceBind>, hooks: HookTable, manager_fs_cfg: Arc<FsCfg>, supervisor: Arc<Mutex<Supervisor>>, gateway_state: Arc<GatewayState>, /// A "handle" to the never-ending future that periodically runs /// health checks on this service. This is the means by which we /// can stop that future. 
health_check_handle: Option<AbortHandle>, post_run_handle: Option<AbortHandle>, initialize_handle: Option<AbortHandle>, } impl Service { pub(crate) fn bldr_url(&self) -> String { self.spec.bldr_url.clone() } pub(crate) fn channel(&self) -> ChannelIdent { self.spec.channel.clone() } pub(crate) fn spec_ident(&self) -> PackageIdent { self.spec.ident.clone() } pub(crate) fn topology(&self) -> Topology { self.spec.topology } pub(crate) fn update_strategy(&self) -> UpdateStrategy { self.spec.update_strategy } pub(crate) fn update_condition(&self) -> UpdateCondition { self.spec.update_condition } pub(crate) fn shutdown_timeout(&self) -> Option<ShutdownTimeout> { self.spec.shutdown_timeout } pub(crate) fn spec(&self) -> ServiceSpec { self.spec.clone() } pub(crate) fn set_spec(&mut self, spec: ServiceSpec) { trace!("Setting spec for {}: {:?}", self.spec.ident, spec); self.spec = spec } #[allow(clippy::too_many_arguments)] async fn with_package(sys: Arc<Sys>, package: &PackageInstall, spec: ServiceSpec, manager_fs_cfg: Arc<FsCfg>, organization: Option<&str>, gateway_state: Arc<GatewayState>, pid_source: ServicePidSource, feature_flags: FeatureFlag) -> Result<Service> { spec.validate(package)?; let all_pkg_binds = package.all_binds()?; let mut pkg = Self::resolve_pkg(package, &spec).await?; if let Some(timeout) = spec.shutdown_timeout { pkg.shutdown_timeout = timeout; } let spec_file = manager_fs_cfg.specs_path.join(spec.file()); let service_group = ServiceGroup::new(&pkg.name, &spec.group, organization)?; let config_root = Self::config_root(&pkg, spec.config_from.as_ref()); let hooks_root = Self::hooks_root(&pkg, spec.config_from.as_ref()); let cfg = Cfg::new(&pkg, spec.config_from.as_ref())?; Ok(Service { spec, sys, cfg, config_renderer: CfgRenderer::new(&config_root)?, health_check_result: Arc::new(Mutex::new(HealthCheckResult::Unknown)), hooks: HookTable::load(&pkg.name, &hooks_root, svc_hooks_path(&service_group.service()), feature_flags), last_election_status: 
ElectionStatus::None, user_config_updated: false, needs_restart: false, initialization_state: Arc::new(RwLock::new(InitializationState::Uninitialized)), manager_fs_cfg, supervisor: Arc::new(Mutex::new(Supervisor::new(&service_group, pid_source))), pkg, service_group, all_pkg_binds, unsatisfied_binds: HashSet::new(), spec_file,<|fim▁hole|> initialize_handle: None }) } // And now prepare yourself for a little horribleness...Ready? // In releases 0.88.0 and prior, we would run hooks under // the hab user account on windows if it existed and no other // svc_user was specified just like we do on linux. That is problematic // and not a ubiquitous pattern for windows. The default user is now // always the current user. However, packages built on those older // versions included a SVC_USER metafile with the 'hab' user by default. // So to protect for scenarios where a user has launched an older package, // is on windows and has a 'hab' account on the system BUT never intended // to run hooks under that account and therefore has not passed a // '--password' argument to 'hab svc load', we will revert the user to // the current user. #[cfg(windows)] async fn resolve_pkg(package: &PackageInstall, spec: &ServiceSpec) -> Result<Pkg> { let mut pkg = Pkg::from_install(package).await?; if spec.svc_encrypted_password.is_none() && pkg.svc_user == DEFAULT_USER { if let Some(user) = users::get_current_username()? { pkg.svc_user = user; } } Ok(pkg) } #[cfg(unix)] async fn resolve_pkg(package: &PackageInstall, _spec: &ServiceSpec) -> Result<Pkg> { Ok(Pkg::from_install(package).await?) } /// Returns the config root given the package and optional config-from path. fn config_root(package: &Pkg, config_from: Option<&PathBuf>) -> PathBuf { config_from.map(PathBuf::as_path) .unwrap_or(&package.path) .join("config") } /// Returns the hooks root given the package and optional config-from path. 
fn hooks_root(package: &Pkg, config_from: Option<&PathBuf>) -> PathBuf { config_from.map(PathBuf::as_path) .unwrap_or(&package.path) .join("hooks") } pub async fn new(sys: Arc<Sys>, spec: ServiceSpec, manager_fs_cfg: Arc<FsCfg>, organization: Option<&str>, gateway_state: Arc<GatewayState>, pid_source: ServicePidSource, feature_flags: FeatureFlag) -> Result<Service> { // The package for a spec should already be installed. let fs_root_path = Path::new(&*FS_ROOT_PATH); let package = PackageInstall::load(&spec.ident, Some(fs_root_path))?; Ok(Self::with_package(sys, &package, spec, manager_fs_cfg, organization, gateway_state, pid_source, feature_flags).await?) } /// Create the service path for this package. pub fn create_svc_path(&self) -> Result<()> { debug!("{}, Creating svc paths", self.service_group); SvcDir::new(&self.pkg.name, &self.pkg.svc_user, &self.pkg.svc_group).create()?; Ok(()) } fn start(&mut self, launcher: &LauncherCli) { debug!("Starting service {}", self.pkg.ident); let result = self.supervisor .lock() .expect("Couldn't lock supervisor") .start(&self.pkg, &self.service_group, launcher, self.spec.svc_encrypted_password.as_deref()); match result { Ok(_) => { self.needs_restart = false; self.start_health_checks(); } Err(e) => { outputln!(preamble self.service_group, "Service start failed: {}", e); } } } fn initialized(&self) -> bool { *self.initialization_state.read() == InitializationState::Initialized } /// Initiate an endless task that performs periodic health checks for the service and takes /// appropriate actions upon receiving the results of a health check. 
The actions taken are: /// /// * Cache the health check result for this service /// * Set the health check result for this service in the gateway state /// * Send a `HealthCheckEvent` over the event stream fn start_health_checks(&mut self) { debug!("Starting health checks for {}", self.pkg.ident); let mut rx = health::check_repeatedly(Arc::clone(&self.supervisor), self.hooks.health_check.clone(), self.spec.health_check_interval, self.service_group.clone(), self.pkg.clone(), self.spec.svc_encrypted_password.clone()); let service_group = self.service_group.clone(); let service_event_metadata = self.to_service_metadata(); let service_health_result = Arc::clone(&self.health_check_result); let gateway_state = Arc::clone(&self.gateway_state); // Initialize the gateway_state for this service to Unknown. gateway_state.lock_gsw() .set_health_of(service_group.clone(), HealthCheckResult::Unknown); let f = async move { while let Some(HealthCheckBundle { status, result, interval, }) = rx.recv().await { debug!("Caching HealthCheckResult = '{}' for '{}'", result, service_group); *service_health_result.lock() .expect("Could not unlock service_health_result") = result; gateway_state.lock_gsw() .set_health_of(service_group.clone(), result); event::health_check(service_event_metadata.clone(), result, status, interval); } }; let (f, handle) = future::abortable(f); self.health_check_handle = Some(handle); tokio::spawn(f); } /// Stop the endless future that performs health checks for the /// service. fn stop_health_checks(&mut self) { if let Some(h) = self.health_check_handle.take() { debug!("Stopping health checks for {}", self.pkg.ident); h.abort(); } } /// Any currently-running health check future will be terminated /// and a new one started in its place. /// /// This is mainly good for "resetting" the checks, and will /// initiate a new health check immediately. 
fn restart_health_checks(&mut self) { debug!("Restarting health checks for {}", self.pkg.ident); self.stop_health_checks(); self.start_health_checks(); } /// Called when the Supervisor reattaches itself to an already /// running service. Use this to re-initiate any associated /// processes, futures, etc. /// /// This should generally be the opposite of `Service::detach`. fn reattach(&mut self) { outputln!("Reattaching to {}", self.service_group); *self.initialization_state.write() = InitializationState::Initialized; self.restart_health_checks(); // We intentionally do not restart the `post_run` retry future. Currently, there is not // a way to track if `post_run` ran successfully following a Supervisor restart. // See https://github.com/habitat-sh/habitat/issues/6739 } /// Called when stopping the Supervisor for an update and /// before stopping a service. Should *not* stop the service /// process itself, but should stop any associated processes, /// futures, etc., that would otherwise prevent the Supervisor /// from shutting itself down. /// /// Currently, this means stopping any associated long-running /// futures. /// /// See also `Service::reattach`, as these methods should /// generally be mirror images of each other. pub fn detach(&mut self) { debug!("Detatching service {}", self.pkg.ident); self.stop_initialize(); self.stop_post_run(); self.stop_health_checks(); } /// Return a future that will shut down a service, performing any /// necessary cleanup, and run its post-stop hook, if any. 
/// # Locking for the returned Future (see locking.md) /// * `GatewayState::inner` (write) pub async fn stop_gsw(&mut self, shutdown_config: ShutdownConfig) { debug!("Stopping service {}", self.pkg.ident); self.detach(); let service_group = self.service_group.clone(); let gs = Arc::clone(&self.gateway_state); self.supervisor .lock() .expect("Couldn't lock supervisor") .stop(shutdown_config); gs.lock_gsw().remove(&service_group); if let Some(hook) = self.post_stop() { if let Err(e) = hook.into_future().await { outputln!(preamble service_group, "Service stop failed: {}", e); } } } /// Only used as a way to see if anything has happened to this /// service since the last time we might have checked pub fn last_state_change(&self) -> SystemTime { self.supervisor .lock() .expect("Couldn't lock supervisor") .state_entered() } /// Performs updates and executes hooks. /// /// Returns `true` if the service was marked to be restarted or reconfigured. pub fn tick(&mut self, census_ring: &CensusRing, launcher: &LauncherCli) -> bool { // We may need to block the service from starting until all // its binds are satisfied if !self.initialized() { match self.spec.binding_mode { BindingMode::Relaxed => (), BindingMode::Strict => { self.validate_binds(census_ring); if !self.unsatisfied_binds.is_empty() { outputln!(preamble self.service_group, "Waiting for service binds..."); return false; } } } } // Binds may become unsatisfied as a service is running (e.g., // service members disappear, etc.) This can affect the data // we pass to templates, so we must account for it here. if census_ring.changed() { self.validate_binds(census_ring); } // TODO (DM): As a temporary fix, we return this `template_data_changed` boolean which does // not account for changes in the census ring. This is needed because when we restart a // service, we do not correctly produce the initial gossip message. 
let (template_data_changed, template_update) = self.update_templates(census_ring); if self.update_service_files(census_ring) { self.file_updated(); } match self.spec.topology { Topology::Standalone => { self.execute_hooks(launcher, &template_update); } Topology::Leader => { let census_group = census_ring.census_group_for(&self.service_group) .expect("Service Group's census entry missing from list!"); match census_group.election_status { ElectionStatus::None => { if self.last_election_status != census_group.election_status { outputln!(preamble self.service_group, "Waiting to execute hooks; election hasn't started"); self.last_election_status = census_group.election_status; } } ElectionStatus::ElectionInProgress => { if self.last_election_status != census_group.election_status { outputln!(preamble self.service_group, "Waiting to execute hooks; election in progress."); self.last_election_status = census_group.election_status; } } ElectionStatus::ElectionNoQuorum => { if self.last_election_status != census_group.election_status { outputln!(preamble self.service_group, "Waiting to execute hooks; election in progress, \ and we have no quorum."); self.last_election_status = census_group.election_status; } } ElectionStatus::ElectionFinished => { let leader_id = census_group.leader_id .as_ref() .expect("No leader with finished election"); if self.last_election_status != census_group.election_status { outputln!(preamble self.service_group, "Executing hooks; {} is the leader", leader_id.to_string()); self.last_election_status = census_group.election_status; } self.execute_hooks(launcher, &template_update); } } } } template_data_changed } /// Iterate through all the service binds, marking any that are /// unsatisfied in `self.unsatisfied_binds`. /// /// When starting with a "strict" binding mode, the presence of /// any unsatisfied binds will block service startup. 
/// /// Thereafter, if binds become unsatisfied during the running of /// the service, those binds will be removed from the rendering /// context, allowing services to take appropriate action. fn validate_binds(&mut self, census_ring: &CensusRing) { for bind in self.spec.binds.iter() { let mut bind_is_unsatisfied = true; match self.current_bind_status(census_ring, bind) { BindStatus::NotPresent => { outputln!(preamble self.service_group, "The specified service group '{}' for binding '{}' is not (yet?) present \ in the census data.", bind.service_group(), bind.name()); } BindStatus::Empty => { outputln!(preamble self.service_group, "The specified service group '{}' for binding '{}' is present in the \ census, but currently has no active members.", bind.service_group(), bind.name()); } BindStatus::Unsatisfied(ref unsatisfied) => { outputln!(preamble self.service_group, "The group '{}' cannot satisfy the `{}` bind because it does not export \ the following required fields: {:?}", bind.service_group(), bind.name(), unsatisfied); } BindStatus::Satisfied => { // Since this function is currently called any // time the census changes, and this is the // expected steady-state of a properly running // service, we won't log anything here. Otherwise // we'd just spam the logs. Instead, log only on a // state change (see below). bind_is_unsatisfied = false; } BindStatus::Unknown(ref e) => { outputln!(preamble self.service_group, "Error validating bind for {}=>{}: {}", bind.name(), bind.service_group(), e); } }; if bind_is_unsatisfied { // TODO (CM): use Entry API to clone only when necessary self.unsatisfied_binds.insert((bind).clone()) } else if self.unsatisfied_binds.remove(bind) { // We'll log if the bind was previously // unsatisfied, but now it is satisfied. 
outputln!(preamble self.service_group, "The group '{}' satisfies the `{}` bind", bind.service_group(), bind.name()); true } else { false }; } } /// Evaluate the suitability of the given `ServiceBind` based on /// current census information. fn current_bind_status<'a>(&'a self, census_ring: &'a CensusRing, service_bind: &'a ServiceBind) -> BindStatus<'a> { match census_ring.census_group_for(service_bind.service_group()) { None => BindStatus::NotPresent, Some(group) => { if group.active_members().count() == 0 { BindStatus::Empty } else { match self.unsatisfied_bind_exports(group, service_bind.name()) { Ok(unsatisfied) => { if unsatisfied.is_empty() { BindStatus::Satisfied } else { BindStatus::Unsatisfied(unsatisfied) } } Err(e) => BindStatus::Unknown(e), } } } } } /// Does the service we've bound to actually satisfy the bind's /// contract (i.e., does it export everything we need)? /// /// Returns the set of unsatisfied exports. If everything is /// present, though, you get an empty set. /// /// Can return `Error::NoSuchBind` if there's not a bind with the /// given name. /// Can return `Error::NoActiveMembers` if there are no active members /// of the group. fn unsatisfied_bind_exports<'a>(&'a self, group: &'a CensusGroup, bind_name: &'a str) -> Result<HashSet<&'a String>> { let exports = self.exports_required_for_bind(bind_name)?; let group_exports = group.group_exports()?; let diff: HashSet<&String> = exports .difference(&group_exports) .cloned() // &&String -> &String .collect(); Ok(diff) } /// Returns the list of exported values a given bind requires /// /// Returns Err if there is no bind by the given name... by the /// time we get to this code, though, that shouldn't happen. fn exports_required_for_bind<'a>(&'a self, binding_name: &str) -> Result<HashSet<&'a String>> { // TODO (CM): Really, we want a HashMap of name => HashSet instead of a // Vec<Bind>... 
this finding is for the birds self.all_pkg_binds .iter() .find(|b| b.service == binding_name) .ok_or_else(|| Error::NoSuchBind(binding_name.to_string())) .map(|b| b.exports.iter().collect()) } /// Updates the process state of the service's supervisor fn check_process(&mut self, launcher: &LauncherCli) -> bool { self.supervisor .lock() .expect("Couldn't lock supervisor") .check_process(launcher) } /// Updates the service configuration with data from a census group if the census group has /// newer data than the current configuration. /// /// Returns `true` if the configuration was updated. fn update_gossip(&mut self, census_group: &CensusGroup) -> bool { match census_group.service_config { Some(ref config) => { if config.incarnation <= self.cfg.gossip_incarnation { return false; } self.cfg .set_gossip(config.incarnation, config.value.clone()); true } None => false, } } /// Compares the current state of the service to the current state of the census ring and the /// user-config, and re-renders all templatable content to disk. 
fn update_templates(&mut self, census_ring: &CensusRing) -> (bool, TemplateUpdate) { let census_group = census_ring.census_group_for(&self.service_group) .expect("Service update failed; unable to find own service group"); let cfg_updated_from_rumors = self.update_gossip(census_group); let template_data_changed = cfg_updated_from_rumors || self.user_config_updated; if self.user_config_updated { if let Err(e) = self.cfg.reload_user() { outputln!(preamble self.service_group, "Reloading user-config failed: {}", e); } self.user_config_updated = false; } let template_update = if template_data_changed || census_ring.changed() { let ctx = self.render_context(census_ring); TemplateUpdate::new(self.compile_hooks(&ctx), self.compile_configuration(&ctx), self.hooks.reconfigure.is_some() || self.hooks.reload.is_some()) } else { TemplateUpdate::default() }; (template_data_changed, template_update) } pub fn to_rumor(&self, incarnation: u64) -> ServiceRumor { let exported = match self.cfg.to_exported(&self.pkg) { Ok(exported) => Some(exported), Err(err) => { outputln!(preamble self.service_group, "Failed to generate exported cfg for service rumor: {}", err); None } }; let mut rumor = ServiceRumor::new(self.sys.member_id.as_str(), &self.pkg.ident, self.service_group.clone(), self.sys.as_sys_info(), exported); rumor.incarnation = incarnation; rumor } /// Run initialization hook if present. fn initialize(&mut self) { outputln!(preamble self.service_group, "Initializing"); *self.initialization_state.write() = InitializationState::Initializing; if let Some(ref hook) = self.hooks.init { let hook_runner = HookRunner::new(Arc::clone(hook), self.service_group.clone(), self.pkg.clone(), self.spec.svc_encrypted_password.clone()); // These clones are unfortunate. async/await will make this much better. 
let service_group = self.service_group.clone(); let initialization_state = Arc::clone(&self.initialization_state); let initialization_state_for_err = Arc::clone(&self.initialization_state); let f = async move { match hook_runner.into_future().await { Ok((exit_value, _)) => { *initialization_state.write() = if exit_value { InitializationState::InitializerFinished } else { InitializationState::Uninitialized }; } Err(e) => { outputln!(preamble service_group, "Service initialization failed: {}", e); *initialization_state_for_err.write() = InitializationState::Uninitialized; } } }; let (f, handle) = future::abortable(f); self.initialize_handle = Some(handle); tokio::spawn(f); } else { *self.initialization_state.write() = InitializationState::InitializerFinished; } } fn stop_initialize(&mut self) { if let Some(h) = self.initialize_handle.take() { h.abort(); } } /// Run reconfigure hook if present. fn reconfigure(&mut self) { let _timer = hook_timer("reconfigure"); if let Some(ref hook) = self.hooks.reload { hook.run(&self.service_group, &self.pkg, self.spec.svc_encrypted_password.as_ref()) .ok(); } if let Some(ref hook) = self.hooks.reconfigure { hook.run(&self.service_group, &self.pkg, self.spec.svc_encrypted_password.as_ref()) .ok(); // The intention here is to do a health check soon after a service's configuration // changes, as a way to (among other things) detect potential impacts when bound // services change exported configuration. self.restart_health_checks(); } } fn post_run(&mut self) { if let Some(ref hook) = self.hooks.post_run { let hook_runner = HookRunner::new(Arc::clone(hook), self.service_group.clone(), self.pkg.clone(), self.spec.svc_encrypted_password.clone()); let f = HookRunner::retryable_future(hook_runner); let (f, handle) = future::abortable(f); self.post_run_handle = Some(handle); tokio::spawn(f); } } /// Stop the `post-run` retry future. This will stop this retry loop regardless of `post-run`'s /// exit code. 
fn stop_post_run(&mut self) {
    // Abort the spawned retry future, if `post_run` ever started one.
    if let Some(h) = self.post_run_handle.take() {
        h.abort();
    }
}

/// Build (but do not run) a `HookRunner` for the `post-stop` hook.
///
/// Returns `None` when the service has no `post-stop` hook.
fn post_stop(&self) -> Option<HookRunner<hooks::PostStopHook>> {
    self.hooks.post_stop.as_ref().map(|hook| {
        HookRunner::new(Arc::clone(hook),
                        self.service_group.clone(),
                        self.pkg.clone(),
                        self.spec.svc_encrypted_password.clone())
    })
}

/// Run the `suitability` hook, if present, and return the value it
/// reports.
///
/// Returns `None` when the service is not yet initialized, when there
/// is no hook, or when the hook run fails.
pub fn suitability(&self) -> Option<u64> {
    let _timer = hook_timer("suitability");
    if !self.initialized() {
        return None;
    }
    self.hooks
        .suitability
        .as_ref()
        .and_then(|hook| {
            hook.run(&self.service_group,
                     &self.pkg,
                     self.spec.svc_encrypted_password.as_ref())
                .ok()
        })
        .unwrap_or(None)
}

/// Helper for compiling configuration templates into configuration files.
///
/// Returns `true` if the configuration has changed.
fn compile_configuration(&self, ctx: &RenderContext) -> bool {
    match self.config_renderer.compile(&ctx.service_group_name(),
                                       &self.pkg,
                                       &self.pkg.svc_config_path,
                                       ctx) {
        Ok(true) => true,
        Ok(false) => false,
        Err(e) => {
            // A compile failure is logged and reported as "no change".
            outputln!(preamble self.service_group,
                      "Failed to compile configuration: {}",
                      e);
            false
        }
    }
}

/// Helper for compiling hook templates into hooks.
///
/// This function will also perform any necessary post-compilation tasks.
fn compile_hooks(&self, ctx: &RenderContext<'_>) -> HookCompileTable {
    let hook_update_table = self.hooks.compile(&self.service_group, ctx);
    // The "run" file must be refreshed alongside the hooks; failure is
    // logged but does not abort compilation.
    if let Some(err) = self.copy_run().err() {
        outputln!(preamble self.service_group, "Failed to copy run hook: {}", err);
    }
    if hook_update_table.changed() {
        outputln!(preamble self.service_group, "Hooks recompiled");
    }
    hook_update_table
}

// Copy the "run" file to the svc path.
fn copy_run(&self) -> Result<()> { let svc_run = self.pkg.svc_path.join(hooks::RunHook::FILE_NAME); match self.hooks.run { Some(ref hook) => { fs::copy(hook.path(), &svc_run)?; Self::set_hook_permissions(&svc_run.to_str().unwrap())?; } None => { let run = self.pkg.path.join(hooks::RunHook::FILE_NAME); match fs::metadata(&run) { Ok(_) => { fs::copy(&run, &svc_run)?; Self::set_hook_permissions(&svc_run)?; } Err(err) => { outputln!(preamble self.service_group, "Error finding run file: {}", err); } } } } Ok(()) } #[cfg(not(windows))] fn set_hook_permissions<T: AsRef<Path>>(path: T) -> habitat_core::error::Result<()> { use habitat_common::templating::hooks::HOOK_PERMISSIONS; use habitat_core::util::posix_perm; posix_perm::set_permissions(path.as_ref(), HOOK_PERMISSIONS) } #[cfg(windows)] fn set_hook_permissions<T: AsRef<Path>>(path: T) -> habitat_core::error::Result<()> { use habitat_core::util::win_perm; win_perm::harden_path(path.as_ref()) } /// Returns `true` if the service was marked to be restarted or reconfigured. fn execute_hooks(&mut self, launcher: &LauncherCli, template_update: &TemplateUpdate) -> bool { let up = self.check_process(launcher); // It is ok that we do not hold this lock while we are performing the match. If we // transistion states while we are matching, we will catch the new state on the next tick. let initialization_state = self.initialization_state.read().clone(); match initialization_state { InitializationState::Uninitialized => { // If the service is not initialized and the process is still running, the // Supervisor was restarted and we just have to reattach to the // process. 
if up { self.reattach(); } else { self.initialize(); } } InitializationState::Initializing => { // Wait until the initializer finishes running } InitializationState::InitializerFinished => { self.start(launcher); self.post_run(); *self.initialization_state.write() = InitializationState::Initialized; } InitializationState::Initialized => { // If the service is initialized and the process is not running, the process // unexpectedly died and needs to be restarted. if !up || template_update.needs_restart() { // TODO (DM): This flag is a hack. We have the `TaskExecutor` here. We could // just schedule the `stop` future, but the `Manager` wraps // the `stop` future with additional functionality. Can we // refactor to make this flag unnecessary? self.needs_restart = true; return true; } else if template_update.needs_reconfigure() { // Only reconfigure if we did NOT restart the service self.reconfigure(); return true; } } }; false } /// Run file-updated hook if present. fn file_updated(&self) -> bool { let _timer = hook_timer("file-updated"); if self.initialized() { if let Some(ref hook) = self.hooks.file_updated { return hook.run(&self.service_group, &self.pkg, self.spec.svc_encrypted_password.as_ref()) .unwrap_or(false); } } false } /// Writes out all service files for a service. /// /// Must be called before a loaded service starts (even before any /// init hook, since the operation of the hook may depend on the /// presence of service files). /// /// Doesn't return a boolean (cf. `update_service_files` below) /// because we don't particularly care in this case. pub fn write_initial_service_files(&mut self, census_ring: &CensusRing) { // In this case, a service group not being found is fine; this // may be a non-peered Supervisor running this service for the // first time, for instance. 
if let Some(census_group) = census_ring.census_group_for(&self.service_group) { self.write_service_files(census_group, CensusGroup::service_files); } } /// Write service files from gossip data to disk under /// [`svc_files_path()`](../../fs/fn.svc_files_path.html). /// /// Returns `true` if a file was changed, added, or removed, and /// `false` if there were no updates. fn update_service_files(&mut self, census_ring: &CensusRing) -> bool { let census_group = census_ring.census_group_for(&self.service_group) .expect("Service update service files failed; unable to find own service \ group"); self.write_service_files(census_group, CensusGroup::changed_service_files) } /// Abstracts the logic for writing out service files for a /// service. /// /// The key bit here is `file_fn`, which returns the list of files /// to write out. In practice, this will be either /// `CensusGroup::service_files`, to write _all_ files to disk, or /// `CensusGroup::changed_service_files`, to write out only the /// files that have had recent gossip activity. /// /// Returns `true` if any service files were written to disk. fn write_service_files<'a, F, I>(&mut self, census_group: &'a CensusGroup, file_fn: F) -> bool where F: Fn(&'a CensusGroup) -> I, I: IntoIterator<Item = &'a ServiceFile> { let mut updated = false; for service_file in file_fn(census_group) { if self.cache_service_file(service_file) { outputln!(preamble self.service_group, "Service file updated, {}", service_file.filename); updated = true; } } updated } /// Helper for constructing a new render context for the service. fn render_context<'a>(&'a self, census: &'a CensusRing) -> RenderContext<'a> { // Unsatisfied binds are filtered out; you only get bind // information in the render context if they actually satisfy // the contract! RenderContext::new(&self.service_group, &self.sys, &self.pkg, &self.cfg, census, self.spec .binds .iter() .filter(|b| !self.unsatisfied_binds.contains(b))) } // Returns `false` if the write fails. 
fn cache_service_file(&mut self, service_file: &ServiceFile) -> bool { let file = self.pkg.svc_files_path.join(&service_file.filename); self.write_cache_file(file, &service_file.body) } // Returns `false` if the write fails. fn write_cache_file<T>(&self, file: T, contents: &[u8]) -> bool where T: AsRef<Path> { let current_checksum = match Blake2bHash::from_file(&file) { Ok(current_checksum) => Some(current_checksum), Err(err) => { outputln!(preamble self.service_group, "Failed to get current checksum for {}, {}", file.as_ref().display(), err); None } }; let new_checksum = Blake2bHash::from_bytes(&contents); if let Some(current_checksum) = current_checksum { if new_checksum == current_checksum { return false; } } if let Err(e) = atomic_write(file.as_ref(), contents) { outputln!(preamble self.service_group, "Failed to write to cache file {}, {}", file.as_ref().display(), e); return false; } self.set_gossip_permissions(&file) } #[cfg(not(windows))] fn set_gossip_permissions<T: AsRef<Path>>(&self, path: T) -> bool { use habitat_core::{os::process, util::posix_perm}; if process::can_run_services_as_svc_user() { let result = posix_perm::set_owner(path.as_ref(), &self.pkg.svc_user, &self.pkg.svc_group); if let Err(e) = result { outputln!(preamble self.service_group, "Failed to set ownership of cache file {}, {}", path.as_ref().display(), e); return false; } } if let Err(e) = posix_perm::set_permissions(path.as_ref(), GOSSIP_FILE_PERMISSIONS) { outputln!(preamble self.service_group, "Failed to set permissions on cache file {}, {}", path.as_ref().display(), e); return false; } true } #[cfg(windows)] fn set_gossip_permissions<T: AsRef<Path>>(&self, path: T) -> bool { use habitat_core::util::win_perm; if let Err(e) = win_perm::harden_path(path.as_ref()) { outputln!(preamble self.service_group, "Failed to set permissions on cache file {}, {}", path.as_ref().display(), e); return false; } true } } impl fmt::Display for Service { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { write!(f, "{} [{}]", self.service_group, self.pkg.ident) } } // This returns a HistogramTimer that we can use to track how long hooks take to execute. Note that // times will get tracked automatically when the HistogramTimer goes out of scope. fn hook_timer(name: &str) -> HistogramTimer { HOOK_DURATION.with_label_values(&[name]).start_timer() } /// This enum represents whether or not we want to render config information when we serialize this /// service via the ServiceProxy struct below. Choosing ConfigRendering::Full will render the /// config, and choosing ConfigRendering::Redacted will not render it. This matches up to the /// feature flag we have in place to redact config information from a service's serialized output, /// which shows up in the supervisor's HTTP API responses. /// /// Please note that this enum derives the Copy trait, so that it behaves more like the boolean /// that it is, and so that we don't have to clone() it everywhere. Adding anything to this enum /// that consumes a large amount of memory would be a bad idea (without removing Copy first) #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ConfigRendering { Full, Redacted, } /// This is a proxy struct to represent what information we're writing to the dat file, and /// therefore what information gets sent out via the HTTP API. Right now, we're just wrapping the /// actual Service struct, but this will give us something we can refactor against without /// worrying about breaking the data returned to users. 
pub struct ServiceProxy<'a> {
    // The service being serialized.
    service:          &'a Service,
    // Whether the service's `cfg` is included in the output or redacted.
    config_rendering: ConfigRendering,
}

impl<'a> ServiceProxy<'a> {
    /// Wrap a `Service` for serialization with the given config rendering.
    pub fn new(s: &'a Service, c: ConfigRendering) -> Self {
        ServiceProxy { service:          s,
                       config_rendering: c, }
    }
}

impl<'a> Serialize for ServiceProxy<'a> {
    fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
        where S: Serializer
    {
        // NOTE: `num_fields` must match the number of `serialize_field`
        // calls below: 27 with `cfg` included, 26 when it is redacted.
        let num_fields: usize = if self.config_rendering == ConfigRendering::Full {
            27
        } else {
            26
        };
        let s = &self.service;
        let mut strukt = serializer.serialize_struct("service", num_fields)?;
        strukt.serialize_field("all_pkg_binds", &s.all_pkg_binds)?;
        strukt.serialize_field("binding_mode", &s.spec.binding_mode)?;
        strukt.serialize_field("binds", &s.spec.binds)?;
        strukt.serialize_field("bldr_url", &s.spec.bldr_url)?;
        // The full config is only emitted when rendering is not redacted.
        if self.config_rendering == ConfigRendering::Full {
            strukt.serialize_field("cfg", &s.cfg)?;
        }
        strukt.serialize_field("channel", &s.spec.channel)?;
        strukt.serialize_field("config_from", &s.spec.config_from)?;
        strukt.serialize_field("desired_state", &s.spec.desired_state)?;
        strukt.serialize_field("health_check", &s.health_check_result)?;
        strukt.serialize_field("hooks", &s.hooks)?;
        strukt.serialize_field("initialized", &s.initialized())?;
        strukt.serialize_field("last_election_status", &s.last_election_status)?;
        strukt.serialize_field("manager_fs_cfg", &s.manager_fs_cfg)?;
        let pkg_proxy = PkgProxy::new(&s.pkg);
        strukt.serialize_field("pkg", &pkg_proxy)?;
        strukt.serialize_field("process",
                               s.supervisor
                                .lock()
                                .expect("Couldn't lock supervisor")
                                .deref())?;
        strukt.serialize_field("service_group", &s.service_group)?;
        strukt.serialize_field("spec_file", &s.spec_file)?;
        // Deprecated field; use spec_identifier instead
        strukt.serialize_field("spec_ident", &s.spec.ident)?;
        strukt.serialize_field("spec_identifier", &s.spec.ident.to_string())?;
        strukt.serialize_field("svc_encrypted_password", &s.spec.svc_encrypted_password)?;
        strukt.serialize_field("health_check_interval", &s.spec.health_check_interval)?;
        strukt.serialize_field("sys", &s.sys)?;
        strukt.serialize_field("topology", &s.spec.topology)?;
        strukt.serialize_field("update_strategy", &s.spec.update_strategy)?;
        strukt.serialize_field("update_condition", &s.spec.update_condition)?;
        strukt.serialize_field("user_config_updated", &s.user_config_updated)?;
        strukt.end()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_helpers::*;
    use habitat_common::types::{GossipListenAddr,
                                HttpListenAddr,
                                ListenCtlAddr};
    use std::{net::{IpAddr,
                    Ipv4Addr},
              str::FromStr};

    /// Build a minimal `Service` from an on-disk test fixture package.
    ///
    /// Loads `core/tree` on Linux or `core/7zip` on Windows; panics on
    /// any other platform.
    async fn initialize_test_service() -> Service {
        let listen_ctl_addr =
            ListenCtlAddr::from_str("127.0.0.1:1234").expect("Can't parse IP into SocketAddr");
        let sys = Sys::new(false,
                           GossipListenAddr::default(),
                           listen_ctl_addr,
                           HttpListenAddr::default(),
                           IpAddr::V4(Ipv4Addr::LOCALHOST));
        let ident = if cfg!(target_os = "linux") {
            PackageIdent::new("core", "tree", Some("1.7.0"), Some("20180609045201"))
        } else if cfg!(target_os = "windows") {
            PackageIdent::new("core", "7zip", Some("16.04"), Some("20170131110814"))
        } else {
            panic!("This is being run on a platform that's not currently supported");
        };
        let spec = ServiceSpec::new(ident);
        let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests")
                                                            .join("fixtures")
                                                            .join("pkgs");
        let install =
            PackageInstall::load(&spec.ident, Some(&path)).expect("PackageInstall should've \
                                                                   loaded my spec, but it didn't");
        let asys = Arc::new(sys);
        let fscfg = FsCfg::new("/tmp");
        let afs = Arc::new(fscfg);
        let gs = Arc::default();
        Service::with_package(asys,
                              &install,
                              spec,
                              afs,
                              Some("haha"),
                              gs,
                              ServicePidSource::Launcher,
                              FeatureFlag::empty()).await
                                                   .expect("I wanted a service to load, but it \
                                                            didn't")
    }

    /// Serialized `ServiceProxy` output (both full and redacted config)
    /// must validate against the HTTP gateway services JSON schema.
    #[tokio::test]
    async fn service_proxy_conforms_to_the_schema() {
        let service = initialize_test_service().await;

        // With config
        let proxy_with_config = ServiceProxy::new(&service, ConfigRendering::Full);
        let proxies_with_config = vec![proxy_with_config];
        let json_with_config =
            serde_json::to_string(&proxies_with_config).expect("Expected to convert \
                                                                proxies_with_config to JSON but \
                                                                failed");
        assert_valid(&json_with_config, "http_gateway_services_schema.json");

        // Without config
        let proxy_without_config = ServiceProxy::new(&service, ConfigRendering::Redacted);
        let proxies_without_config = vec![proxy_without_config];
        let json_without_config =
            serde_json::to_string(&proxies_without_config).expect("Expected to convert \
                                                                   proxies_without_config to \
                                                                   JSON but failed");
        assert_valid(&json_without_config, "http_gateway_services_schema.json");
    }
}
gateway_state, health_check_handle: None, post_run_handle: None,
"""
Contains all elements of this package. They act as the formal elements of
the law.
"""
import json
import sys


def from_json(data):
    """
    Reconstructs any `BaseElement` from its own `.as_json()`.

    Returns the element.
    """
    def _decode(data_dict):
        # Strings decode to themselves; dicts decode to a one-element
        # list containing the reconstructed element.
        values = []
        if isinstance(data_dict, str):
            return data_dict
        assert len(data_dict) == 1
        klass_string = next(iter(data_dict.keys()))
        # The dict key is the class name; look it up in this module.
        klass = getattr(sys.modules[__name__], klass_string)
        args = []
        for e in data_dict[klass_string]:
            x = _decode(e)
            if isinstance(x, str):
                args.append(x)
            else:
                args += x
        values.append(klass(*args))
        return values

    return _decode(json.loads(data))[0]


class BaseElement(object):
    """
    Defines the interface of all elements.
    """
    def as_html(self):
        """
        How the element converts itself to HTML.
        """
        raise NotImplementedError

    def as_str(self):
        """
        How the element converts itself to simple text.
        """
        raise NotImplementedError

    def as_dict(self):
        """
        How the element converts itself to a dictionary.
        """
        raise NotImplementedError

    def as_json(self):
        """
        How the element converts itself to JSON. Not to be overwritten.
        """
        return json.dumps(self.as_dict())

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, repr(self.as_str()))

    def __eq__(self, other):
        # Elements are equal iff they are of the same class and their
        # dictionary representations match.
        if isinstance(other, self.__class__):
            return self.as_dict() == other.as_dict()
        else:
            return False

    @staticmethod
    def _build_html(tag, text, attrib):
        """
        Builds an HTML element `<tag attrib...>text</tag>`.

        Attributes with value `None` are dropped; an empty `text`
        yields an empty string.
        """
        text = text.replace('\n', '')  # \n have no meaning in HTML
        if not text:  # ignore empty elements
            return ''
        attributes = ' '.join('%s="%s"' % (key, value)
                              for (key, value) in sorted(attrib.items())
                              if value is not None)
        if attributes:
            attributes = ' ' + attributes
        return '<{0}{1}>{2}</{0}>'.format(tag, attributes, text)


class Token(BaseElement):
    """
    A simple string.
    """
    def __init__(self, string):
        assert isinstance(string, str)
        self._string = string

    def as_str(self):
        return self.string

    def as_html(self):
        return self.as_str()

    def as_dict(self):
        return {self.__class__.__name__: [self.as_str()]}

    @property
    def string(self):
        return self._string


class Reference(Token):
    """
    A generic reference to anything. Contains a number (str) and a parent,
    which must be either `None` or a `Token` (or a subclass of `Token`).
    """
    def __init__(self, number, parent=None):
        super(Reference, self).__init__(number)
        assert isinstance(number, str)
        assert isinstance(parent, Token) or parent is None
        self._parent = parent

    def __repr__(self):
        return '<%s %s %s>' % (self.__class__.__name__, repr(self.number),
                               repr(self.parent))

    def as_html(self):
        return self._build_html('a', self.as_str(), {})

    def as_dict(self):
        r = {self.__class__.__name__: [self.number]}
        if self.parent:
            r[self.__class__.__name__].append(self.parent.as_dict())
        return r

    @property
    def number(self):
        return self.string

    @property
    def parent(self):
        return self._parent


class DocumentReference(Reference):
    """
    A concrete Reference to a document. Contains an href that identifies
    where it points to, as well as a `set_href` to set it.
    """
    def __init__(self, number, parent, href=''):
        super(DocumentReference, self).__init__(number, parent)
        self._href = href

    def __repr__(self):
        return '<%s %s %s>' % (self.__class__.__name__, repr(self.as_str()),
                               repr(self.parent.as_str()))

    @property
    def name(self):
        return self.parent.as_str()

    def set_href(self, href):
        self._href = href

    def as_html(self):
        # Only render a link when an href has been resolved.
        if self._href:
            return self._build_html('a', self.as_str(), {'href': self._href})
        return super(DocumentReference, self).as_html()

    def as_dict(self):
        r = super(DocumentReference, self).as_dict()
        if self._href:
            r[self.__class__.__name__].append(self._href)
        return r


class LineReference(Reference):
    pass


class NumberReference(Reference):
    pass


class ArticleReference(Reference):
    pass


class EULawReference(Reference):
    """
    A reference to EU law. Its href is built from its name and number.
    """
    @staticmethod
    def _build_eu_url(name, number):
        # example: '2000/29/CE'
        year, iden = number.split('/')[:2]

        label = {'Diretiva': 'L',
                 'Decisão de Execução': 'D',
                 'Regulamento (CE)': 'R',
                 'Regulamento CE': 'R',
                 'Regulamento CEE': 'R'}[name]

        # Regulations are numbered "number/year" rather than "year/number".
        if label == 'R':
            year, iden = iden, year

        eur_id = '3%s%s%04d' % (year, label, int(iden))

        return 'http://eur-lex.europa.eu/legal-content/PT/TXT/?uri=CELEX:%s' \
               % eur_id

    def __init__(self, number, parent):
        super(EULawReference, self).__init__(number, parent)

    def as_html(self):
        return self._build_html('a', self.as_str(),
                                {'href': self._build_eu_url(self.parent.as_str(),
                                                            self.number)})


class Anchor(Token):
    """
    A generic anchor that defines a section that can be referred to.
    """
    name = None

    def __init__(self, string):
        super(Anchor, self).__init__(string)
        self._document_section = None

    def as_str(self):
        return '%s %s\n' % (self.name, self.number)

    def as_dict(self):
        return {self.__class__.__name__: [self.number]}

    @property
    def number(self):
        return self.string

    @property
    def format(self):
        return self.__class__

    @property
    def reference(self):
        return self._document_section

    @reference.setter
    def reference(self, document_section):
        assert(isinstance(document_section, DocumentSection))
        self._document_section = document_section

    def ref_as_href(self):
        if self.reference.id_as_html():
            return '#' + self.reference.id_as_html()
        else:
            return None


class Section(Anchor):
    name = 'Secção'


class SubSection(Anchor):
    name = 'Sub-Secção'


class Clause(Anchor):
    name = 'Clausula'

    def as_str(self):
        return '%s\n' % self.number


class Part(Anchor):
    name = 'Parte'


class Chapter(Anchor):
    name = 'Capítulo'


class Title(Anchor):
    name = 'Título'


class Annex(Anchor):
    name = 'Anexo'

    def as_str(self):
        if self.number:
            return '%s %s\n' % (self.name, self.number)
        else:
            return '%s\n' % self.name


class Article(Anchor):
    name = 'Artigo'

    def as_html(self):
        anchor = self._build_html('a', self.number,
                                  {'href': self.ref_as_href()})
        return '%s %s' % (self.name, anchor)


class Number(Anchor):
    name = 'Número'

    def as_str(self):
        return '%s -' % self.number

    def as_html(self):
        return self._build_html('a', self.as_str(),
                                {'href': self.ref_as_href()})


class Line(Number):
    name = 'Alínea'

    def as_str(self):
        return '%s' % self.number


class Item(Number):
    """
    An item of an unordered list.
    """
    name = 'Item'

    def as_str(self):
        return '%s' % self.number


class BaseDocumentSection(BaseElement):
    def __init__(self, *children):
        self._children = []
        for child in children:
            self.append(child)
        self._parent_section = None

    def append(self, element):
        if isinstance(element, BaseDocumentSection):
            element._parent_section = self
        self._children.append(element)

    def __len__(self):
        return len(self._children)

    def as_str(self):
        return ''.join(child.as_str() for child in self._children)

    def as_html(self):
        # Wrap consecutive runs of ordered/unordered children in
        # <ol>/<ul> tags, closing them as soon as the run ends.
        string = ''
        ol = False
        ul = False
        for child in self._children:
            if ul and not isinstance(child, UnorderedDocumentSection):
                string += '</ul>'
                ul = False
            if ol and not isinstance(child, OrderedDocumentSection):
                string += '</ol>'
                ol = False
            if not ul and isinstance(child, UnorderedDocumentSection):
                string += '<ul>'
                ul = True
            if not ol and isinstance(child, OrderedDocumentSection):
                string += '<ol>'
                ol = True
            string += child.as_html()
        if ol:
            string += '</ol>'
        if ul:
            string += '</ul>'
        return string

    def as_dict(self):
        return {self.__class__.__name__: [child.as_dict()
                                          for child in self._children]}

    def find_all(self, condition, recursive=False):
        """
        Returns the children matching `condition`; descends into
        sub-sections when `recursive` is True.
        """
        if recursive:
            def _find_all(root):
                result = []
                if isinstance(root, BaseDocumentSection):
                    for child in root._children:
                        if condition(child):
                            result.append(child)
                        result += _find_all(child)
                return result
            return _find_all(self)
        return [child for child in self._children if condition(child)]

    def id_tree(self):
        tree = []
        if self._parent_section is not None:
            tree = self._parent_section.id_tree()
        tree += [self]
        return tree

    def get_doc_refs(self):
        """
        Yields tuples (name, number) of all its `DocumentReference`s.
        """
        refs = self.find_all(lambda x: isinstance(x, DocumentReference), True)
        ref_set = set()
        for ref in refs:
            ref_set.add((ref.name, ref.number))
        return ref_set

    def set_doc_refs(self, mapping):
        """
        Uses a dictionary of the form `(name, ref)-> url` to set the href
        of its own `DocumentReference`s.
        """
        refs = self.find_all(lambda x: isinstance(x, DocumentReference), True)
        for ref in refs:
            if (ref.name, ref.number) in mapping:
                ref.set_href(mapping[(ref.name, ref.number)])


class Paragraph(BaseDocumentSection):
    def as_html(self):
        return self._build_html('p', super(Paragraph, self).as_html(), {})


class InlineParagraph(Paragraph):
    def as_html(self):
        return self._build_html('span', super(Paragraph, self).as_html(), {})


class Document(BaseDocumentSection):
    pass


class DocumentSection(BaseDocumentSection):
    formal_sections = [Annex, Article, Number, Line, Item]

    html_classes = {
        Annex: 'annex',
        Part: 'part',
        Title: 'title',
        Chapter: 'chapter',
        Section: 'section',
        SubSection: 'sub-section',
        Clause: 'clause',
        Article: 'article',
        Number: 'number list-unstyled',
        Line: 'line list-unstyled',
        Item: 'item list-unstyled',
    }

    def __init__(self, anchor, *children):
        super(DocumentSection, self).__init__(*children)
        self._anchor = anchor
        self._anchor.reference = self

    def as_dict(self):
        json = super(DocumentSection, self).as_dict()
        json[self.__class__.__name__].insert(0, self.anchor.as_dict())
        return json

    @property
    def anchor(self):
        return self._anchor

    @property
    def format(self):
        return self.anchor.format

    def formal_id_tree(self):
        filtered_tree = []
        for e in self.id_tree():
            if isinstance(e, QuotationSection):
                return []  # sections inside quotations have no tree
            if isinstance(e, DocumentSection) and \
                    e.format in self.formal_sections:
                filtered_tree.append(e)
        return filtered_tree

    def id_as_html(self):
        string = '-'.join(e.anchor.name + '-' + e.anchor.number
                          for e in self.formal_id_tree())
        if string != '':
            return string
        else:
            return None


class TitledDocumentSection(DocumentSection):
    def __init__(self, anchor, title=None, *children):
        super(TitledDocumentSection, self).__init__(anchor, *children)
        self._title = title

    def as_dict(self):
        json = super(TitledDocumentSection, self).as_dict()
        if self._title is not None:
            json[self.__class__.__name__].insert(1, self._title.as_dict())
        return json

    hierarchy_html_titles = {
        Part: 'h2',
        Annex: 'h2',
        Title: 'h3',
        Chapter: 'h3',
        Section: 'h4',
        SubSection: 'h5',
        Article: 'h5',
        Clause: 'h5',
    }

    def as_html(self):
        inner = self.anchor.as_html()
        if self._title is not None:
            inner += self._title.as_html()

        container = self._build_html(self.hierarchy_html_titles[self.format],
                                     inner, {'class': 'title'})
        rest = super(TitledDocumentSection, self).as_html()
        return self._build_html('div', container + rest,
                                {'class': self.html_classes[self.format],
                                 'id': self.id_as_html()})

    def as_str(self):
        string = self.anchor.as_str()
        if self._title is not None:
            string += self._title.as_str()
        return string + super(TitledDocumentSection, self).as_str()

    # Restored the missing `title` getter: the source contained a FIM
    # placeholder token here, leaving `return self._title` orphaned and
    # `@title.setter` referencing an undefined name.
    @property
    def title(self):
        return self._title

    @title.setter
    def title(self, title):
        assert(isinstance(title, Paragraph))
        self._title = title


class InlineDocumentSection(DocumentSection):
    """
    A section whose elements are inline.
    """
    formats = {}

    def as_html(self):
        container = self._build_html('span', self.anchor.as_html(), {})
        rest = super(InlineDocumentSection, self).as_html()
        return self._build_html('li', container + rest,
                                {'class': self.html_classes[self.format],
                                 'id': self.id_as_html()})

    def as_str(self):
        return self.anchor.as_str() + \
            super(InlineDocumentSection, self).as_str()


class OrderedDocumentSection(InlineDocumentSection):
    """
    A section whose elements are inline and ordered.
    """
    formats = {Number, Line}


class UnorderedDocumentSection(InlineDocumentSection):
    """
    A section whose elements are inline and un-ordered.
    """
    formats = {Item}


class QuotationSection(BaseDocumentSection):
    """
    A Section quoting something.
    """
    def as_html(self):
        return '<blockquote>%s</blockquote>' % \
            super(QuotationSection, self).as_html()

    def as_str(self):
        return '«%s»' % super(QuotationSection, self).as_str()
@property def title(self):
<|file_name|>GetAllUsersDatabaseAction.java<|end_file_name|><|fim▁begin|>package org.bimserver.database.actions; /****************************************************************************** * Copyright (C) 2009-2019 BIMserver.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see {@literal<http://www.gnu.org/licenses/>}. *****************************************************************************/ import java.util.Set; import org.bimserver.BimserverDatabaseException; import org.bimserver.database.BimserverLockConflictException; import org.bimserver.database.DatabaseSession; import org.bimserver.database.OldQuery; import org.bimserver.database.query.conditions.AttributeCondition; import org.bimserver.database.query.conditions.Condition; import org.bimserver.database.query.conditions.IsOfTypeCondition; import org.bimserver.database.query.conditions.Not; import org.bimserver.database.query.literals.EnumLiteral; import org.bimserver.models.log.AccessMethod; import org.bimserver.models.store.ObjectState; import org.bimserver.models.store.StorePackage; import org.bimserver.models.store.User; import org.bimserver.models.store.UserType; import org.bimserver.shared.exceptions.UserException; import org.bimserver.utils.CollectionUtils; import org.bimserver.webservices.authorization.Authorization; public class GetAllUsersDatabaseAction extends BimDatabaseAction<Set<User>> { private Authorization authorization; 
public GetAllUsersDatabaseAction(DatabaseSession databaseSession, AccessMethod accessMethod, Authorization authorization) { super(databaseSession, accessMethod); this.authorization = authorization; } @Override public Set<User> execute() throws UserException, BimserverLockConflictException, BimserverDatabaseException { User actingUser = getUserByUoid(authorization.getUoid()); Condition condition = new IsOfTypeCondition(StorePackage.eINSTANCE.getUser()); condition = condition.and(new Not(new AttributeCondition(StorePackage.eINSTANCE.getUser_UserType(), new EnumLiteral(UserType.SYSTEM)))); if (actingUser.getUserType() != UserType.ADMIN) { condition = condition.and(new AttributeCondition(StorePackage.eINSTANCE.getUser_State(), new EnumLiteral(ObjectState.ACTIVE))); } return CollectionUtils.mapToSet(getDatabaseSession().query(condition, User.class, OldQuery.getDefault())); } <|fim▁hole|><|fim▁end|>
}
<|file_name|>MethodMissingClass.js<|end_file_name|><|fim▁begin|>export default class MethodMissingClass { constructor() { const handler = { get: this._handleMethodMissing }; return new Proxy(this, handler); } _handleMethodMissing(target, name, receiver) { const origMethod = target[name]; // If it exist, return original member or function.<|fim▁hole|> // If the method doesn't exist, call methodMissing. return function(...args) { return Reflect.get(target, "methodMissing").call(receiver, name, ...args); }; } methodMissing(name, ...args) { console.log( `Method "${name}" does not exist. Please override methodMissing method to add functionality.` ); } }<|fim▁end|>
if (Reflect.has(target, name) || name === "methodMissing") { return Reflect.get(target, name, receiver); }
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import numpy as np mecanum_matrix = np.matrix([ [+1, +1, +1, +1], # Unitless! Shooting for rad/s [+1, -1, +1, -1], # Unitless! Shooting for rad/s [+1, +1, -1, -1], # Unitless! Shooting for rad/s # [+1, -1, -1, +1], # This is the error row (May not be necessary) ], dtype=np.float32) / 4.0 # All of the rows are divided by 4 v_target = np.array([0.0, 1.0, 0.0])<|fim▁hole|><|fim▁end|>
print np.linalg.lstsq(mecanum_matrix, v_target)[0]
<|file_name|>particle.rs<|end_file_name|><|fim▁begin|>use crate::numeric::StateVector; use crate::typenum::consts::{U1, U2, U3}; use crate::typenum::{Exp, Pow, Prod, Same, Unsigned}; use diffgeom::coordinates::{ConversionTo, CoordinateSystem, Point}; use diffgeom::inner; use diffgeom::metric::MetricSystem; use diffgeom::tensors::Vector; use generic_array::{ArrayLength, GenericArray}; use numeric_algs::State; use std::ops::Mul; pub struct Particle<C: CoordinateSystem> where C::Dimension: Pow<U1>, Exp<C::Dimension, U1>: ArrayLength<f64>, { x: Point<C>, v: Vector<C>, } impl<C: CoordinateSystem> Clone for Particle<C> where C::Dimension: Pow<U1>, Exp<C::Dimension, U1>: ArrayLength<f64>, { fn clone(&self) -> Self { Particle { x: self.x.clone(), v: self.v.clone(), } } } impl<C: CoordinateSystem> Particle<C> where C::Dimension: Pow<U1>, Exp<C::Dimension, U1>: ArrayLength<f64>, { pub fn new(x: Point<C>, v: Vector<C>) -> Self { Particle { x: x, v: v } }<|fim▁hole|> pub fn get_pos(&self) -> &Point<C> { &self.x } pub fn get_vel(&self) -> &Vector<C> { &self.v } } impl<C: CoordinateSystem> State for Particle<C> where C::Dimension: Pow<U1> + Mul<U2> + Unsigned, Exp<C::Dimension, U1>: ArrayLength<f64>, Prod<C::Dimension, U2>: ArrayLength<f64>, <Prod<C::Dimension, U2> as ArrayLength<f64>>::ArrayType: Copy, { type Derivative = StateVector<Prod<C::Dimension, U2>>; fn shift_in_place(&mut self, dir: &Self::Derivative, amount: f64) { let d = C::Dimension::to_usize(); for i in 0..d { self.x[i] += dir.0[i] * amount; self.v[i] += dir.0[i + d] * amount; self.v.set_point(self.x.clone()); } } } impl<C: CoordinateSystem> Particle<C> where C::Dimension: Pow<U1> + Mul<U2> + Unsigned + Pow<U2> + Pow<U3>, Exp<C::Dimension, U1>: ArrayLength<f64>, Prod<C::Dimension, U2>: ArrayLength<f64>, <Prod<C::Dimension, U2> as ArrayLength<f64>>::ArrayType: Copy, C: MetricSystem, Exp<C::Dimension, U2>: ArrayLength<f64>, Exp<C::Dimension, U3>: ArrayLength<f64>, { pub fn derivative(&self) -> 
StateVector<Prod<C::Dimension, U2>> { let christoffel = C::christoffel(&self.x); let temp = inner!(_, Vector<C>; U1, U3; christoffel, self.v.clone()); let cov_der = inner!(_, Vector<C>; U1, U2; temp, self.v.clone()); let mut result = GenericArray::default(); let d = C::Dimension::to_usize(); for i in 0..d { result[i] = self.v[i]; result[i + d] = -cov_der[i]; } StateVector(result) } } impl<C: CoordinateSystem> Particle<C> where C::Dimension: Pow<U1>, Exp<C::Dimension, U1>: ArrayLength<f64>, { pub fn convert<C2: CoordinateSystem + 'static>(&self) -> Particle<C2> where C: ConversionTo<C2>, C2::Dimension: Pow<U1> + Pow<U2>, Exp<C2::Dimension, U1>: ArrayLength<f64>, Exp<C2::Dimension, U2>: ArrayLength<f64>, C2::Dimension: Same<C::Dimension>, { let new_x: Point<C2> = C::convert_point(&self.x); let new_v: Vector<C2> = self.v.convert(); Particle { x: new_x, v: new_v } } } pub trait PosAndVel<D: Unsigned + ArrayLength<f64> + Pow<U1>> where Exp<D, U1>: ArrayLength<f64>, { fn get_pos(&self) -> &GenericArray<f64, D>; fn get_vel(&self) -> &GenericArray<f64, Exp<D, U1>>; } impl<C: CoordinateSystem> PosAndVel<C::Dimension> for Particle<C> where C::Dimension: Pow<U1>, Exp<C::Dimension, U1>: ArrayLength<f64>, { fn get_pos(&self) -> &GenericArray<f64, C::Dimension> { self.x.coords_array() } fn get_vel(&self) -> &GenericArray<f64, Exp<C::Dimension, U1>> { self.v.coords_array() } }<|fim▁end|>
<|file_name|>code.py<|end_file_name|><|fim▁begin|># All-Terrain-Life-Vest All Terrain Life Vest- IEA Raspverry Pi Competition Entry # Description import RPi.GPIO as GPIO import time import os GPIO.setmode (GPIO.BCM) GPIO.cleanup() GPIO.setwarnings(False) GPIO.setup(17,GPIO.OUT) GPIO.setup(04,GPIO.OUT) GPIO.setup(22, GPIO.IN) print("---------------") print("Button+GPIO") print("---------------") print GPIO.input(22) while True: if(GPIO.input(22)==False): GPIO.output(17,GPIO.HIGH) GPIO.output(04,GPIO.HIGH) print("air bag activated") os.system('date') print GPIO.input(22) time.sleep(1) GPIO.output(17,GPIO.LOW) GPIO.output(04,GPIO.LOW) else: os.system('clear')<|fim▁hole|><|fim▁end|>
print("air bag NOT activated") time.sleep(1)
<|file_name|>resources.index.js<|end_file_name|><|fim▁begin|>jQuery(document).ready(function ($) { const $resourcesTable = $('#resources-table'); $resourcesTable.find('tbody tr:not(.sub-table-header)').hide(); $resourcesTable.find('.sub-table-header').click(function () { $(this).nextUntil('tr.sub-table-header').toggle(); const visibleLength = $resourcesTable.find('tbody tr:has(:checkbox):visible').length,<|fim▁hole|> $('#mark-all').prop('checked', allVisibleChecked); }); });<|fim▁end|>
allVisibleChecked = visibleLength > 0 && visibleLength === $resourcesTable.find('tbody tr.selected:visible').length;
<|file_name|>supertest-as-promised.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for SuperTest as Promised v2.0.2 // Project: https://github.com/WhoopInc/supertest-as-promised // Definitions by: Tanguy Krotoff <https://github.com/tkrotoff> // Definitions: https://github.com/borisyankov/DefinitelyTyped /// <reference path='../superagent/superagent.d.ts' /> /// <reference path="../bluebird/bluebird.d.ts" /> declare module "supertest-as-promised" { // Mostly copy-pasted from supertest.d.ts<|fim▁hole|> function supertest(app: any): supertest.SuperTest; module supertest { function agent(app?: any): supertest.SuperTest; interface SuperTest extends superagent.SuperAgent<Test> { } interface Promise<T> extends PromiseBluebird<T> { toPromise(): PromiseBluebird<T>; } interface Test extends superagent.Request<Test> { url: string; serverAddress(app: any, path: string): string; expect(status: number): Promise<supertest.Response>; expect(status: number, body: string): Promise<supertest.Response>; expect(body: string): Promise<supertest.Response>; expect(body: RegExp): Promise<supertest.Response>; expect(body: Object): Promise<supertest.Response>; expect(field: string, val: string): Promise<supertest.Response>; expect(field: string, val: RegExp): Promise<supertest.Response>; expect(checker: (res: Response) => any): Promise<supertest.Response>; } interface Response extends superagent.Response { } } export = supertest; }<|fim▁end|>
import * as superagent from 'superagent'; import * as PromiseBluebird from 'bluebird';
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub trait Summary { fn summarize(&self) -> String; } pub struct NewsArticle { pub headline: String, pub location: String, pub author: String, pub content: String, } impl Summary for NewsArticle { fn summarize(&self) -> String { format!("{}, by {} ({})", self.headline, self.author, self.location) } } pub struct Tweet { pub username: String, pub content: String, pub reply: bool, pub retweet: bool, } impl Summary for Tweet { fn summarize(&self) -> String { format!("{}: {}", self.username, self.content) }<|fim▁hole|>} // ANCHOR: here fn returns_summarizable(switch: bool) -> impl Summary { if switch { NewsArticle { headline: String::from( "Penguins win the Stanley Cup Championship!", ), location: String::from("Pittsburgh, PA, USA"), author: String::from("Iceburgh"), content: String::from( "The Pittsburgh Penguins once again are the best \ hockey team in the NHL.", ), } } else { Tweet { username: String::from("horse_ebooks"), content: String::from( "of course, as you probably already know, people", ), reply: false, retweet: false, } } } // ANCHOR_END: here<|fim▁end|>
<|file_name|>client.py<|end_file_name|><|fim▁begin|>############################################################################### # # The MIT License (MIT) # # Copyright (c) Tavendo GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #<|fim▁hole|> import sys from twisted.internet import reactor from twisted.python import log from autobahn.twisted.websocket import WebSocketClientProtocol, \ WebSocketClientFactory, \ connectWS class EchoClientProtocol(WebSocketClientProtocol): def sendHello(self): self.sendMessage("Hello, world!".encode('utf8')) def onOpen(self): self.sendHello() def onMessage(self, payload, isBinary): if not isBinary: print("Text message received: {}".format(payload.decode('utf8'))) reactor.callLater(1, self.sendHello) if __name__ == '__main__': if len(sys.argv) < 2: print("Need the WebSocket server address, i.e. 
ws://127.0.0.1:9000") sys.exit(1) if len(sys.argv) > 2 and sys.argv[2] == 'debug': log.startLogging(sys.stdout) debug = True else: debug = False factory = WebSocketClientFactory(sys.argv[1], debug=debug, debugCodePaths=debug) factory.protocol = EchoClientProtocol connectWS(factory) reactor.run()<|fim▁end|>
###############################################################################
<|file_name|>lines.rs<|end_file_name|><|fim▁begin|>use super::{HorizontalAlign, SectionGlyph, SectionText, VerticalAlign}; use crate::{linebreak::LineBreaker, words::*}; use ab_glyph::*; use std::iter::{FusedIterator, Iterator, Peekable}; /// A line of `Word`s limited to a max width bound. #[derive(Default)] pub(crate) struct Line { pub glyphs: Vec<SectionGlyph>, pub max_v_metrics: VMetrics, pub rightmost: f32, } impl Line { #[inline] pub(crate) fn line_height(&self) -> f32 { self.max_v_metrics.ascent - self.max_v_metrics.descent + self.max_v_metrics.line_gap } /// Returns line glyphs positioned on the screen and aligned. pub fn aligned_on_screen( mut self, screen_position: (f32, f32), h_align: HorizontalAlign, v_align: VerticalAlign, ) -> Vec<SectionGlyph> { if self.glyphs.is_empty() { return Vec::new(); } <|fim▁hole|> // leftwards by the rightmost x distance from render position // - Central alignment is attained from left by shifting the line // leftwards by half the rightmost x distance from render position HorizontalAlign::Center | HorizontalAlign::Right => { let mut shift_left = self.rightmost; if h_align == HorizontalAlign::Center { shift_left /= 2.0; } point(screen_position.0 - shift_left, screen_position.1) } }; let screen_pos = match v_align { VerticalAlign::Top => screen_left, VerticalAlign::Center => { let mut screen_pos = screen_left; screen_pos.y -= self.line_height() / 2.0; screen_pos } VerticalAlign::Bottom => { let mut screen_pos = screen_left; screen_pos.y -= self.line_height(); screen_pos } }; self.glyphs .iter_mut() .for_each(|sg| sg.glyph.position += screen_pos); self.glyphs } } /// `Line` iterator. /// /// Will iterator through `Word` until the next word would break the `width_bound`. /// /// Note: Will always have at least one word, if possible, even if the word itself /// breaks the `width_bound`. 
pub(crate) struct Lines<'a, 'b, L, F, S> where L: LineBreaker, F: Font, S: Iterator<Item = SectionText<'a>>, { pub(crate) words: Peekable<Words<'a, 'b, L, F, S>>, pub(crate) width_bound: f32, } impl<'a, L, F, S> Iterator for Lines<'a, '_, L, F, S> where L: LineBreaker, F: Font, S: Iterator<Item = SectionText<'a>>, { type Item = Line; fn next(&mut self) -> Option<Self::Item> { let mut caret = point(0.0, 0.0); let mut line = Line::default(); let mut progressed = false; while let Some(word) = self.words.peek() { let word_right = caret.x + word.layout_width_no_trail; // Reduce float errors by using relative "<= width bound" check let word_in_bounds = word_right < self.width_bound || approx::relative_eq!(word_right, self.width_bound); // only if `progressed` means the first word is allowed to overlap the bounds if !word_in_bounds && progressed { break; } let word = self.words.next().unwrap(); progressed = true; line.rightmost = word_right; if (line.glyphs.is_empty() || !word.glyphs.is_empty()) && word.max_v_metrics.height() > line.max_v_metrics.height() { let diff_y = word.max_v_metrics.ascent - caret.y; caret.y += diff_y; // modify all smaller lined glyphs to occupy the new larger line for SectionGlyph { glyph, .. } in &mut line.glyphs { glyph.position.y += diff_y; } line.max_v_metrics = word.max_v_metrics; } line.glyphs.extend(word.glyphs.into_iter().map(|mut sg| { sg.glyph.position += caret; sg })); caret.x += word.layout_width; if word.hard_break { break; } } Some(line).filter(|_| progressed) } } impl<'a, L, F, S> FusedIterator for Lines<'a, '_, L, F, S> where L: LineBreaker, F: Font, S: Iterator<Item = SectionText<'a>>, { }<|fim▁end|>
// implement v-aligns when they're are supported let screen_left = match h_align { HorizontalAlign::Left => point(screen_position.0, screen_position.1), // - Right alignment attained from left by shifting the line
<|file_name|>lng.js<|end_file_name|><|fim▁begin|>// languages index var localeIndex = { "en" : 0, "ja" : 1, "es" : 2, "hu" : 3, "lt" : 4, "ru" : 5, "it" : 6, "pt" : 7, "sp_ch" : 8, "fr" : 9, "ge" : 10, "ua" : 11, "lv" : 12, "no" : 13, "pt_br" : 14<|fim▁hole|><|fim▁end|>
};
<|file_name|>findOverlapGene.py<|end_file_name|><|fim▁begin|>import subprocess import os.path import re import argparse import sys from pybedtools import BedTool<|fim▁hole|> parser = argparse.ArgumentParser(description="find overlap gene.", formatter_class=argparse.ArgumentDefaultsHelpFormatter) if not DEBUG: parser.add_argument('-i', '--input', action='store', nargs='?', help='Input locus file (bed format)', required=True) parser.add_argument('-g', '--gene_sorted_bed', action='store', nargs='?', help='Gene locus file (sorted bed format)', required=True) parser.add_argument('-o', '--output', action='store', nargs='?', help='Output overlap file', required=True) args = parser.parse_args() input_file=args.input gene_file = args.gene_sorted_bed output_file=args.output else: input_file= "/scratch/cqs/shengq1/vickers/20170720_AGO_human_CLIP/macs2/result/GSM1020022/GSM1020022_peaks.narrowPeak.bed" gene_file = "/scratch/cqs/shengq1/references/smallrna/v3/hg19_miRBase21_GtRNAdb2_gencode19_ncbi.sorted.bed" output_file="/scratch/cqs/shengq1/vickers/20170720_AGO_human_CLIP/macs2/result/GSM1020022/GSM1020022_peaks.narrowPeak.overlap.tsv" closet = [nearest for nearest in BedTool(input_file).closest(gene_file, d=True)] with open(output_file, 'w') as w: for nearest in closet: overlap = nearest.fields[12] if overlap == u'0': w.write(str(nearest))<|fim▁end|>
DEBUG = False
<|file_name|>issue-33185.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![allow(dead_code)] #[macro_export] macro_rules! state { ( $( $name:ident : $field:ty )* ) => ( #[derive(Default)] struct State { $($name : $field),*<|fim▁hole|> } ) } state! { x: i64 } pub fn main() { }<|fim▁end|>
<|file_name|>upconvertCoordinates.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from argparse import ArgumentParser from collections import defaultdict import sys import os from sonLib.bioio import cigarRead, cigarWrite, getTempFile, system def getSequenceRanges(fa): """Get dict of (untrimmed header) -> [(start, non-inclusive end)] mappings from a trimmed fasta.""" ret = defaultdict(list) curSeq = "" curHeader = None curTrimmedStart = None for line in fa: line = line.strip() if line == '': continue if line[0] == '>': if curHeader is not None: # Add previous seq info to dict trimmedRange = (curTrimmedStart, curTrimmedStart + len(curSeq)) untrimmedHeader = "|".join(curHeader.split("|")[:-1]) ret[untrimmedHeader].append(trimmedRange) curHeader = line[1:].split()[0] curTrimmedStart = int(curHeader.split('|')[-1]) curSeq = "" else: curSeq += line if curHeader is not None: # Add final seq info to dict trimmedRange = (curTrimmedStart, curTrimmedStart + len(curSeq)) untrimmedHeader = "|".join(curHeader.split("|")[:-1]) ret[untrimmedHeader].append(trimmedRange) for key in ret.keys(): # Sort by range's start pos ret[key] = sorted(ret[key], key=lambda x: x[0]) return ret def validateRanges(seqRanges): """Fail if the given range dict contains overlapping ranges or if the ranges aren't sorted. 
""" for seq, ranges in seqRanges.items(): for i, range in enumerate(ranges): start = range[0] if i - 1 >= 0: range2 = ranges[i - 1]<|fim▁hole|> assert start < range2[0] def sortCigarByContigAndPos(cigarPath, contigNum): contigNameKey = 2 if contigNum == 1 else 6 startPosKey = 3 if contigNum == 1 else 7 tempFile = getTempFile() system("sort -k %d,%d -k %d,%dn %s > %s" % (contigNameKey, contigNameKey, startPosKey, startPosKey, cigarPath, tempFile)) return tempFile def upconvertCoords(cigarPath, fastaPath, contigNum, outputFile): """Convert the coordinates of the given alignment, so that the alignment refers to a set of trimmed sequences originating from a contig rather than to the contig itself.""" with open(fastaPath) as f: seqRanges = getSequenceRanges(f) validateRanges(seqRanges) sortedCigarPath = sortCigarByContigAndPos(cigarPath, contigNum) sortedCigarFile = open(sortedCigarPath) currentContig = None currentRangeIdx = None currentRange = None for alignment in cigarRead(sortedCigarFile): # contig1 and contig2 are reversed in python api!! 
contig = alignment.contig2 if contigNum == 1 else alignment.contig1 minPos = min(alignment.start2, alignment.end2) if contigNum == 1 else min(alignment.start1, alignment.end1) maxPos = max(alignment.start2, alignment.end2) if contigNum == 1 else max(alignment.start1, alignment.end1) if contig in seqRanges: if contig != currentContig: currentContig = contig currentRangeIdx = 0 currentRange = seqRanges[contig][0] while (minPos >= currentRange[1] or minPos < currentRange[0]) and currentRangeIdx < len(seqRanges[contig]) - 1: currentRangeIdx += 1 currentRange = seqRanges[contig][currentRangeIdx] if currentRange[0] <= minPos < currentRange[1]: if maxPos - 1 > currentRange[1]: raise RuntimeError("alignment on %s:%d-%d crosses " "trimmed sequence boundary" %\ (contig, minPos, maxPos)) if contigNum == 1: alignment.start2 -= currentRange[0] alignment.end2 -= currentRange[0] alignment.contig2 = contig + ("|%d" % currentRange[0]) else: alignment.start1 -= currentRange[0] alignment.end1 -= currentRange[0] alignment.contig1 = contig + ("|%d" % currentRange[0]) else: raise RuntimeError("No trimmed sequence containing alignment " "on %s:%d-%d" % (contig, minPos, maxPos)) cigarWrite(outputFile, alignment, False) os.remove(sortedCigarPath)<|fim▁end|>
assert start >= range2[1] if i + 1 < len(ranges): range2 = ranges[i + 1]
<|file_name|>mock_encrypter.cc<|end_file_name|><|fim▁begin|>// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "net/quic/mock_encrypter.h" #include "net/third_party/quic/core/quic_data_writer.h" #include "net/third_party/quic/core/quic_utils.h" #include "starboard/memory.h" using quic::DiversificationNonce; using quic::Perspective; using quic::QuicPacketNumber; using quic::QuicStringPiece;<|fim▁hole|>using quic::QuicTransportVersion; namespace net { MockEncrypter::MockEncrypter(Perspective perspective) {} bool MockEncrypter::SetKey(QuicStringPiece key) { return key.empty(); } bool MockEncrypter::SetNoncePrefix(QuicStringPiece nonce_prefix) { return nonce_prefix.empty(); } bool MockEncrypter::SetIV(QuicStringPiece iv) { return iv.empty(); } bool MockEncrypter::EncryptPacket(uint64_t /*packet_number*/, QuicStringPiece associated_data, QuicStringPiece plaintext, char* output, size_t* output_length, size_t max_output_length) { if (max_output_length < plaintext.size()) { return false; } memcpy(output, plaintext.data(), plaintext.length()); *output_length = plaintext.size(); return true; } size_t MockEncrypter::GetKeySize() const { return 0; } size_t MockEncrypter::GetNoncePrefixSize() const { return 0; } size_t MockEncrypter::GetIVSize() const { return 0; } size_t MockEncrypter::GetMaxPlaintextSize(size_t ciphertext_size) const { return ciphertext_size; } size_t MockEncrypter::GetCiphertextSize(size_t plaintext_size) const { return plaintext_size; } QuicStringPiece MockEncrypter::GetKey() const { return QuicStringPiece(); } QuicStringPiece MockEncrypter::GetNoncePrefix() const { return QuicStringPiece(); } } // namespace net<|fim▁end|>
<|file_name|>extern.rs<|end_file_name|><|fim▁begin|>extern "C" { fn c_func(x: *mut *mut libc::c_void); fn c_func(x: XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX, y: YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY); #[test123] fn foo() -> uint64_t; pub fn bar(); } extern "C" { fn DMR_GetDevice(pHDev: *mut HDEV, searchMode: DeviceSearchMode, pSearchString: *const c_char, devNr: c_uint, wildcard: c_char) -> TDMR_ERROR; fn quux() -> (); // Post comment } extern "Rust" { static ext: u32; // Some comment. pub static mut var: SomeType; } extern "C" { fn syscall(number: libc::c_long, // comment 1 // comm 2 ... /* sup? */) -> libc::c_long;<|fim▁hole|> fn foo(x: *const c_char, ...) -> libc::c_long; } extern "C" { pub fn freopen(filename: *const c_char, mode: *const c_char, mode2: *const c_char, mode3: *const c_char, file: *mut FILE) -> *mut FILE; } extern "C" {}<|fim▁end|>
<|file_name|>test_upload_crash_report_json_schema.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import mock from nose.tools import ok_ from crontabber.app import CronTabber from socorro.unittest.cron.jobs.base import IntegrationTestBase<|fim▁hole|>from socorro.unittest.cron.setup_configman import ( get_config_manager_for_crontabber, ) from socorro.schemas import CRASH_REPORT_JSON_SCHEMA_AS_STRING class TestUploadCrashReportJSONSchemaCronApp(IntegrationTestBase): def _setup_config_manager(self): return get_config_manager_for_crontabber( jobs='socorro.cron.jobs.upload_crash_report_json_schema.' 'UploadCrashReportJSONSchemaCronApp|30d', ) @mock.patch('boto.connect_s3') def test_run(self, connect_s3): key = mock.MagicMock() connect_s3().get_bucket().get_key.return_value = None connect_s3().get_bucket().new_key.return_value = key with self._setup_config_manager().context() as config: tab = CronTabber(config) tab.run_all() information = self._load_structure() app_name = 'upload-crash-report-json-schema' ok_(information[app_name]) ok_(not information[app_name]['last_error']) ok_(information[app_name]['last_success']) key.set_contents_from_string.assert_called_with( CRASH_REPORT_JSON_SCHEMA_AS_STRING )<|fim▁end|>
<|file_name|>mxconfig_nl.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0"> <context> <name>MConfig</name> <message> <location filename="mconfig.cpp" line="440"/> <source>Copying X config appears to have failed. There may be something wrong with the destination root partition.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="445"/> <source>Your current X configuration has been installed on the destination partition.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="468"/> <source>The display text size (dpi) has been updated. The change will take effect when you restart X or reboot</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="514"/> <source>The enabled mouse types have been updated. The new config will take effect when you restart X or reboot. If a type is not checked, it still may be configured automatically by the X server.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="536"/> <source>The monitor specs have been updated. The new config will take effect when you restart X or reboot</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="590"/> <source>The nvidia (new) driver is designed for NVIDIA graphics chips newer than Quatro4 700. The nvidia (new) driver requires that the nvidia-glx package be installed. You must be running from hard drive, and you must be connected to the internet. This will take a while! 
Are you sure you want to do this now?</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="591"/> <location filename="mconfig.cpp" line="619"/> <location filename="mconfig.cpp" line="648"/> <location filename="mconfig.cpp" line="669"/> <source>Yes</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="591"/> <location filename="mconfig.cpp" line="619"/> <location filename="mconfig.cpp" line="648"/> <location filename="mconfig.cpp" line="669"/> <source>No</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="598"/> <location filename="mconfig.cpp" line="624"/> <source>Update package list (apt)...</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="609"/> <location filename="mconfig.cpp" line="636"/> <location filename="mconfig.cpp" line="686"/> <source>Enable driver...</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="618"/> <source>The nvidia (legacy) driver is designed for NVIDIA graphics chips as old as GeForce2 MX and as new as Quatro4 700. The legacy driver requires that the nvidia-glx-legacy-96xx package be installed. You must be running from hard drive, and you must be connected to the internet. This will take a while! Are you sure you want to do this now?</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="647"/> <source>The nvidia-glx package is installed. It must be removed before using a different driver. 
Are you sure you want to do this now?</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="653"/> <location filename="mconfig.cpp" line="674"/> <source>Remove nvidia driver...</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="668"/> <source>The nvidia-glx-legacy-96xx package is installed. It must be removed before using a different driver. Are you sure you want to do this now?</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="702"/> <source>nv driver...enabled</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="707"/> <source>vesa driver...enabled</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="716"/> <location filename="mconfig.cpp" line="732"/> <source>The driver has been changed. You must reboot so the change will take effect.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="730"/> <source>nvidia driver...enabled</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="753"/> <source>Install nvidia-glx driver...</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="766"/> <location filename="mconfig.cpp" line="807"/> <source>Update package list (apt)...failed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="768"/> <location filename="mconfig.cpp" line="809"/> <source>Updating the apt package list failed. 
If you are sure you are on-line you can try again.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="774"/> <source>Install nvidia-glx driver...finalizing</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="786"/> <source>Install nvidia-glx driver...failed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="788"/> <source>Installing the nvidia-glx driver failed. It is highly advised that you select nv or vesa and click Apply immediately.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="795"/> <source>Install nvidia-glx-legacy driver...</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="815"/> <source>Install nvidia-glx-legacy driver...finalizing</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="827"/> <source>Install nvidia-glx-legacy driver...failed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="829"/> <source>Installing the nvidia-glx-legacy driver failed. 
It is highly advised that you select nv or vesa and click Apply immediately.</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="843"/> <source>Remove nvidia driver...failed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1086"/> <source>The MEPIS manual is not installed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1087"/> <source>The MEPIS manual is not installed and no Internet connection could be detected so it cannot be installed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1061"/> <source>mepis-manual</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1066"/> <source>The Mepis manual is not installed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1067"/> <source>The Mepis manual is not installed, do you want to install it now?</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1074"/> <source>The Mepis manual hasn&apos;t been installed</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1075"/> <source>The Mepis manual cannot be installed. 
This may mean you are using the LiveCD or that there are some kind of transitory problem with the repositories,</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1093"/> <source>file:///usr/share/mepis-manual/en/index.html#section05-3-4</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1101"/> <source>About</source> <translation type="unfinished"></translation> </message> <message> <location filename="mconfig.cpp" line="1102"/> <source>&lt;p&gt;&lt;b&gt;MEPIS XConfig&lt;/b&gt;&lt;/p&gt;&lt;p&gt;Copyright (C) 2003-10 by MEPIS LLC. All rights reserved.&lt;/p&gt;</source> <oldsource>&lt;p&gt;&lt;b&gt;MEPIS XConfig&lt;/b&gt;&lt;/p&gt;&lt;p&gt;Copyright (C) 2003-9 by MEPIS LLC. All rights reserved.&lt;/p&gt;</oldsource> <translation type="unfinished"></translation> </message> </context> <context> <name>MEConfig</name> <message> <location filename="meconfig.ui" line="26"/> <source>MEPIS X-Windows Assistant</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="55"/> <source>Repair</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="58"/> <source>Repair an X installation</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="70"/> <source>Copy Current XConfig</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="88"/> <source>Select partition containing root</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="101"/> <source>Select disk to copy to</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="117"/> <source>Partition containing root</source> <translation 
type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="120"/> <source>/ (root) partition:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="139"/> <source>Disk to copy to</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="142"/> <source>Destination disk:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="173"/> <source>&amp;General</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="176"/> <source>General X configs</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="194"/> <source>Relative Size of Text</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="212"/> <source>Select for small text</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="215"/> <source>Small (75 dpi)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="228"/> <source>Select for medium text</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="231"/> <source>Medium (96 dpi)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="244"/> <source>Select for large text</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="247"/> <source>Large (120 dpi)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="278"/> <source>Mouse</source> <translation type="unfinished"></translation> </message> <message> <location 
filename="meconfig.ui" line="281"/> <source>Mouse configs</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="293"/> <source>Manually Enable Mouse Types</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="305"/> <source>Serial</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="312"/> <source>AppleTouch</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="319"/> <source>ALPS</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="326"/> <source>Synaptics</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="333"/> <source>USB</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="340"/> <source>PS/2 (Default)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="350"/> <source>Wacom </source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="378"/> <source>Monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="381"/> <source>Monitor configs</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="399"/> <source>Primary Monitor Settings</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="417"/> <location filename="meconfig.ui" line="455"/> <location filename="meconfig.ui" line="1429"/> <location filename="meconfig.ui" line="1479"/> <source>Horizontal frequency</source> <translation type="unfinished"></translation> </message> <message> 
<location filename="meconfig.ui" line="420"/> <location filename="meconfig.ui" line="1482"/> <source>KHz</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="436"/> <location filename="meconfig.ui" line="477"/> <location filename="meconfig.ui" line="1454"/> <location filename="meconfig.ui" line="1501"/> <source>Vertical frequency</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="439"/> <location filename="meconfig.ui" line="1504"/> <source>Hz</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="458"/> <location filename="meconfig.ui" line="1432"/> <source>Horizontal</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="480"/> <location filename="meconfig.ui" line="1457"/> <source>Vertical</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="499"/> <source>Set the horizontal frequency</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="515"/> <location filename="meconfig.ui" line="1542"/> <source>Set the vertical frequency</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="531"/> <source>Select model of monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="535"/> <location filename="meconfig.ui" line="553"/> <location filename="meconfig.ui" line="1615"/> <location filename="meconfig.ui" line="1661"/> <source>unknown</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="549"/> <source>Select brand of monitor</source> <translation type="unfinished"></translation> </message> <message> <location 
filename="meconfig.ui" line="558"/> <location filename="meconfig.ui" line="1666"/> <source>Acer</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="563"/> <location filename="meconfig.ui" line="1671"/> <source>Action Systems</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="568"/> <location filename="meconfig.ui" line="1676"/> <source>ADI</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="573"/> <source>Amptron</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="578"/> <location filename="meconfig.ui" line="1681"/> <source>AOC</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="583"/> <source>Apple</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="588"/> <location filename="meconfig.ui" line="1686"/> <source>AST</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="593"/> <location filename="meconfig.ui" line="1691"/> <source>AT&amp;T</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="598"/> <source>Belinea</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="603"/> <source>BenQ</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="608"/> <location filename="meconfig.ui" line="1696"/> <source>Compal</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="613"/> <location filename="meconfig.ui" line="1701"/> <source>Compaq</source> <translation type="unfinished"></translation> </message> 
<message> <location filename="meconfig.ui" line="618"/> <location filename="meconfig.ui" line="1706"/> <source>Conrac</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="623"/> <location filename="meconfig.ui" line="1711"/> <source>Cordata</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="628"/> <location filename="meconfig.ui" line="1716"/> <source>Cornerstone</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="633"/> <location filename="meconfig.ui" line="1721"/> <source>CTX</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="638"/> <location filename="meconfig.ui" line="1726"/> <source>Daewoo</source> <translation type="unfinished"></translation> </message> <message><|fim▁hole|> </message> <message> <location filename="meconfig.ui" line="648"/> <location filename="meconfig.ui" line="1736"/> <source>Dell</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="653"/> <location filename="meconfig.ui" line="1741"/> <source>Delta</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="658"/> <location filename="meconfig.ui" line="1746"/> <source>Digital</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="663"/> <location filename="meconfig.ui" line="1751"/> <source>Eizo</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="668"/> <location filename="meconfig.ui" line="1756"/> <source>Elitegroup</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="673"/> <location filename="meconfig.ui" line="1761"/> 
<source>Epson</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="678"/> <location filename="meconfig.ui" line="1766"/> <source>Fujitsu</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="683"/> <location filename="meconfig.ui" line="1771"/> <source>Gateway</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="688"/> <location filename="meconfig.ui" line="1776"/> <source>Generic LCD</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="693"/> <location filename="meconfig.ui" line="1781"/> <source>Generic CRT</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="698"/> <location filename="meconfig.ui" line="1786"/> <source>Goldstar</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="703"/> <location filename="meconfig.ui" line="1791"/> <source>GoldStar</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="708"/> <source>Hansol</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="713"/> <location filename="meconfig.ui" line="1801"/> <source>Hewlett-Packard</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="718"/> <location filename="meconfig.ui" line="1806"/> <source>Hitachi</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="723"/> <location filename="meconfig.ui" line="1811"/> <source>Hyundai</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="728"/> <location filename="meconfig.ui" 
line="1816"/> <source>IBM</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="733"/> <location filename="meconfig.ui" line="1821"/> <source>ICL</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="738"/> <location filename="meconfig.ui" line="1826"/> <source>Iiyama</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="743"/> <location filename="meconfig.ui" line="1831"/> <source>Ikegami</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="748"/> <location filename="meconfig.ui" line="1836"/> <source>Impression</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="753"/> <location filename="meconfig.ui" line="1841"/> <source>KFC</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="758"/> <location filename="meconfig.ui" line="1846"/> <source>KDS</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="763"/> <location filename="meconfig.ui" line="1851"/> <source>LG</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="768"/> <location filename="meconfig.ui" line="1856"/> <source>Lite-On</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="773"/> <location filename="meconfig.ui" line="1861"/> <source>Logitec</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="778"/> <location filename="meconfig.ui" line="1866"/> <source>Magnavox</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="783"/> <location 
filename="meconfig.ui" line="1871"/> <source>MAG</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="788"/> <source>Maxdata</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="793"/> <location filename="meconfig.ui" line="1876"/> <source>Microvitec</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="798"/> <location filename="meconfig.ui" line="1881"/> <source>Miro</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="803"/> <location filename="meconfig.ui" line="1886"/> <source>MiTAC</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="808"/> <location filename="meconfig.ui" line="1891"/> <source>Mitsubishi</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="813"/> <source>Monitronix</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="818"/> <location filename="meconfig.ui" line="1896"/> <source>Nanao</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="823"/> <location filename="meconfig.ui" line="1901"/> <source>NEC</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="828"/> <location filename="meconfig.ui" line="1906"/> <source>Nissei Sangyo</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="833"/> <location filename="meconfig.ui" line="1911"/> <source>Nokia</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="838"/> <location filename="meconfig.ui" line="1916"/> 
<source>Olivetti</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="843"/> <location filename="meconfig.ui" line="1921"/> <source>Optiquest</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="848"/> <location filename="meconfig.ui" line="1926"/> <source>Packard Bell</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="853"/> <location filename="meconfig.ui" line="1931"/> <source>Panasonic</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="858"/> <location filename="meconfig.ui" line="1936"/> <source>Philips</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="863"/> <source>PLB</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="868"/> <location filename="meconfig.ui" line="1941"/> <source>Princeton</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="873"/> <source>Proview</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="878"/> <source>Radius</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="883"/> <source>RasterOps</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="888"/> <location filename="meconfig.ui" line="1946"/> <source>Relisys</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="893"/> <source>Royal</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="898"/> <location filename="meconfig.ui" line="1956"/> 
<source>Sampo</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="903"/> <location filename="meconfig.ui" line="1961"/> <source>Samsung</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="908"/> <location filename="meconfig.ui" line="1966"/> <source>Samtron</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="913"/> <source>Seiko</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="918"/> <location filename="meconfig.ui" line="1971"/> <source>SGI</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="923"/> <source>Siemens</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="928"/> <location filename="meconfig.ui" line="1976"/> <source>Smile</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="933"/> <location filename="meconfig.ui" line="1981"/> <source>Sony</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="938"/> <source>Sun</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="943"/> <source>Targa</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="948"/> <location filename="meconfig.ui" line="1986"/> <source>Tatung</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="953"/> <location filename="meconfig.ui" line="1991"/> <source>Taxan</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="958"/> <location filename="meconfig.ui" 
line="1996"/> <source>TECO</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="963"/> <location filename="meconfig.ui" line="2001"/> <source>Toshiba</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="968"/> <source>TTX</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="973"/> <location filename="meconfig.ui" line="2006"/> <source>TVM</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="978"/> <location filename="meconfig.ui" line="2011"/> <source>Unisys</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="983"/> <location filename="meconfig.ui" line="2016"/> <source>ViewSonic</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="988"/> <location filename="meconfig.ui" line="2021"/> <source>Zenith</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1005"/> <source>Wrong settings can damage the monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1008"/> <source>(be careful!)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1027"/> <source>Monitor frequencies</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1030"/> <location filename="meconfig.ui" line="1592"/> <source>Frequencies:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1049"/> <source>Model of monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" 
line="1052"/> <location filename="meconfig.ui" line="1638"/> <source>Model:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1071"/> <source>Brand of monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1074"/> <location filename="meconfig.ui" line="2079"/> <source>Brand:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1105"/> <source>&amp;NVIDIA</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1108"/> <source>NVIDIA only configs</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1126"/> <source>Driver</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1144"/> <source>Select the xorg vesa driver</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1147"/> <source>vesa (xorg)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1163"/> <source>Select the xorg nv driver</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1166"/> <source>nv (&amp;xorg)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1169"/> <source>Alt+X</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1185"/> <source>Select an older nvidia driver</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1188"/> <source>nvidia (legac&amp;y)</source> <translation type="unfinished"></translation> </message> <message> <location 
filename="meconfig.ui" line="1191"/> <source>Alt+Y</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1204"/> <source>Select the newer nvidia driver</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1207"/> <source>nvidia (new)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1226"/> <source>Progress</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1247"/> <location filename="meconfig.ui" line="1263"/> <source>Status of the changes</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1266"/> <source>Status:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1307"/> <source>nvidia Options</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1328"/> <source>Location of second monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1331"/> <source>Second monitor:</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1350"/> <source>2nd on left</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1353"/> <source>On left</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1372"/> <source>2nd on right</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1375"/> <source>On right</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1391"/> <source>Enable nvidia 
shadow cursor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1394"/> <source>Shadow cursor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1410"/> <source>Enable nvidia twinview mode</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1413"/> <source>TwinView mode</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1523"/> <source>Set the horizontal requency</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1564"/> <source>Wrong settings can damage the 2nd monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1567"/> <source>(dangerous!)</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1589"/> <source>2nd monitor frequencies</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1611"/> <source>Select model of 2nd monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1635"/> <source>Model of 2nd monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1657"/> <source>Select brand of 2nd monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1796"/> <source>Hansol Electronics</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="1951"/> <source>Royal Information Company</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2038"/> 
<source>2nd same as first</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2041"/> <source>Clone</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2057"/> <source>2nd is a TV</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2060"/> <source>TV</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2076"/> <source>Brand of 2nd monitor</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2130"/> <source>About this app</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2133"/> <source>About...</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2150"/> <source>Help</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2209"/> <source>Apply any changes and quit</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2212"/> <source>&amp;OK</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2232"/> <source>Apply any changes</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2235"/> <source>&amp;Apply</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2242"/> <source>Alt+A</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2249"/> <source>Cancel changes and quit</source> <translation type="unfinished"></translation> </message> <message> <location 
filename="meconfig.ui" line="2252"/> <source>&amp;Cancel</source> <translation type="unfinished"></translation> </message> <message> <location filename="meconfig.ui" line="2259"/> <source>Alt+C</source> <translation type="unfinished"></translation> </message> </context> <context> <name>QApplication</name> <message> <location filename="main.cpp" line="40"/> <source>You must run this program as root.</source> <translation type="unfinished"></translation> </message> </context> </TS><|fim▁end|>
<location filename="meconfig.ui" line="643"/> <location filename="meconfig.ui" line="1731"/> <source>Daytek</source> <translation type="unfinished"></translation>
<|file_name|>ClickActivity.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013-2015 by appPlant UG. All rights reserved. * * @APPPLANT_LICENSE_HEADER_START@ * * This file contains Original Code and/or Modifications of Original Code * as defined in and that are subject to the Apache License * Version 2.0 (the 'License'). You may not use this file except in * compliance with the License. Please obtain a copy of the License at * http://opensource.org/licenses/Apache-2.0/ and read it before using this * file. * * The Original Code and all software distributed under the License are * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES, * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT. * Please see the License for the specific language governing rights and * limitations under the License. * * @APPPLANT_LICENSE_HEADER_END@ */ <|fim▁hole|>import de.appplant.cordova.plugin.notification.TriggerReceiver; /** * The receiver activity is triggered when a notification is clicked by a user. * The activity calls the background callback and brings the launch intent * up to foreground. */ public class ClickActivity extends de.appplant.cordova.plugin.notification.ClickActivity { /** * Called when local notification was clicked by the user. * * @param notification * Wrapper around the local notification */ @Override public void onClick(Notification notification) { LocalNotification.fireEvent("click", notification); if (!notification.getOptions().isOngoing()) { String event = notification.isRepeating() ? "clear" : "cancel"; LocalNotification.fireEvent(event, notification); } super.onClick(notification); } /** * Build notification specified by options. 
* * @param builder * Notification builder */ @Override public Notification buildNotification (Builder builder) { return builder .setTriggerReceiver(TriggerReceiver.class) .build(); } }<|fim▁end|>
package de.appplant.cordova.plugin.localnotification; import de.appplant.cordova.plugin.notification.Builder; import de.appplant.cordova.plugin.notification.Notification;
<|file_name|>main.cc<|end_file_name|><|fim▁begin|>#include "log/logger.h" #include "log/stdoutlog.h" //#include <windows.h> //void SetColor(unsigned short forecolor = 4, unsigned short backgroudcolor = 0) //{ // HANDLE hCon = GetStdHandle(STD_OUTPUT_HANDLE); //»ñÈ¡»º³åÇø¾ä±ú // SetConsoleTextAttribute(hCon, forecolor | backgroudcolor); //ÉèÖÃÎı¾¼°±³¾°É« //} int main(){ Logger l; StdoutLogSink cl; l.Trace("trace ....."); l.Debug("debug --- %d", 11); l.Info("info --- %d", 11); l.Warn("warn ..yellow"); l.Error("error --- %d", 11); l.Fatal("fatal--------------"); <|fim▁hole|> return 0; }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Odoo Addon, Open Source Management Solution # Copyright (C) 2014-now Equitania Software GmbH(<http://www.equitania.de>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version.<|fim▁hole|># GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ##############################################################################<|fim▁end|>
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Iwyu(CMakePackage): """include-what-you-use: A tool for use with clang to analyze #includes in C and C++ source files """ homepage = "https://include-what-you-use.org" url = "https://include-what-you-use.org/downloads/include-what-you-use-0.13.src.tar.gz" maintainers = ['sethrj'] version('0.14', sha256='43184397db57660c32e3298a6b1fd5ab82e808a1f5ab0591d6745f8d256200ef') version('0.13', sha256='49294270aa64e8c04182369212cd919f3b3e0e47601b1f935f038c761c265bc9') version('0.12', sha256='a5892fb0abccb820c394e4e245c00ef30fc94e4ae58a048b23f94047c0816025') version('0.11', sha256='2d2877726c4aed9518cbb37673ffbc2b7da9c239bf8fe29432da35c1c0ec367a') patch('iwyu-013-cmake.patch', when='@0.13:0.14') depends_on('[email protected]:10.999', when='@0.14') depends_on('[email protected]:9.999', when='@0.13') depends_on('[email protected]:8.999', when='@0.12') depends_on('[email protected]:7.999', when='@0.11') # Non-X86 CPU use all_targets variants because iwyu use X86AsmParser depends_on('llvm+all_targets', when='target=aarch64:')<|fim▁hole|> depends_on('llvm+all_targets', when='target=ppcle:') depends_on('llvm+all_targets', when='target=ppc64:') depends_on('llvm+all_targets', when='target=ppc64le:') depends_on('llvm+all_targets', when='target=sparc:') depends_on('llvm+all_targets', when='target=sparc64:') @when('@0.14:') def cmake_args(self): return [self.define('CMAKE_CXX_STANDARD', 14), self.define('CMAKE_CXX_EXTENSIONS', False)]<|fim▁end|>
depends_on('llvm+all_targets', when='target=arm:') depends_on('llvm+all_targets', when='target=ppc:')