<|file_name|>geojson.component.d.ts<|end_file_name|>
/// <reference types="openlayers" />
import { OnInit } from '@angular/core';
import { source, ProjectionLike, format } from 'openlayers';
import { LayerVectorComponent } from '../layers';
import { SourceComponent } from './source.component';
export declare class SourceGeoJSONComponent extends SourceComponent implements OnInit {
    instance: source.Vector;
    format: format.Feature;
    defaultDataProjection: ProjectionLike;
    featureProjection: ProjectionLike;
    geometryName: string;
    url: string;
    constructor(layer: LayerVectorComponent);
    ngOnInit(): void;
}
<|file_name|>app.py<|end_file_name|>
import unittest

from contented.app import Application


class AppTests(unittest.TestCase):
    def test_load_app(self):
        app = Application({})
        self.assertTrue(hasattr(app, "settings"))
        self.assertTrue(hasattr(app, "content_map"))
        self.assertTrue(hasattr(app, "request_processors"))
<|file_name|>defargs.cpp<|end_file_name|>
#line 3 "<stdout>"
#define YY_INT_ALIGNED short int
/* A lexical scanner generated by flex */
#define yy_create_buffer defargsYY_create_buffer
#define yy_delete_buffer defargsYY_delete_buffer
#define yy_flex_debug defargsYY_flex_debug
#define yy_init_buffer defargsYY_init_buffer
#define yy_flush_buffer defargsYY_flush_buffer
#define yy_load_buffer_state defargsYY_load_buffer_state
#define yy_switch_to_buffer defargsYY_switch_to_buffer
#define yyin defargsYYin
#define yyleng defargsYYleng
#define yylex defargsYYlex
#define yylineno defargsYYlineno
#define yyout defargsYYout
#define yyrestart defargsYYrestart
#define yytext defargsYYtext
#define yywrap defargsYYwrap
#define yyalloc defargsYYalloc
#define yyrealloc defargsYYrealloc
#define yyfree defargsYYfree
#define FLEX_SCANNER
#define YY_FLEX_MAJOR_VERSION 2
#define YY_FLEX_MINOR_VERSION 5
#define YY_FLEX_SUBMINOR_VERSION 35
#if YY_FLEX_SUBMINOR_VERSION > 0
#define FLEX_BETA
#endif
/* First, we deal with platform-specific or compiler-specific issues. */
/* begin standard C headers. */
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <stdlib.h>
/* end standard C headers. */
/* flex integer type definitions */
#ifndef FLEXINT_H
#define FLEXINT_H
/* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
/* C99 says to define __STDC_LIMIT_MACROS before including stdint.h,
* if you want the limit (max/min) macros for int types.
*/
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS 1
#endif
#include <inttypes.h>
typedef int8_t flex_int8_t;
typedef uint8_t flex_uint8_t;
typedef int16_t flex_int16_t;
typedef uint16_t flex_uint16_t;
typedef int32_t flex_int32_t;
typedef uint32_t flex_uint32_t;
#else
typedef signed char flex_int8_t;
typedef short int flex_int16_t;
typedef int flex_int32_t;
typedef unsigned char flex_uint8_t;
typedef unsigned short int flex_uint16_t;
typedef unsigned int flex_uint32_t;
#endif /* ! C99 */
/* Limits of integral types. */
#ifndef INT8_MIN
#define INT8_MIN (-128)
#endif
#ifndef INT16_MIN
#define INT16_MIN (-32767-1)
#endif
#ifndef INT32_MIN
#define INT32_MIN (-2147483647-1)
#endif
#ifndef INT8_MAX
#define INT8_MAX (127)
#endif
#ifndef INT16_MAX
#define INT16_MAX (32767)
#endif
#ifndef INT32_MAX
#define INT32_MAX (2147483647)
#endif
#ifndef UINT8_MAX
#define UINT8_MAX (255U)
#endif
#ifndef UINT16_MAX
#define UINT16_MAX (65535U)
#endif
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
#endif
#endif /* ! FLEXINT_H */
#ifdef __cplusplus
/* The "const" storage-class-modifier is valid. */
#define YY_USE_CONST
#else /* ! __cplusplus */
/* C99 requires __STDC__ to be defined as 1. */
#if defined (__STDC__)
#define YY_USE_CONST
#endif /* defined (__STDC__) */
#endif /* ! __cplusplus */
#ifdef YY_USE_CONST
#define yyconst const
#else
#define yyconst
#endif
/* Returned upon end-of-file. */
#define YY_NULL 0
/* Promotes a possibly negative, possibly signed char to an unsigned
* integer for use as an array index. If the signed char is negative,
* we want to instead treat it as an 8-bit unsigned char, hence the
* double cast.
*/
#define YY_SC_TO_UI(c) ((unsigned int) (unsigned char) c)
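/* For example, on a platform where plain char is signed, the Latin-1 byte
 * 0xE9 ('é') is read back as the char value -23; the double cast maps it to
 * the table index 233 instead of a negative, out-of-range index (a hedged
 * illustration, not part of the generated scanner):
 *
 *   char c = (char) 0xE9;               // -23 where char is signed
 *   unsigned int idx = YY_SC_TO_UI(c);  // 233
 */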
/* Enter a start condition. This macro really ought to take a parameter,
* but we do it the disgusting crufty way forced on us by the ()-less
* definition of BEGIN.
*/
#define BEGIN (yy_start) = 1 + 2 *
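/* A hedged illustration of the expansion (ReadFuncArgType is one of this
 * scanner's start conditions, defined further down):
 *
 *   BEGIN(ReadFuncArgType);  // expands to: (yy_start) = 1 + 2 * (ReadFuncArgType);
 *
 * YY_START below inverts the mapping to recover the start condition.
 */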
/* Translate the current start state into a value that can be later handed
* to BEGIN to return to the state. The YYSTATE alias is for lex
* compatibility.
*/
#define YY_START (((yy_start) - 1) / 2)
#define YYSTATE YY_START
/* Action number for EOF rule of a given start state. */
#define YY_STATE_EOF(state) (YY_END_OF_BUFFER + state + 1)
/* Special action meaning "start processing a new file". */
#define YY_NEW_FILE defargsYYrestart(defargsYYin )
#define YY_END_OF_BUFFER_CHAR 0
/* Size of default input buffer. */
#ifndef YY_BUF_SIZE
#define YY_BUF_SIZE 262144
#endif
/* The state buf must be large enough to hold one state per character in the main buffer.
*/
#define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type))
#ifndef YY_TYPEDEF_YY_BUFFER_STATE
#define YY_TYPEDEF_YY_BUFFER_STATE
typedef struct yy_buffer_state *YY_BUFFER_STATE;
#endif
extern int defargsYYleng;
extern FILE *defargsYYin, *defargsYYout;
#define EOB_ACT_CONTINUE_SCAN 0
#define EOB_ACT_END_OF_FILE 1
#define EOB_ACT_LAST_MATCH 2
#define YY_LESS_LINENO(n)
/* Return all but the first "n" matched characters back to the input stream. */
#define yyless(n) \
do \
{ \
/* Undo effects of setting up defargsYYtext. */ \
int yyless_macro_arg = (n); \
YY_LESS_LINENO(yyless_macro_arg);\
*yy_cp = (yy_hold_char); \
YY_RESTORE_YY_MORE_OFFSET \
(yy_c_buf_p) = yy_cp = yy_bp + yyless_macro_arg - YY_MORE_ADJ; \
YY_DO_BEFORE_ACTION; /* set up defargsYYtext again */ \
} \
while ( 0 )
#define unput(c) yyunput( c, (yytext_ptr) )
#ifndef YY_TYPEDEF_YY_SIZE_T
#define YY_TYPEDEF_YY_SIZE_T
typedef size_t yy_size_t;
#endif
#ifndef YY_STRUCT_YY_BUFFER_STATE
#define YY_STRUCT_YY_BUFFER_STATE
struct yy_buffer_state
{
FILE *yy_input_file;
char *yy_ch_buf; /* input buffer */
char *yy_buf_pos; /* current position in input buffer */
/* Size of input buffer in bytes, not including room for EOB
* characters.
*/
yy_size_t yy_buf_size;
/* Number of characters read into yy_ch_buf, not including EOB
* characters.
*/
int yy_n_chars;
/* Whether we "own" the buffer - i.e., we know we created it,
* and can realloc() it to grow it, and should free() it to
* delete it.
*/
int yy_is_our_buffer;
/* Whether this is an "interactive" input source; if so, and
* if we're using stdio for input, then we want to use getc()
* instead of fread(), to make sure we stop fetching input after
* each newline.
*/
int yy_is_interactive;
/* Whether we're considered to be at the beginning of a line.
* If so, '^' rules will be active on the next match, otherwise
* not.
*/
int yy_at_bol;
int yy_bs_lineno; /**< The line count. */
int yy_bs_column; /**< The column count. */
/* Whether to try to fill the input buffer when we reach the
* end of it.
*/
int yy_fill_buffer;
int yy_buffer_status;
#define YY_BUFFER_NEW 0
#define YY_BUFFER_NORMAL 1
/* When an EOF's been seen but there's still some text to process
* then we mark the buffer as YY_EOF_PENDING, to indicate that we
* shouldn't try reading from the input source any more. We might
* still have a bunch of tokens to match, though, because of
* possible backing-up.
*
* When we actually see the EOF, we change the status to "new"
* (via defargsYYrestart()), so that the user can continue scanning by
* just pointing defargsYYin at a new input file.
*/
#define YY_BUFFER_EOF_PENDING 2
};
#endif /* !YY_STRUCT_YY_BUFFER_STATE */
/* Stack of input buffers. */
static size_t yy_buffer_stack_top = 0; /**< index of top of stack. */
static size_t yy_buffer_stack_max = 0; /**< capacity of stack. */
static YY_BUFFER_STATE * yy_buffer_stack = 0; /**< Stack as an array. */
/* We provide macros for accessing buffer states in case in the
* future we want to put the buffer states in a more general
* "scanner state".
*
* Returns the top of the stack, or NULL.
*/
#define YY_CURRENT_BUFFER ( (yy_buffer_stack) \
? (yy_buffer_stack)[(yy_buffer_stack_top)] \
: NULL)
/* Same as previous macro, but useful when we know that the buffer stack is not
* NULL or when we need an lvalue. For internal use only.
*/
#define YY_CURRENT_BUFFER_LVALUE (yy_buffer_stack)[(yy_buffer_stack_top)]
/* yy_hold_char holds the character lost when defargsYYtext is formed. */
static char yy_hold_char;
static int yy_n_chars; /* number of characters read into yy_ch_buf */
int defargsYYleng;
/* Points to current character in buffer. */
static char *yy_c_buf_p = (char *) 0;
static int yy_init = 0; /* whether we need to initialize */
static int yy_start = 0; /* start state number */
/* Flag which is used to allow defargsYYwrap()'s to do buffer switches
* instead of setting up a fresh defargsYYin. A bit of a hack ...
*/
static int yy_did_buffer_switch_on_eof;
void defargsYYrestart (FILE *input_file );
void defargsYY_switch_to_buffer (YY_BUFFER_STATE new_buffer );
YY_BUFFER_STATE defargsYY_create_buffer (FILE *file,int size );
void defargsYY_delete_buffer (YY_BUFFER_STATE b );
void defargsYY_flush_buffer (YY_BUFFER_STATE b );
void defargsYYpush_buffer_state (YY_BUFFER_STATE new_buffer );
void defargsYYpop_buffer_state (void );
static void defargsYYensure_buffer_stack (void );
static void defargsYY_load_buffer_state (void );
static void defargsYY_init_buffer (YY_BUFFER_STATE b,FILE *file );
#define YY_FLUSH_BUFFER defargsYY_flush_buffer(YY_CURRENT_BUFFER )
YY_BUFFER_STATE defargsYY_scan_buffer (char *base,yy_size_t size );
YY_BUFFER_STATE defargsYY_scan_string (yyconst char *yy_str );
YY_BUFFER_STATE defargsYY_scan_bytes (yyconst char *bytes,int len );
void *defargsYYalloc (yy_size_t );
void *defargsYYrealloc (void *,yy_size_t );
void defargsYYfree (void * );
#define yy_new_buffer defargsYY_create_buffer
#define yy_set_interactive(is_interactive) \
{ \
if ( ! YY_CURRENT_BUFFER ){ \
defargsYYensure_buffer_stack (); \
YY_CURRENT_BUFFER_LVALUE = \
defargsYY_create_buffer(defargsYYin,YY_BUF_SIZE ); \
} \
YY_CURRENT_BUFFER_LVALUE->yy_is_interactive = is_interactive; \
}
#define yy_set_bol(at_bol) \
{ \
if ( ! YY_CURRENT_BUFFER ){\
defargsYYensure_buffer_stack (); \
YY_CURRENT_BUFFER_LVALUE = \
defargsYY_create_buffer(defargsYYin,YY_BUF_SIZE ); \
} \
YY_CURRENT_BUFFER_LVALUE->yy_at_bol = at_bol; \
}
#define YY_AT_BOL() (YY_CURRENT_BUFFER_LVALUE->yy_at_bol)
/* Begin user sect3 */
#define defargsYYwrap(n) 1
#define YY_SKIP_YYWRAP
typedef unsigned char YY_CHAR;
FILE *defargsYYin = (FILE *) 0, *defargsYYout = (FILE *) 0;
typedef int yy_state_type;
extern int defargsYYlineno;
int defargsYYlineno = 1;
extern char *defargsYYtext;
#define yytext_ptr defargsYYtext
static yy_state_type yy_get_previous_state (void );
static yy_state_type yy_try_NUL_trans (yy_state_type current_state );
static int yy_get_next_buffer (void );
static void yy_fatal_error (yyconst char msg[] );
/* Done after the current pattern has been matched and before the
* corresponding action - sets up defargsYYtext.
*/
#define YY_DO_BEFORE_ACTION \
(yytext_ptr) = yy_bp; \
defargsYYleng = (size_t) (yy_cp - yy_bp); \
(yy_hold_char) = *yy_cp; \
*yy_cp = '\0'; \
(yy_c_buf_p) = yy_cp;
#define YY_NUM_RULES 56
#define YY_END_OF_BUFFER 57
/* This struct is not used in this scanner,
but its presence is necessary. */
struct yy_trans_info
{
flex_int32_t yy_verify;
flex_int32_t yy_nxt;
};
static yyconst flex_int16_t yy_acclist[291] =
{ 0,
2, 2, 57, 55, 56, 54, 56, 55, 56, 1,
55, 56, 36, 55, 56, 29, 36, 55, 56, 36,
55, 56, 36, 55, 56, 36, 55, 56, 36, 55,
56, 36, 55, 56, 36, 55, 56, 38, 55, 56,
16, 38, 55, 56, 17, 18, 38, 55, 56, 38,
55, 56, 37, 38, 55, 56, 17, 38, 55, 56,
23, 38, 55, 56, 24, 38, 55, 56, 21, 38,
55, 56, 22, 38, 55, 56, 25, 38, 55, 56,
26, 38, 55, 56, 34, 55, 56, 2, 34, 55,
56, 34, 55, 56, 15, 34, 55, 56, 32, 34,
55, 56, 34, 55, 56, 34, 55, 56, 15, 34,
55, 56, 30, 34, 55, 56, 32, 34, 55, 56,
33, 34, 55, 56, 34, 55, 56, 15, 34, 55,
56, 8, 36, 55, 56, 36, 55, 56, 15, 36,
55, 56, 32, 36, 55, 56, 15, 36, 55, 56,
32, 36, 55, 56, 36, 55, 56, 36, 55, 56,
36, 55, 56, 13, 34, 55, 56, 10, 33, 34,
55, 56, 55, 56, 55, 56, 55, 56, 55, 56,
55, 56, 47, 52, 55, 56, 51, 54, 56, 52,
55, 56, 47, 52, 55, 56, 48, 55, 56, 50,
54, 56, 48, 55, 56, 44, 55, 56, 44, 55,
56, 45, 54, 56, 44, 55, 56, 44, 55, 56,
35, 27, 28, 18, 17, 37, 19, 20, 2, 33,
16393, 14, 33, 3, 11, 12, 10, 33, 42, 41,
47, 49, 47, 48, 48, 48, 53, 28, 17, 17,
16393, 8201, 6, 6, 7, 46, 47, 53, 48, 53,
53, 28, 8201, 5, 4, 5, 47, 53, 48, 53,
28, 31, 4, 39, 28, 43, 28, 43, 28, 40,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28
} ;
static yyconst flex_int16_t yy_accept[244] =
{ 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 2, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 4, 6, 8, 10, 13, 16, 20, 23, 26,
29, 32, 35, 38, 41, 45, 50, 53, 57, 61,
65, 69, 73, 77, 81, 85, 88, 92, 95, 99,
103, 106, 109, 113, 117, 121, 125, 128, 132, 136,
139, 143, 147, 151, 155, 158, 161, 164, 168, 173,
175, 177, 179, 181, 183, 187, 190, 193, 197, 200,
203, 206, 209, 212, 215, 218, 221, 221, 221, 222,
223, 223, 224, 225, 226, 227, 227, 228, 229, 230,
231, 231, 231, 232, 232, 232, 233, 234, 234, 235,
235, 235, 235, 235, 235, 235, 236, 237, 239, 239,
239, 240, 240, 241, 241, 241, 242, 243, 244, 245,
246, 247, 247, 247, 248, 248, 249, 250, 251, 251,
251, 251, 252, 253, 253, 253, 254, 254, 255, 255,
255, 256, 256, 257, 257, 257, 259, 261, 261, 261,
262, 262, 263, 263, 264, 264, 265, 267, 267, 267,
267, 267, 269, 271, 271, 271, 271, 272, 273, 274,
274, 274, 275, 275, 275, 275, 275, 276, 276, 276,
277, 277, 277, 278, 278, 278, 279, 279, 279, 280,
280, 281, 281, 281, 282, 282, 282, 283, 283, 283,
284, 284, 284, 285, 285, 285, 286, 286, 286, 287,
287, 287, 288, 288, 288, 289, 289, 289, 290, 290,
291, 291, 291
} ;
static yyconst flex_int32_t yy_ec[256] =
{ 0,
1, 1, 1, 1, 1, 1, 1, 1, 2, 3,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 2, 4, 5, 1, 6, 1, 7, 8, 9,
10, 11, 1, 12, 13, 1, 14, 15, 16, 16,
16, 16, 16, 16, 16, 17, 18, 19, 1, 20,
21, 22, 1, 1, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 24, 23, 23, 23, 23,
23, 25, 23, 23, 26, 23, 23, 23, 23, 23,
27, 28, 29, 7, 23, 1, 30, 23, 31, 32,
33, 34, 23, 23, 35, 23, 23, 36, 23, 37,
38, 23, 23, 39, 40, 41, 42, 43, 23, 23,
23, 23, 44, 1, 45, 1, 1, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23
} ;
static yyconst flex_int32_t yy_meta[46] =
{ 0,
1, 2, 3, 1, 1, 1, 1, 1, 4, 5,
6, 1, 1, 1, 7, 7, 7, 7, 8, 1,
1, 1, 9, 9, 9, 9, 1, 10, 1, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 1, 1
} ;
static yyconst flex_int16_t yy_base[294] =
{ 0,
0, 1, 2, 3, 21, 47, 15, 17, 75, 0,
796, 790, 35, 44, 27, 29, 120, 0, 163, 0,
207, 0, 250, 291, 37, 167, 5, 7, 44, 177,
799, 877, 877, 19, 877, 877, 877, 775, 45, 42,
171, 0, 786, 877, 877, 786, 183, 0, 780, 877,
877, 761, 757, 877, 877, 877, 771, 0, 188, 172,
748, 189, 181, 877, 252, 0, 735, 877, 877, 735,
877, 182, 235, 255, 736, 750, 242, 256, 0, 5,
730, 167, 708, 707, 0, 877, 729, 261, 0, 877,
268, 877, 246, 877, 274, 276, 257, 282, 877, 877,
732, 729, 726, 293, 0, 725, 877, 877, 723, 0,
268, 295, 333, 301, 278, 877, 0, 690, 877, 708,
305, 705, 700, 682, 298, 877, 877, 0, 296, 672,
877, 314, 877, 661, 656, 0, 877, 313, 0, 315,
314, 656, 646, 663, 675, 669, 330, 0, 322, 654,
331, 2, 376, 326, 335, 877, 663, 657, 345, 653,
877, 627, 877, 615, 623, 627, 624, 601, 604, 877,
630, 624, 420, 0, 606, 877, 877, 417, 611, 578,
576, 0, 0, 571, 559, 584, 580, 877, 877, 575,
573, 877, 546, 544, 535, 568, 566, 553, 519, 550,
513, 534, 520, 512, 485, 510, 477, 503, 502, 497,
877, 472, 499, 493, 488, 490, 481, 421, 424, 423,
417, 419, 418, 377, 357, 350, 345, 346, 338, 327,
326, 294, 281, 257, 201, 195, 162, 65, 59, 877,
33, 877, 433, 443, 453, 463, 473, 483, 493, 501,
504, 19, 513, 517, 526, 536, 540, 549, 559, 569,
577, 586, 596, 605, 615, 624, 633, 642, 651, 660,
669, 678, 687, 696, 705, 714, 723, 732, 741, 750,
759, 768, 777, 786, 795, 804, 813, 822, 831, 840,
849, 858, 867
} ;
static yyconst flex_int16_t yy_def[294] =
{ 0,
243, 243, 243, 243, 244, 244, 244, 244, 242, 9,
9, 9, 9, 9, 9, 9, 242, 17, 244, 19,
242, 21, 243, 243, 245, 245, 246, 246, 247, 247,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 248, 249, 242, 242, 250, 242, 251, 250, 242,
242, 242, 242, 242, 242, 242, 242, 252, 253, 242,
242, 242, 242, 242, 242, 254, 255, 242, 242, 256,
242, 242, 242, 242, 242, 242, 242, 242, 257, 242,
242, 242, 242, 242, 258, 242, 242, 258, 259, 242,
259, 242, 242, 242, 242, 242, 242, 242, 242, 242,
260, 260, 250, 261, 251, 250, 242, 242, 242, 254,
253, 253, 253, 242, 242, 242, 254, 255, 242, 242,
262, 242, 263, 242, 242, 242, 242, 257, 242, 264,
242, 242, 242, 242, 242, 258, 242, 258, 259, 259,
259, 242, 242, 242, 265, 265, 261, 147, 253, 242,
253, 113, 242, 242, 242, 242, 242, 242, 242, 266,
242, 264, 242, 242, 242, 258, 259, 242, 242, 242,
267, 267, 253, 153, 242, 242, 242, 242, 268, 242,
242, 258, 259, 242, 242, 269, 269, 242, 242, 242,
270, 242, 242, 242, 242, 271, 271, 272, 242, 242,
242, 273, 273, 274, 242, 242, 242, 275, 275, 276,
242, 242, 277, 277, 278, 279, 279, 280, 281, 281,
282, 283, 283, 284, 285, 285, 286, 287, 287, 288,
289, 289, 290, 291, 291, 292, 242, 242, 293, 242,
242, 0, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242
} ;
static yyconst flex_int16_t yy_nxt[923] =
{ 0,
242, 242, 33, 33, 33, 33, 129, 90, 111, 90,
35, 35, 111, 34, 34, 34, 34, 33, 91, 33,
91, 35, 35, 33, 43, 37, 43, 110, 39, 97,
39, 130, 98, 38, 39, 44, 44, 44, 44, 86,
40, 161, 41, 50, 51, 93, 94, 87, 42, 33,
88, 37, 50, 51, 52, 97, 53, 95, 98, 38,
39, 99, 99, 52, 96, 53, 40, 161, 41, 240,
54, 55, 54, 55, 42, 44, 44, 33, 44, 44,
44, 44, 44, 45, 46, 44, 44, 44, 47, 44,
44, 44, 44, 44, 44, 44, 44, 48, 48, 48,
48, 44, 44, 44, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48, 44, 44,
56, 57, 33, 56, 56, 58, 56, 56, 59, 60,
56, 60, 61, 62, 56, 56, 56, 56, 56, 63,
64, 65, 66, 66, 66, 66, 67, 56, 56, 66,
66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
66, 66, 66, 68, 56, 33, 240, 69, 132, 86,
70, 71, 72, 114, 72, 38, 39, 87, 93, 94,
88, 133, 73, 114, 74, 115, 75, 76, 75, 112,
95, 99, 99, 97, 113, 115, 98, 96, 113, 97,
116, 116, 98, 161, 77, 238, 71, 56, 56, 33,
56, 56, 58, 56, 56, 56, 78, 56, 56, 56,
62, 56, 56, 56, 56, 56, 56, 56, 56, 79,
79, 79, 79, 56, 56, 56, 79, 79, 79, 79,
79, 79, 79, 79, 79, 79, 79, 79, 79, 79,
56, 56, 33, 114, 99, 99, 114, 125, 124, 80,
144, 238, 81, 34, 126, 115, 122, 144, 115, 149,
82, 97, 116, 116, 138, 99, 99, 132, 140, 142,
83, 141, 127, 143, 97, 144, 150, 98, 154, 161,
133, 155, 84, 33, 106, 144, 151, 129, 235, 125,
80, 113, 114, 81, 34, 113, 126, 147, 147, 147,
147, 82, 158, 150, 115, 132, 166, 167, 167, 159,
159, 83, 130, 149, 127, 167, 166, 167, 133, 175,
235, 106, 151, 84, 152, 161, 175, 113, 175, 113,
150, 113, 232, 113, 147, 147, 147, 147, 175, 150,
232, 150, 177, 161, 229, 153, 153, 153, 153, 178,
178, 229, 153, 153, 153, 153, 153, 153, 153, 153,
153, 153, 153, 153, 153, 153, 111, 149, 111, 111,
111, 111, 111, 111, 111, 161, 111, 111, 111, 111,
174, 174, 174, 174, 150, 111, 111, 111, 174, 174,
174, 174, 111, 111, 111, 174, 174, 174, 174, 174,
174, 174, 174, 174, 174, 174, 174, 174, 174, 111,
111, 112, 226, 226, 189, 161, 113, 223, 223, 161,
113, 190, 190, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 36, 36, 36, 36, 36, 36, 36,
36, 36, 36, 85, 85, 85, 85, 85, 85, 85,
85, 85, 85, 89, 89, 89, 89, 89, 89, 89,
89, 89, 89, 92, 92, 92, 92, 92, 92, 92,
92, 92, 92, 100, 100, 220, 100, 100, 100, 100,
100, 100, 100, 101, 220, 101, 161, 217, 101, 101,
101, 101, 104, 217, 200, 161, 214, 214, 212, 104,
105, 206, 105, 111, 111, 111, 111, 211, 111, 111,
161, 111, 111, 117, 209, 117, 118, 118, 118, 118,
118, 118, 118, 118, 118, 118, 120, 120, 209, 120,
120, 120, 120, 120, 120, 120, 128, 207, 128, 136,
136, 206, 136, 136, 205, 136, 136, 136, 136, 139,
139, 161, 139, 139, 139, 139, 139, 139, 139, 145,
203, 145, 203, 201, 145, 145, 145, 145, 148, 200,
199, 161, 189, 148, 197, 148, 157, 157, 197, 157,
157, 157, 157, 157, 157, 157, 160, 195, 160, 160,
194, 160, 160, 160, 160, 162, 162, 162, 162, 162,
162, 162, 162, 162, 162, 171, 193, 171, 192, 161,
171, 171, 171, 171, 179, 188, 179, 179, 187, 179,
179, 179, 179, 186, 187, 186, 185, 184, 186, 186,
186, 186, 191, 183, 191, 191, 182, 191, 191, 191,
191, 196, 181, 196, 180, 163, 196, 196, 196, 196,
198, 161, 198, 198, 176, 198, 198, 198, 198, 202,
176, 202, 173, 172, 202, 202, 202, 202, 204, 172,
204, 204, 170, 204, 204, 204, 204, 208, 169, 208,
168, 165, 208, 208, 208, 208, 210, 164, 210, 210,
163, 210, 210, 210, 210, 213, 122, 213, 161, 123,
213, 213, 213, 213, 215, 156, 215, 215, 119, 215,
215, 215, 215, 216, 109, 216, 106, 103, 216, 216,
216, 216, 218, 146, 218, 218, 146, 218, 218, 218,
218, 219, 137, 219, 135, 134, 219, 219, 219, 219,
221, 131, 221, 221, 123, 221, 221, 221, 221, 222,
122, 222, 121, 119, 222, 222, 222, 222, 224, 116,
224, 224, 109, 224, 224, 224, 224, 225, 108, 225,
107, 106, 225, 225, 225, 225, 227, 103, 227, 227,
102, 227, 227, 227, 227, 228, 99, 228, 242, 49,
228, 228, 228, 228, 230, 49, 230, 230, 242, 230,
230, 230, 230, 231, 242, 231, 242, 242, 231, 231,
231, 231, 233, 242, 233, 233, 242, 233, 233, 233,
233, 234, 242, 234, 242, 242, 234, 234, 234, 234,
236, 242, 236, 236, 242, 236, 236, 236, 236, 237,
242, 237, 242, 242, 237, 237, 237, 237, 239, 242,
239, 239, 242, 239, 239, 239, 239, 241, 242, 241,
241, 242, 241, 241, 241, 241, 31, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242
} ;
static yyconst flex_int16_t yy_chk[923] =
{ 0,
0, 0, 1, 2, 3, 4, 80, 27, 152, 28,
3, 4, 152, 1, 2, 3, 4, 7, 27, 8,
28, 3, 4, 5, 7, 5, 8, 252, 7, 34,
8, 80, 34, 5, 5, 15, 15, 16, 16, 25,
5, 241, 5, 13, 13, 29, 29, 25, 5, 6,
25, 6, 14, 14, 13, 39, 13, 29, 39, 6,
6, 40, 40, 14, 29, 14, 6, 239, 6, 238,
15, 15, 16, 16, 6, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 19, 237, 19, 82, 26,
19, 19, 19, 60, 19, 19, 19, 26, 30, 30,
26, 82, 19, 72, 19, 60, 19, 19, 19, 59,
30, 41, 41, 47, 59, 72, 47, 30, 59, 62,
63, 63, 62, 236, 19, 235, 19, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 23, 65, 73, 73, 74, 78, 77, 23,
97, 234, 23, 23, 78, 65, 77, 97, 74, 111,
23, 88, 65, 65, 88, 74, 74, 96, 91, 93,
23, 91, 78, 93, 95, 98, 111, 95, 115, 233,
96, 115, 23, 24, 104, 98, 112, 129, 232, 125,
24, 112, 114, 24, 24, 112, 125, 104, 104, 104,
104, 24, 121, 112, 114, 132, 138, 141, 140, 121,
121, 24, 129, 149, 125, 140, 138, 141, 132, 154,
231, 147, 151, 24, 113, 230, 154, 151, 155, 113,
149, 151, 229, 113, 147, 147, 147, 147, 155, 151,
228, 113, 159, 227, 226, 113, 113, 113, 113, 159,
159, 225, 113, 113, 113, 113, 113, 113, 113, 113,
113, 113, 113, 113, 113, 113, 153, 153, 153, 153,
153, 153, 153, 153, 153, 224, 153, 153, 153, 153,
153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
153, 173, 223, 222, 178, 221, 173, 220, 219, 218,
173, 178, 178, 243, 243, 243, 243, 243, 243, 243,
243, 243, 243, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 245, 245, 245, 245, 245, 245, 245,
245, 245, 245, 246, 246, 246, 246, 246, 246, 246,
246, 246, 246, 247, 247, 247, 247, 247, 247, 247,
247, 247, 247, 248, 248, 217, 248, 248, 248, 248,
248, 248, 248, 249, 216, 249, 215, 214, 249, 249,
249, 249, 250, 213, 212, 210, 209, 208, 207, 250,
251, 206, 251, 253, 253, 253, 253, 205, 253, 253,
204, 253, 253, 254, 203, 254, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 256, 256, 202, 256,
256, 256, 256, 256, 256, 256, 257, 201, 257, 258,
258, 200, 258, 258, 199, 258, 258, 258, 258, 259,
259, 198, 259, 259, 259, 259, 259, 259, 259, 260,
197, 260, 196, 195, 260, 260, 260, 260, 261, 194,
193, 191, 190, 261, 187, 261, 262, 262, 186, 262,
262, 262, 262, 262, 262, 262, 263, 185, 263, 263,
184, 263, 263, 263, 263, 264, 264, 264, 264, 264,
264, 264, 264, 264, 264, 265, 181, 265, 180, 179,
265, 265, 265, 265, 266, 175, 266, 266, 172, 266,
266, 266, 266, 267, 171, 267, 169, 168, 267, 267,
267, 267, 268, 167, 268, 268, 166, 268, 268, 268,
268, 269, 165, 269, 164, 162, 269, 269, 269, 269,
270, 160, 270, 270, 158, 270, 270, 270, 270, 271,
157, 271, 150, 146, 271, 271, 271, 271, 272, 145,
272, 272, 144, 272, 272, 272, 272, 273, 143, 273,
142, 135, 273, 273, 273, 273, 274, 134, 274, 274,
130, 274, 274, 274, 274, 275, 124, 275, 123, 122,
275, 275, 275, 275, 276, 120, 276, 276, 118, 276,
276, 276, 276, 277, 109, 277, 106, 103, 277, 277,
277, 277, 278, 102, 278, 278, 101, 278, 278, 278,
278, 279, 87, 279, 84, 83, 279, 279, 279, 279,
280, 81, 280, 280, 76, 280, 280, 280, 280, 281,
75, 281, 70, 67, 281, 281, 281, 281, 282, 61,
282, 282, 57, 282, 282, 282, 282, 283, 53, 283,
52, 49, 283, 283, 283, 283, 284, 46, 284, 284,
43, 284, 284, 284, 284, 285, 38, 285, 31, 12,
285, 285, 285, 285, 286, 11, 286, 286, 0, 286,
286, 286, 286, 287, 0, 287, 0, 0, 287, 287,
287, 287, 288, 0, 288, 288, 0, 288, 288, 288,
288, 289, 0, 289, 0, 0, 289, 289, 289, 289,
290, 0, 290, 290, 0, 290, 290, 290, 290, 291,
0, 291, 0, 0, 291, 291, 291, 291, 292, 0,
292, 292, 0, 292, 292, 292, 292, 293, 0, 293,
293, 0, 293, 293, 293, 293, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242, 242, 242, 242, 242, 242, 242, 242, 242,
242, 242
} ;
extern int defargsYY_flex_debug;
int defargsYY_flex_debug = 0;
static yy_state_type *yy_state_buf=0, *yy_state_ptr=0;
static char *yy_full_match;
static int yy_lp;
static int yy_looking_for_trail_begin = 0;
static int yy_full_lp;
static int *yy_full_state;
#define YY_TRAILING_MASK 0x2000
#define YY_TRAILING_HEAD_MASK 0x4000
#define REJECT \
{ \
*yy_cp = (yy_hold_char); /* undo effects of setting up defargsYYtext */ \
yy_cp = (yy_full_match); /* restore poss. backed-over text */ \
(yy_lp) = (yy_full_lp); /* restore orig. accepting pos. */ \
(yy_state_ptr) = (yy_full_state); /* restore orig. state */ \
yy_current_state = *(yy_state_ptr); /* restore curr. state */ \
++(yy_lp); \
goto find_rule; \
}
#define yymore() yymore_used_but_not_detected
#define YY_MORE_ADJ 0
#define YY_RESTORE_YY_MORE_OFFSET
char *defargsYYtext;
#line 1 "defargs.l"
/******************************************************************************
*
*
*
* Copyright (C) 1997-2014 by Dimitri van Heesch.
*
* Permission to use, copy, modify, and distribute this software and its
* documentation under the terms of the GNU General Public License is hereby
* granted. No representations are made about the suitability of this software
* for any purpose. It is provided "as is" without express or implied warranty.
* See the GNU General Public License for more details.
*
* Documents produced by Doxygen are derivative works derived from the
* input used in their production; they are not affected by this license.
*
*/
/*! \file
* This scanner is used to convert a string into a list of function or
* template arguments. Each parsed argument results in an Argument struct,
* that is put into an ArgumentList in declaration order.
* Comment blocks for arguments can also be included in the string.
* The argument string does not contain new-lines (except inside any
* comment blocks).
* An Argument consists of the string fields:
* type, name, default value, and documentation.
* The Argument list as a whole can be pure, constant or volatile.
*
* Examples of input strings are:
* \code
* "(int a,int b) const"
* "(const char *s="hello world",int=5) = 0"
* "<class T,class N>"
* "(char c,const char)"
* \endcode
*
* Note: It is not always possible to distinguish between the name and
* type of an argument. In case of doubt the name is added to the
* type, and the matchArgumentList in util.cpp is used to
* further determine the correct separation.
*/
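/* A minimal usage sketch. It assumes the stringToArgumentList() entry point
 * declared in defargs.h; the Argument/ArgumentList field names are the ones
 * assigned in the rules below, and everything else is illustrative:
 * \code
 *   ArgumentList *al = new ArgumentList;
 *   stringToArgumentList("(const char *s=\"hello\",int n=5) const", al);
 *   // al now holds two Argument entries, roughly:
 *   //   type="const char *"  name="s"  defval="\"hello\""
 *   //   type="int"           name="n"  defval="5"
 *   // and al->constSpecifier==TRUE because of the trailing "const".
 * \endcode
 */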
#line 44 "defargs.l"
/*
* includes
*/
#include <stdio.h>
//#include <iostream.h>
#include <assert.h>
#include <ctype.h>
#include <qregexp.h>
#include "defargs.h"
#include "entry.h"
#include "util.h"
#include "arguments.h"
#include "message.h"
#define YY_NEVER_INTERACTIVE 1
#define YY_NO_INPUT 1
/* -----------------------------------------------------------------
* state variables
*/
static const char *g_inputString;
static int g_inputPosition;
static ArgumentList *g_argList;
static QCString *g_copyArgValue;
static QCString g_curArgTypeName;
static QCString g_curArgDefValue;
static QCString g_curArgName;
static QCString g_curArgDocs;
static QCString g_curArgAttrib;
static QCString g_curArgArray;
static QCString g_extraTypeChars;
static int g_argRoundCount;
static int g_argSharpCount;
static int g_argCurlyCount;
static int g_readArgContext;
static int g_lastDocContext;
static int g_lastDocChar;
static QCString g_delimiter;
/* -----------------------------------------------------------------
*/
#undef YY_INPUT
#define YY_INPUT(buf,result,max_size) result=yyread(buf,max_size);
static int yyread(char *buf,int max_size)
{
int c=0;
while( c < max_size && g_inputString[g_inputPosition] )
{
*buf = g_inputString[g_inputPosition++] ;
c++; buf++;
}
return c;
}
#line 926 "<stdout>"
#define INITIAL 0
#define Start 1
#define CopyArgString 2
#define CopyRawString 3
#define CopyArgRound 4
#define CopyArgRound2 5
#define CopyArgSharp 6
#define CopyArgCurly 7
#define ReadFuncArgType 8
#define ReadFuncArgDef 9
#define ReadFuncArgPtr 10
#define FuncQual 11
#define ReadDocBlock 12
#define ReadDocLine 13
#define TrailingReturn 14
#ifndef YY_NO_UNISTD_H
/* Special case for "unistd.h", since it is non-ANSI. We include it way
* down here because we want the user's section 1 to have been scanned first.
* The user has a chance to override it with an option.
*/
#include <unistd.h>
#endif
#ifndef YY_EXTRA_TYPE
#define YY_EXTRA_TYPE void *
#endif
static int yy_init_globals (void );
/* Accessor methods to globals.
These are made visible to non-reentrant scanners for convenience. */
int defargsYYlex_destroy (void );
int defargsYYget_debug (void );
void defargsYYset_debug (int debug_flag );
YY_EXTRA_TYPE defargsYYget_extra (void );
void defargsYYset_extra (YY_EXTRA_TYPE user_defined );
FILE *defargsYYget_in (void );
void defargsYYset_in (FILE * in_str );
FILE *defargsYYget_out (void );
void defargsYYset_out (FILE * out_str );
int defargsYYget_leng (void );
char *defargsYYget_text (void );
int defargsYYget_lineno (void );
void defargsYYset_lineno (int line_number );
/* Macros after this point can all be overridden by user definitions in
* section 1.
*/
#ifndef YY_SKIP_YYWRAP
#ifdef __cplusplus
extern "C" int defargsYYwrap (void );
#else
extern int defargsYYwrap (void );
#endif
#endif
static void yyunput (int c,char *buf_ptr );
#ifndef yytext_ptr
static void yy_flex_strncpy (char *,yyconst char *,int );
#endif
#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * );
#endif
#ifndef YY_NO_INPUT
#ifdef __cplusplus
static int yyinput (void );
#else
static int input (void );
#endif
#endif
/* Amount of stuff to slurp up with each read. */
#ifndef YY_READ_BUF_SIZE
#define YY_READ_BUF_SIZE 262144
#endif
/* Copy whatever the last rule matched to the standard output. */
#ifndef ECHO
/* This used to be an fputs(), but since the string might contain NUL's,
* we now use fwrite().
*/
#define ECHO do { if (fwrite( defargsYYtext, defargsYYleng, 1, defargsYYout )) {} } while (0)
#endif
/* Gets input and stuffs it into "buf". number of characters read, or YY_NULL,
* is returned in "result".
*/
#ifndef YY_INPUT
#define YY_INPUT(buf,result,max_size) \
if ( YY_CURRENT_BUFFER_LVALUE->yy_is_interactive ) \
{ \
int c = '*'; \
unsigned n; \
for ( n = 0; n < max_size && \
(c = getc( defargsYYin )) != EOF && c != '\n'; ++n ) \
buf[n] = (char) c; \
if ( c == '\n' ) \
buf[n++] = (char) c; \
if ( c == EOF && ferror( defargsYYin ) ) \
YY_FATAL_ERROR( "input in flex scanner failed" ); \
result = n; \
} \
else \
{ \
errno=0; \
while ( (result = fread(buf, 1, max_size, defargsYYin))==0 && ferror(defargsYYin)) \
{ \
if( errno != EINTR) \
{ \
YY_FATAL_ERROR( "input in flex scanner failed" ); \
break; \
} \
errno=0; \
clearerr(defargsYYin); \
} \
}\
\
#endif
/* No semi-colon after return; correct usage is to write "yyterminate();" -
* we don't want an extra ';' after the "return" because that will cause
* some compilers to complain about unreachable statements.
*/
#ifndef yyterminate
#define yyterminate() return YY_NULL
#endif
/* Number of entries by which start-condition stack grows. */
#ifndef YY_START_STACK_INCR
#define YY_START_STACK_INCR 25
#endif
/* Report a fatal error. */
#ifndef YY_FATAL_ERROR
#define YY_FATAL_ERROR(msg) yy_fatal_error( msg )
#endif
/* end tables serialization structures and prototypes */
/* Default declaration of generated scanner - a define so the user can
* easily add parameters.
*/
#ifndef YY_DECL
#define YY_DECL_IS_OURS 1
extern int defargsYYlex (void);
#define YY_DECL int defargsYYlex (void)
#endif /* !YY_DECL */
/* Code executed at the beginning of each rule, after defargsYYtext and defargsYYleng
* have been set up.
*/
#ifndef YY_USER_ACTION
#define YY_USER_ACTION
#endif
/* Code executed at the end of each rule. */
#ifndef YY_BREAK
#define YY_BREAK break;
#endif
#define YY_RULE_SETUP \
YY_USER_ACTION
/** The main scanner function which does all the work.
*/
YY_DECL
{
register yy_state_type yy_current_state;
register char *yy_cp, *yy_bp;
register int yy_act;
#line 126 "defargs.l"
#line 1125 "<stdout>"
if ( !(yy_init) )
{
(yy_init) = 1;
#ifdef YY_USER_INIT
YY_USER_INIT;
#endif
/* Create the reject buffer large enough to save one state per allowed character. */
if ( ! (yy_state_buf) )
(yy_state_buf) = (yy_state_type *)defargsYYalloc(YY_STATE_BUF_SIZE );
if ( ! (yy_state_buf) )
YY_FATAL_ERROR( "out of dynamic memory in defargsYYlex()" );
if ( ! (yy_start) )
(yy_start) = 1; /* first start state */
if ( ! defargsYYin )
defargsYYin = stdin;
if ( ! defargsYYout )
defargsYYout = stdout;
if ( ! YY_CURRENT_BUFFER ) {
defargsYYensure_buffer_stack ();
YY_CURRENT_BUFFER_LVALUE =
defargsYY_create_buffer(defargsYYin,YY_BUF_SIZE );
}
defargsYY_load_buffer_state( );
}
while ( 1 ) /* loops until end-of-file is reached */
{
yy_cp = (yy_c_buf_p);
/* Support of defargsYYtext. */
*yy_cp = (yy_hold_char);
/* yy_bp points to the position in yy_ch_buf of the start of
* the current run.
*/
yy_bp = yy_cp;
yy_current_state = (yy_start);
(yy_state_ptr) = (yy_state_buf);
*(yy_state_ptr)++ = yy_current_state;
yy_match:
do
{
register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
if ( yy_current_state >= 243 )
yy_c = yy_meta[(unsigned int) yy_c];
}
yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
*(yy_state_ptr)++ = yy_current_state;
++yy_cp;
}
while ( yy_base[yy_current_state] != 877 );
yy_find_action:
yy_current_state = *--(yy_state_ptr);
(yy_lp) = yy_accept[yy_current_state];
find_rule: /* we branch to this label when backing up */
for ( ; ; ) /* until we find what rule we matched */
{
if ( (yy_lp) && (yy_lp) < yy_accept[yy_current_state + 1] )
{
yy_act = yy_acclist[(yy_lp)];
if ( yy_act & YY_TRAILING_HEAD_MASK ||
(yy_looking_for_trail_begin) )
{
if ( yy_act == (yy_looking_for_trail_begin) )
{
(yy_looking_for_trail_begin) = 0;
yy_act &= ~YY_TRAILING_HEAD_MASK;
break;
}
}
else if ( yy_act & YY_TRAILING_MASK )
{
(yy_looking_for_trail_begin) = yy_act & ~YY_TRAILING_MASK;
(yy_looking_for_trail_begin) |= YY_TRAILING_HEAD_MASK;
(yy_full_match) = yy_cp;
(yy_full_state) = (yy_state_ptr);
(yy_full_lp) = (yy_lp);
}
else
{
(yy_full_match) = yy_cp;
(yy_full_state) = (yy_state_ptr);
(yy_full_lp) = (yy_lp);
break;
}
++(yy_lp);
goto find_rule;
}
--yy_cp;
yy_current_state = *--(yy_state_ptr);
(yy_lp) = yy_accept[yy_current_state];
}
YY_DO_BEFORE_ACTION;
do_action: /* This label is used only to access EOF actions. */
switch ( yy_act )
{ /* beginning of action switch */
case 1:
YY_RULE_SETUP
#line 128 "defargs.l"
{ BEGIN(ReadFuncArgType); }
YY_BREAK
case 2:
YY_RULE_SETUP
#line 130 "defargs.l"
{
g_curArgTypeName+=" ";
}
YY_BREAK
case 3:
/* rule 3 can match eol */
YY_RULE_SETUP
#line 133 "defargs.l"
{
if (g_curArgTypeName.stripWhiteSpace().isEmpty())
{
g_curArgAttrib=defargsYYtext; // for M$-IDL
}
else // array type
{
g_curArgArray+=defargsYYtext;
}
}
YY_BREAK
case 4:
YY_RULE_SETUP
#line 143 "defargs.l"
{ g_curArgDefValue+=defargsYYtext; }
YY_BREAK
case 5:
YY_RULE_SETUP
#line 144 "defargs.l"
{ g_curArgDefValue+=defargsYYtext; }
YY_BREAK
case 6:
YY_RULE_SETUP
#line 145 "defargs.l"
{ g_curArgDefValue+=defargsYYtext; }
YY_BREAK
case 7:
/* rule 7 can match eol */
YY_RULE_SETUP
#line 146 "defargs.l"
{ g_curArgDefValue+=defargsYYtext;
QCString text=defargsYYtext;
int i=text.find('"');
g_delimiter = defargsYYtext+i+1;
g_delimiter=g_delimiter.left(g_delimiter.length()-1);
BEGIN( CopyRawString );
}
YY_BREAK
case 8:
YY_RULE_SETUP
#line 153 "defargs.l"
{
g_curArgDefValue+=*defargsYYtext;
BEGIN( CopyArgString );
}
YY_BREAK
case 9:
/* rule 9 can match eol */
YY_RULE_SETUP
#line 157 "defargs.l"
{
// function pointer as argument
g_curArgTypeName+=defargsYYtext;
//g_curArgTypeName=g_curArgTypeName.simplifyWhiteSpace();
BEGIN( ReadFuncArgPtr );
}
YY_BREAK
case 10:
YY_RULE_SETUP
#line 163 "defargs.l"
{
g_curArgName=defargsYYtext;
}
YY_BREAK
case 11:
YY_RULE_SETUP
#line 166 "defargs.l"
{ // function pointer
g_curArgTypeName+=defargsYYtext;
//g_curArgTypeName=g_curArgTypeName.simplifyWhiteSpace();
g_readArgContext = ReadFuncArgType;
g_copyArgValue=&g_curArgTypeName;
g_argRoundCount=0;
BEGIN( CopyArgRound2 );
}
YY_BREAK
case 12:
*yy_cp = (yy_hold_char); /* undo effects of setting up defargsYYtext */
(yy_c_buf_p) = yy_cp = yy_bp + 1;
YY_DO_BEFORE_ACTION; /* set up defargsYYtext again */
YY_RULE_SETUP
#line 174 "defargs.l"
{ // pointer to fixed size array
g_curArgTypeName+=defargsYYtext;
g_curArgTypeName+=g_curArgName;
//g_curArgTypeName=g_curArgTypeName.simplifyWhiteSpace();
BEGIN( ReadFuncArgType );
}
YY_BREAK
case 13:
YY_RULE_SETUP
#line 180 "defargs.l"
{ // redundant braces detected / remove them
int i=g_curArgTypeName.findRev('('),l=g_curArgTypeName.length();
if (i!=-1)
g_curArgTypeName=g_curArgTypeName.left(i)+
g_curArgTypeName.right(l-i-1);
g_curArgTypeName+=g_curArgName;
BEGIN( ReadFuncArgType );
}
YY_BREAK
case 14:
YY_RULE_SETUP
#line 188 "defargs.l"
{ // handle operators in defargs
g_curArgTypeName+=defargsYYtext;
}
YY_BREAK
case 15:
YY_RULE_SETUP
#line 191 "defargs.l"
{
if (YY_START==ReadFuncArgType)
{
g_curArgTypeName+=*defargsYYtext;
g_copyArgValue=&g_curArgTypeName;
}
else // YY_START==ReadFuncArgDef
{
g_curArgDefValue+=*defargsYYtext;
g_copyArgValue=&g_curArgDefValue;
}
g_readArgContext = YY_START;
if (*defargsYYtext=='(')
{
g_argRoundCount=0;
BEGIN( CopyArgRound );
}
else if (*defargsYYtext=='{')
{
g_argCurlyCount=0;
BEGIN( CopyArgCurly );
}
else // defargsYYtext=='<'
{
g_argSharpCount=0;
g_argRoundCount=0;
BEGIN( CopyArgSharp );
}
}
YY_BREAK
case 16:
YY_RULE_SETUP
#line 220 "defargs.l"
{
g_argRoundCount++;
*g_copyArgValue += *defargsYYtext;
}
YY_BREAK
case 17:
YY_RULE_SETUP
#line 224 "defargs.l"
{
*g_copyArgValue += defargsYYtext;
if (g_argRoundCount>0)
{
g_argRoundCount--;
}
else
{
if (YY_START==CopyArgRound2)
{
*g_copyArgValue+=" "+g_curArgName;
}
BEGIN( g_readArgContext );
}
}
YY_BREAK
case 18:
*yy_cp = (yy_hold_char); /* undo effects of setting up defargsYYtext */
(yy_c_buf_p) = yy_cp = yy_bp + 1;
YY_DO_BEFORE_ACTION; /* set up defargsYYtext again */
YY_RULE_SETUP
#line 239 "defargs.l"
{
*g_copyArgValue += *defargsYYtext;
if (g_argRoundCount>0) g_argRoundCount--;
else BEGIN( g_readArgContext );
}
YY_BREAK
case 19:
YY_RULE_SETUP
#line 244 "defargs.l"
{
if (g_argRoundCount>0)
{
*g_copyArgValue += defargsYYtext;
}
else
{
REJECT;
}
}
YY_BREAK
case 20:
YY_RULE_SETUP
#line 254 "defargs.l"
{
if (g_argRoundCount>0)
{
*g_copyArgValue += defargsYYtext;
}
else
{
REJECT;
}
}
YY_BREAK
case 21:
YY_RULE_SETUP
#line 264 "defargs.l"
{
g_argSharpCount++;
*g_copyArgValue += *defargsYYtext;
}
YY_BREAK
case 22:
YY_RULE_SETUP
#line 268 "defargs.l"
{
*g_copyArgValue += *defargsYYtext;
if (g_argSharpCount>0) g_argSharpCount--;
else BEGIN( g_readArgContext );
}
YY_BREAK
case 23:
YY_RULE_SETUP
#line 273 "defargs.l"
{
g_argRoundCount++;
*g_copyArgValue += *defargsYYtext;
}
YY_BREAK
case 24:
YY_RULE_SETUP
#line 277 "defargs.l"
{
g_argRoundCount--;
*g_copyArgValue += *defargsYYtext;
}
YY_BREAK
case 25:
YY_RULE_SETUP
#line 281 "defargs.l"
{
g_argCurlyCount++;
*g_copyArgValue += *defargsYYtext;
}
YY_BREAK
case 26:
YY_RULE_SETUP
#line 285 "defargs.l"
{
*g_copyArgValue += *defargsYYtext;
if (g_argCurlyCount>0) g_argCurlyCount--;
else BEGIN( g_readArgContext );
}
YY_BREAK
case 27:
YY_RULE_SETUP
#line 290 "defargs.l"
{
g_curArgDefValue+=defargsYYtext;
}
YY_BREAK
case 28:
/* rule 28 can match eol */
YY_RULE_SETUP
#line 293 "defargs.l"
{
g_curArgDefValue+=defargsYYtext;
QCString delimiter = defargsYYtext+1;
delimiter=delimiter.left(delimiter.length()-1);
if (delimiter==g_delimiter)
{
BEGIN( ReadFuncArgDef );
}
}
YY_BREAK
case 29:
YY_RULE_SETUP
#line 302 "defargs.l"
{
g_curArgDefValue+=*defargsYYtext;
BEGIN( ReadFuncArgDef );
}
YY_BREAK
case 30:
YY_RULE_SETUP
#line 306 "defargs.l"
{
BEGIN( ReadFuncArgDef );
}
YY_BREAK
case 31:
YY_RULE_SETUP
#line 309 "defargs.l"
{
g_lastDocContext=YY_START;
g_lastDocChar=*defargsYYtext;
QCString text=defargsYYtext;
if (text.find("//")!=-1)
BEGIN( ReadDocLine );
else
BEGIN( ReadDocBlock );
}
YY_BREAK
case 32:
YY_RULE_SETUP
#line 318 "defargs.l"
{
if (*defargsYYtext==')' && g_curArgTypeName.stripWhiteSpace().isEmpty())
{
g_curArgTypeName+=*defargsYYtext;
BEGIN(FuncQual);
}
else
{
g_curArgTypeName=removeRedundantWhiteSpace(g_curArgTypeName);
g_curArgDefValue=g_curArgDefValue.stripWhiteSpace();
//printf("curArgType=`%s' curArgDefVal=`%s'\n",g_curArgTypeName.data(),g_curArgDefValue.data());
int l=g_curArgTypeName.length();
if (l>0)
{
int i=l-1;
while (i>=0 && (isspace((uchar)g_curArgTypeName.at(i)) || g_curArgTypeName.at(i)=='.')) i--;
while (i>=0 && (isId(g_curArgTypeName.at(i)) || g_curArgTypeName.at(i)=='$')) i--;
Argument *a = new Argument;
a->attrib = g_curArgAttrib.copy();
//printf("a->type=%s a->name=%s i=%d l=%d\n",
// a->type.data(),a->name.data(),i,l);
a->array.resize(0);
if (i==l-1 && g_curArgTypeName.at(i)==')') // function argument
{
int bi=g_curArgTypeName.find('(');
int fi=bi-1;
//printf("func arg fi=%d\n",fi);
while (fi>=0 && isId(g_curArgTypeName.at(fi))) fi--;
if (fi>=0)
{
a->type = g_curArgTypeName.left(fi+1);
a->name = g_curArgTypeName.mid(fi+1,bi-fi-1).stripWhiteSpace();
a->array = g_curArgTypeName.right(l-bi);
}
else
{
a->type = g_curArgTypeName;
}
}
else if (i>=0 && g_curArgTypeName.at(i)!=':')
{ // type contains a name
a->type = removeRedundantWhiteSpace(g_curArgTypeName.left(i+1)).stripWhiteSpace();
a->name = g_curArgTypeName.right(l-i-1).stripWhiteSpace();
// if the type becomes a type specifier only then we make a mistake
// and need to correct it to avoid seeing a nameless parameter
// "struct A" as a parameter with type "struct" and name "A".
int sv=0;
if (a->type.left(6)=="const ") sv=6;
else if (a->type.left(9)=="volatile ") sv=9;
if (a->type.mid(sv)=="struct" ||
a->type.mid(sv)=="union" ||
a->type.mid(sv)=="class" ||
a->type.mid(sv)=="typename" ||
a->type=="const" ||
a->type=="volatile"
)
{
a->type = a->type + " " + a->name;
a->name.resize(0);
}
//printf(" --> a->type='%s'\n",a->type.data());
}
else // assume only the type was specified, try to determine name later
{
a->type = removeRedundantWhiteSpace(g_curArgTypeName);
}
if (!a->type.isEmpty() && a->type.at(0)=='$') // typeless PHP name?
{
a->name = a->type;
a->type = "";
}
a->array += removeRedundantWhiteSpace(g_curArgArray);
//printf("array=%s\n",a->array.data());
int alen = a->array.length();
if (alen>2 && a->array.at(0)=='(' &&
a->array.at(alen-1)==')') // fix-up for int *(a[10])
{
int i=a->array.find('[')-1;
a->array = a->array.mid(1,alen-2);
if (i>0 && a->name.isEmpty())
{
a->name = a->array.left(i).stripWhiteSpace();
a->array = a->array.mid(i);
}
}
a->defval = g_curArgDefValue.copy();
//printf("a->type=%s a->name=%s a->defval=\"%s\"\n",a->type.data(),a->name.data(),a->defval.data());
a->docs = g_curArgDocs.stripWhiteSpace();
//printf("Argument `%s' `%s' adding docs=`%s'\n",a->type.data(),a->name.data(),a->docs.data());
g_argList->append(a);
}
g_curArgAttrib.resize(0);
g_curArgTypeName.resize(0);
g_curArgDefValue.resize(0);
g_curArgArray.resize(0);
g_curArgDocs.resize(0);
if (*defargsYYtext==')')
{
BEGIN(FuncQual);
//printf(">>> end of argument list\n");
}
else
{
BEGIN( ReadFuncArgType );
}
}
}
YY_BREAK
case 33:
YY_RULE_SETUP
#line 427 "defargs.l"
{
QCString name=defargsYYtext; //resolveDefines(defargsYYtext);
if (YY_START==ReadFuncArgType && g_curArgArray=="[]") // Java style array
{
g_curArgTypeName+=" []";
g_curArgArray.resize(0);
}
//printf("resolveName `%s'->`%s'\n",defargsYYtext,name.data());
g_curArgTypeName+=name;
}
YY_BREAK
case 34:
YY_RULE_SETUP
#line 437 "defargs.l"
{
g_curArgTypeName+=*defargsYYtext;
}
YY_BREAK
case 35:
YY_RULE_SETUP
#line 441 "defargs.l"
{
g_curArgDefValue+=defargsYYtext;
}
YY_BREAK
case 36:
YY_RULE_SETUP
#line 444 "defargs.l"
{
g_curArgDefValue+=*defargsYYtext;
}
YY_BREAK
case 37:
YY_RULE_SETUP
#line 447 "defargs.l"
{
QCString name=defargsYYtext; //resolveDefines(defargsYYtext);
*g_copyArgValue+=name;
}
YY_BREAK
case 38:
YY_RULE_SETUP
#line 451 "defargs.l"
{
*g_copyArgValue += *defargsYYtext;
}
YY_BREAK
case 39:
YY_RULE_SETUP
#line 454 "defargs.l"
{
g_argList->constSpecifier=TRUE;
}
YY_BREAK
case 40:
YY_RULE_SETUP
#line 457 "defargs.l"
{
g_argList->volatileSpecifier=TRUE;
}
YY_BREAK
case 41:
YY_RULE_SETUP
#line 460 "defargs.l"
{
g_argList->pureSpecifier=TRUE;
BEGIN(FuncQual);
}
YY_BREAK
case 42:
YY_RULE_SETUP
#line 464 "defargs.l"
{ // C++11 trailing return type
g_argList->trailingReturnType=" -> ";
BEGIN(TrailingReturn);
}
YY_BREAK
case 43:
*yy_cp = (yy_hold_char); /* undo effects of setting up defargsYYtext */
(yy_c_buf_p) = yy_cp = yy_bp + 1;
YY_DO_BEFORE_ACTION; /* set up defargsYYtext again */
YY_RULE_SETUP
#line 468 "defargs.l"
{
unput(*defargsYYtext);
BEGIN(FuncQual);
}
YY_BREAK
case 44:
YY_RULE_SETUP
#line 472 "defargs.l"
{
g_argList->trailingReturnType+=defargsYYtext;
}
YY_BREAK
case 45:
/* rule 45 can match eol */
YY_RULE_SETUP
#line 475 "defargs.l"
{
g_argList->trailingReturnType+=defargsYYtext;
}
YY_BREAK
case 46:
/* rule 46 can match eol */
YY_RULE_SETUP
#line 478 "defargs.l"
{ // for functions returning a pointer to an array,
// i.e. ")[]" in "int (*f(int))[4]" with argsString="(int))[4]"
g_extraTypeChars=defargsYYtext;
}
YY_BREAK
case 47:
YY_RULE_SETUP
#line 482 "defargs.l"
{
g_curArgDocs+=defargsYYtext;
}
YY_BREAK
case 48:
YY_RULE_SETUP
#line 485 "defargs.l"
{
g_curArgDocs+=defargsYYtext;
}
YY_BREAK
case 49:
YY_RULE_SETUP
#line 488 "defargs.l"
{
if (g_lastDocChar!=0)
unput(g_lastDocChar);
BEGIN(g_lastDocContext);
}
YY_BREAK
case 50:
/* rule 50 can match eol */
YY_RULE_SETUP
#line 493 "defargs.l"
{
if (g_lastDocChar!=0)
unput(g_lastDocChar);
BEGIN(g_lastDocContext);
}
YY_BREAK
case 51:
/* rule 51 can match eol */
YY_RULE_SETUP
#line 498 "defargs.l"
{
g_curArgDocs+=*defargsYYtext;
}
YY_BREAK
case 52:
YY_RULE_SETUP
#line 501 "defargs.l"
{
g_curArgDocs+=*defargsYYtext;
}
YY_BREAK
case 53:
YY_RULE_SETUP
#line 504 "defargs.l"
{
g_lastDocContext=YY_START;
g_lastDocChar=0;
if (defargsYYtext[1]=='/')
BEGIN( ReadDocLine );
else
BEGIN( ReadDocBlock );
}
YY_BREAK
case 54:
/* rule 54 can match eol */
YY_RULE_SETUP
#line 512 "defargs.l"
YY_BREAK
case 55:
YY_RULE_SETUP
#line 513 "defargs.l"
YY_BREAK
case 56:
YY_RULE_SETUP
#line 515 "defargs.l"
ECHO;
YY_BREAK
#line 1867 "<stdout>"
case YY_STATE_EOF(INITIAL):
case YY_STATE_EOF(Start):
case YY_STATE_EOF(CopyArgString):
case YY_STATE_EOF(CopyRawString):
case YY_STATE_EOF(CopyArgRound):
case YY_STATE_EOF(CopyArgRound2):
case YY_STATE_EOF(CopyArgSharp):
case YY_STATE_EOF(CopyArgCurly):
case YY_STATE_EOF(ReadFuncArgType):
case YY_STATE_EOF(ReadFuncArgDef):
case YY_STATE_EOF(ReadFuncArgPtr):
case YY_STATE_EOF(FuncQual):
case YY_STATE_EOF(ReadDocBlock):
case YY_STATE_EOF(ReadDocLine):
case YY_STATE_EOF(TrailingReturn):
yyterminate();
case YY_END_OF_BUFFER:
{
/* Amount of text matched not including the EOB char. */
int yy_amount_of_matched_text = (int) (yy_cp - (yytext_ptr)) - 1;
/* Undo the effects of YY_DO_BEFORE_ACTION. */
*yy_cp = (yy_hold_char);
YY_RESTORE_YY_MORE_OFFSET
if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_NEW )
{
/* We're scanning a new file or input source. It's
* possible that this happened because the user
* just pointed defargsYYin at a new source and called
* defargsYYlex(). If so, then we have to assure
* consistency between YY_CURRENT_BUFFER and our
* globals. Here is the right place to do so, because
* this is the first action (other than possibly a
* back-up) that will match for the new input source.
*/
(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars;
YY_CURRENT_BUFFER_LVALUE->yy_input_file = defargsYYin;
YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_NORMAL;
}
/* Note that here we test for yy_c_buf_p "<=" to the position
* of the first EOB in the buffer, since yy_c_buf_p will
* already have been incremented past the NUL character
* (since all states make transitions on EOB to the
* end-of-buffer state). Contrast this with the test
* in input().
*/
if ( (yy_c_buf_p) <= &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] )
{ /* This was really a NUL. */
yy_state_type yy_next_state;
(yy_c_buf_p) = (yytext_ptr) + yy_amount_of_matched_text;
yy_current_state = yy_get_previous_state( );
/* Okay, we're now positioned to make the NUL
* transition. We couldn't have
* yy_get_previous_state() go ahead and do it
* for us because it doesn't know how to deal
* with the possibility of jamming (and we don't
* want to build jamming into it because then it
* will run more slowly).
*/
yy_next_state = yy_try_NUL_trans( yy_current_state );
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
if ( yy_next_state )
{
/* Consume the NUL. */
yy_cp = ++(yy_c_buf_p);
yy_current_state = yy_next_state;
goto yy_match;
}
else
{
yy_cp = (yy_c_buf_p);
goto yy_find_action;
}
}
else switch ( yy_get_next_buffer( ) )
{
case EOB_ACT_END_OF_FILE:
{
(yy_did_buffer_switch_on_eof) = 0;
if ( defargsYYwrap( ) )
{
/* Note: because we've taken care in
* yy_get_next_buffer() to have set up
* defargsYYtext, we can now set up
* yy_c_buf_p so that if some total
* hoser (like flex itself) wants to
* call the scanner after we return the
* YY_NULL, it'll still work - another
* YY_NULL will get returned.
*/
(yy_c_buf_p) = (yytext_ptr) + YY_MORE_ADJ;
yy_act = YY_STATE_EOF(YY_START);
goto do_action;
}
else
{
if ( ! (yy_did_buffer_switch_on_eof) )
YY_NEW_FILE;
}
break;
}
case EOB_ACT_CONTINUE_SCAN:
(yy_c_buf_p) =
(yytext_ptr) + yy_amount_of_matched_text;
yy_current_state = yy_get_previous_state( );
yy_cp = (yy_c_buf_p);
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
goto yy_match;
case EOB_ACT_LAST_MATCH:
(yy_c_buf_p) =
&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)];
yy_current_state = yy_get_previous_state( );
yy_cp = (yy_c_buf_p);
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
goto yy_find_action;
}
break;
}
default:
YY_FATAL_ERROR(
"fatal flex scanner internal error--no action found" );
} /* end of action switch */
} /* end of scanning one token */
} /* end of defargsYYlex */
/* yy_get_next_buffer - try to read in a new buffer
*
* Returns a code representing an action:
* EOB_ACT_LAST_MATCH -
* EOB_ACT_CONTINUE_SCAN - continue scanning from current position
* EOB_ACT_END_OF_FILE - end of file
*/
static int yy_get_next_buffer (void)
{
register char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
register char *source = (yytext_ptr);
register int number_to_move, i;
int ret_val;
if ( (yy_c_buf_p) > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] )
YY_FATAL_ERROR(
"fatal flex scanner internal error--end of buffer missed" );
if ( YY_CURRENT_BUFFER_LVALUE->yy_fill_buffer == 0 )
{ /* Don't try to fill the buffer, so this is an EOF. */
if ( (yy_c_buf_p) - (yytext_ptr) - YY_MORE_ADJ == 1 )
{
/* We matched a single character, the EOB, so
* treat this as a final EOF.
*/
return EOB_ACT_END_OF_FILE;
}
else
{
/* We matched some text prior to the EOB, first
* process it.
*/
return EOB_ACT_LAST_MATCH;
}
}
/* Try to read more data. */
/* First move last chars to start of buffer. */
number_to_move = (int) ((yy_c_buf_p) - (yytext_ptr)) - 1;
for ( i = 0; i < number_to_move; ++i )
*(dest++) = *(source++);
if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_EOF_PENDING )
/* don't do the read, it's not guaranteed to return an EOF,
* just force an EOF
*/
YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars) = 0;
else
{
int num_to_read =
YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1;
while ( num_to_read <= 0 )
{ /* Not enough room in the buffer - grow it. */
YY_FATAL_ERROR(
"input buffer overflow, can't enlarge buffer because scanner uses REJECT" );
}
if ( num_to_read > YY_READ_BUF_SIZE )
num_to_read = YY_READ_BUF_SIZE;
/* Read in more data. */
YY_INPUT( (&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move]),
(yy_n_chars), (size_t) num_to_read );
YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
}
if ( (yy_n_chars) == 0 )
{
if ( number_to_move == YY_MORE_ADJ )
{
ret_val = EOB_ACT_END_OF_FILE;
defargsYYrestart(defargsYYin );
}
else
{
ret_val = EOB_ACT_LAST_MATCH;
YY_CURRENT_BUFFER_LVALUE->yy_buffer_status =
YY_BUFFER_EOF_PENDING;
}
}
else
ret_val = EOB_ACT_CONTINUE_SCAN;
if ((yy_size_t) ((yy_n_chars) + number_to_move) > YY_CURRENT_BUFFER_LVALUE->yy_buf_size) {
/* Extend the array by 50%, plus the number we really need. */
yy_size_t new_size = (yy_n_chars) + number_to_move + ((yy_n_chars) >> 1);
YY_CURRENT_BUFFER_LVALUE->yy_ch_buf = (char *) defargsYYrealloc((void *) YY_CURRENT_BUFFER_LVALUE->yy_ch_buf,new_size );
if ( ! YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
YY_FATAL_ERROR( "out of dynamic memory in yy_get_next_buffer()" );
}
(yy_n_chars) += number_to_move;
YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] = YY_END_OF_BUFFER_CHAR;
YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] = YY_END_OF_BUFFER_CHAR;
(yytext_ptr) = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[0];
return ret_val;
}
/* yy_get_previous_state - get the state just before the EOB char was reached */
static yy_state_type yy_get_previous_state (void)
{
register yy_state_type yy_current_state;
register char *yy_cp;
yy_current_state = (yy_start);
(yy_state_ptr) = (yy_state_buf);
*(yy_state_ptr)++ = yy_current_state;
for ( yy_cp = (yytext_ptr) + YY_MORE_ADJ; yy_cp < (yy_c_buf_p); ++yy_cp )
{
register YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
if ( yy_current_state >= 243 )
yy_c = yy_meta[(unsigned int) yy_c];
}
yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
*(yy_state_ptr)++ = yy_current_state;
}
return yy_current_state;
}
/* yy_try_NUL_trans - try to make a transition on the NUL character
*
* synopsis
* next_state = yy_try_NUL_trans( current_state );
*/
static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state )
{
register int yy_is_jam;
register YY_CHAR yy_c = 1;
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
if ( yy_current_state >= 243 )
yy_c = yy_meta[(unsigned int) yy_c];
}
yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
yy_is_jam = (yy_current_state == 242);
if ( ! yy_is_jam )
*(yy_state_ptr)++ = yy_current_state;
return yy_is_jam ? 0 : yy_current_state;
}
static void yyunput (int c, register char * yy_bp )
{
register char *yy_cp;
yy_cp = (yy_c_buf_p);
/* undo effects of setting up defargsYYtext */
*yy_cp = (yy_hold_char);
if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
{ /* need to shift things up to make room */
/* +2 for EOB chars. */
register int number_to_move = (yy_n_chars) + 2;
register char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
YY_CURRENT_BUFFER_LVALUE->yy_buf_size + 2];
register char *source =
&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move];
while ( source > YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
*--dest = *--source;
yy_cp += (int) (dest - source);
yy_bp += (int) (dest - source);
YY_CURRENT_BUFFER_LVALUE->yy_n_chars =
(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_buf_size;
if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
YY_FATAL_ERROR( "flex scanner push-back overflow" );
}
*--yy_cp = (char) c;
(yytext_ptr) = yy_bp;
(yy_hold_char) = *yy_cp;
(yy_c_buf_p) = yy_cp;
}
#ifndef YY_NO_INPUT
#ifdef __cplusplus
static int yyinput (void)
#else
static int input (void)
#endif
{
int c;
*(yy_c_buf_p) = (yy_hold_char);
if ( *(yy_c_buf_p) == YY_END_OF_BUFFER_CHAR )
{
/* yy_c_buf_p now points to the character we want to return.
* If this occurs *before* the EOB characters, then it's a
* valid NUL; if not, then we've hit the end of the buffer.
*/
if ( (yy_c_buf_p) < &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] )
/* This was really a NUL. */
*(yy_c_buf_p) = '\0';
else
{ /* need more input */
int offset = (yy_c_buf_p) - (yytext_ptr);
++(yy_c_buf_p);
switch ( yy_get_next_buffer( ) )
{
case EOB_ACT_LAST_MATCH:
/* This happens because yy_g_n_b()
* sees that we've accumulated a
* token and flags that we need to
* try matching the token before
* proceeding. But for input(),
* there's no matching to consider.
* So convert the EOB_ACT_LAST_MATCH
* to EOB_ACT_END_OF_FILE.
*/
/* Reset buffer status. */
defargsYYrestart(defargsYYin );
/*FALLTHROUGH*/
case EOB_ACT_END_OF_FILE:
{
if ( defargsYYwrap( ) )
return EOF;
if ( ! (yy_did_buffer_switch_on_eof) )
YY_NEW_FILE;
#ifdef __cplusplus
return yyinput();
#else
return input();
#endif
}
case EOB_ACT_CONTINUE_SCAN:
(yy_c_buf_p) = (yytext_ptr) + offset;
break;
}
}
}
c = *(unsigned char *) (yy_c_buf_p); /* cast for 8-bit char's */
*(yy_c_buf_p) = '\0'; /* preserve defargsYYtext */
(yy_hold_char) = *++(yy_c_buf_p);
return c;
}
#endif /* ifndef YY_NO_INPUT */
/** Immediately switch to a different input stream.
* @param input_file A readable stream.
*
* @note This function does not reset the start condition to @c INITIAL .
*/
void defargsYYrestart (FILE * input_file )
{
if ( ! YY_CURRENT_BUFFER ){
defargsYYensure_buffer_stack ();
YY_CURRENT_BUFFER_LVALUE =
defargsYY_create_buffer(defargsYYin,YY_BUF_SIZE );
}
defargsYY_init_buffer(YY_CURRENT_BUFFER,input_file );
defargsYY_load_buffer_state( );
}
/** Switch to a different input buffer.
* @param new_buffer The new input buffer.
*
*/
void defargsYY_switch_to_buffer (YY_BUFFER_STATE new_buffer )
{
/* TODO. We should be able to replace this entire function body
* with
* defargsYYpop_buffer_state();
* defargsYYpush_buffer_state(new_buffer);
*/
defargsYYensure_buffer_stack ();
if ( YY_CURRENT_BUFFER == new_buffer )
return;
if ( YY_CURRENT_BUFFER )
{
/* Flush out information for old buffer. */
*(yy_c_buf_p) = (yy_hold_char);
YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p);
YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
}
YY_CURRENT_BUFFER_LVALUE = new_buffer;
defargsYY_load_buffer_state( );
/* We don't actually know whether we did this switch during
* EOF (defargsYYwrap()) processing, but the only time this flag
* is looked at is after defargsYYwrap() is called, so it's safe
* to go ahead and always set it.
*/
(yy_did_buffer_switch_on_eof) = 1;
}
static void defargsYY_load_buffer_state (void)
{
(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars;
(yytext_ptr) = (yy_c_buf_p) = YY_CURRENT_BUFFER_LVALUE->yy_buf_pos;
defargsYYin = YY_CURRENT_BUFFER_LVALUE->yy_input_file;
(yy_hold_char) = *(yy_c_buf_p);
}
/** Allocate and initialize an input buffer state.
* @param file A readable stream.
* @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE.
*
* @return the allocated buffer state.
*/
YY_BUFFER_STATE defargsYY_create_buffer (FILE * file, int size )
{
YY_BUFFER_STATE b;
b = (YY_BUFFER_STATE) defargsYYalloc(sizeof( struct yy_buffer_state ) );
if ( ! b )
YY_FATAL_ERROR( "out of dynamic memory in defargsYY_create_buffer()" );
b->yy_buf_size = size;
/* yy_ch_buf has to be 2 characters longer than the size given because
* we need to put in 2 end-of-buffer characters.
*/
b->yy_ch_buf = (char *) defargsYYalloc(b->yy_buf_size + 2 );
if ( ! b->yy_ch_buf )
YY_FATAL_ERROR( "out of dynamic memory in defargsYY_create_buffer()" );
b->yy_is_our_buffer = 1;
defargsYY_init_buffer(b,file );
return b;
}
/** Destroy the buffer.
* @param b a buffer created with defargsYY_create_buffer()
*
*/
void defargsYY_delete_buffer (YY_BUFFER_STATE b )
{
if ( ! b )
return;
if ( b == YY_CURRENT_BUFFER ) /* Not sure if we should pop here. */
YY_CURRENT_BUFFER_LVALUE = (YY_BUFFER_STATE) 0;
if ( b->yy_is_our_buffer )
defargsYYfree((void *) b->yy_ch_buf );
defargsYYfree((void *) b );
}
#ifndef __cplusplus
extern int isatty (int );
#endif /* __cplusplus */
/* Initializes or reinitializes a buffer.
* This function is sometimes called more than once on the same buffer,
* such as during a defargsYYrestart() or at EOF.
*/
static void defargsYY_init_buffer (YY_BUFFER_STATE b, FILE * file )
{
int oerrno = errno;
defargsYY_flush_buffer(b );
b->yy_input_file = file;
b->yy_fill_buffer = 1;
/* If b is the current buffer, then defargsYY_init_buffer was _probably_
* called from defargsYYrestart() or through yy_get_next_buffer.
* In that case, we don't want to reset the lineno or column.
*/
if (b != YY_CURRENT_BUFFER){
b->yy_bs_lineno = 1;
b->yy_bs_column = 0;
}
b->yy_is_interactive = file ? (isatty( fileno(file) ) > 0) : 0;
errno = oerrno;
}
/** Discard all buffered characters. On the next scan, YY_INPUT will be called.
* @param b the buffer state to be flushed, usually @c YY_CURRENT_BUFFER.
*
*/
void defargsYY_flush_buffer (YY_BUFFER_STATE b )
{
if ( ! b )
return;
b->yy_n_chars = 0;
/* We always need two end-of-buffer characters. The first causes
* a transition to the end-of-buffer state. The second causes
* a jam in that state.
*/
b->yy_ch_buf[0] = YY_END_OF_BUFFER_CHAR;
b->yy_ch_buf[1] = YY_END_OF_BUFFER_CHAR;
b->yy_buf_pos = &b->yy_ch_buf[0];
b->yy_at_bol = 1;
b->yy_buffer_status = YY_BUFFER_NEW;
if ( b == YY_CURRENT_BUFFER )
defargsYY_load_buffer_state( );
}
/** Pushes the new state onto the stack. The new state becomes
* the current state. This function will allocate the stack
* if necessary.
* @param new_buffer The new state.
*
*/
void defargsYYpush_buffer_state (YY_BUFFER_STATE new_buffer )
{
if (new_buffer == NULL)
return;
defargsYYensure_buffer_stack();
/* This block is copied from defargsYY_switch_to_buffer. */
if ( YY_CURRENT_BUFFER )
{
/* Flush out information for old buffer. */
*(yy_c_buf_p) = (yy_hold_char);
YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p);
YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
}
/* Only push if top exists. Otherwise, replace top. */
if (YY_CURRENT_BUFFER)
(yy_buffer_stack_top)++;
YY_CURRENT_BUFFER_LVALUE = new_buffer;
/* copied from defargsYY_switch_to_buffer. */
defargsYY_load_buffer_state( );
(yy_did_buffer_switch_on_eof) = 1;
}
/** Removes and deletes the top of the stack, if present.
* The next element becomes the new top.
*
*/
void defargsYYpop_buffer_state (void)
{
if (!YY_CURRENT_BUFFER)
return;
defargsYY_delete_buffer(YY_CURRENT_BUFFER );
YY_CURRENT_BUFFER_LVALUE = NULL;
if ((yy_buffer_stack_top) > 0)
--(yy_buffer_stack_top);
if (YY_CURRENT_BUFFER) {
defargsYY_load_buffer_state( );
(yy_did_buffer_switch_on_eof) = 1;
}
}
/* Allocates the stack if it does not exist.
* Guarantees space for at least one push.
*/
static void defargsYYensure_buffer_stack (void)
{
int num_to_alloc;
if (!(yy_buffer_stack)) {
		/* First allocation is just for 1 element, since we don't know if this
		 * scanner will even need a stack.
*/
num_to_alloc = 1;
(yy_buffer_stack) = (struct yy_buffer_state**)defargsYYalloc
(num_to_alloc * sizeof(struct yy_buffer_state*)
);
if ( ! (yy_buffer_stack) )
YY_FATAL_ERROR( "out of dynamic memory in defargsYYensure_buffer_stack()" );
memset((yy_buffer_stack), 0, num_to_alloc * sizeof(struct yy_buffer_state*));
(yy_buffer_stack_max) = num_to_alloc;
(yy_buffer_stack_top) = 0;
return;
}
if ((yy_buffer_stack_top) >= ((yy_buffer_stack_max)) - 1){
/* Increase the buffer to prepare for a possible push. */
int grow_size = 8 /* arbitrary grow size */;
num_to_alloc = (yy_buffer_stack_max) + grow_size;
(yy_buffer_stack) = (struct yy_buffer_state**)defargsYYrealloc
((yy_buffer_stack),
num_to_alloc * sizeof(struct yy_buffer_state*)
);
if ( ! (yy_buffer_stack) )
YY_FATAL_ERROR( "out of dynamic memory in defargsYYensure_buffer_stack()" );
/* zero only the new slots.*/
memset((yy_buffer_stack) + (yy_buffer_stack_max), 0, grow_size * sizeof(struct yy_buffer_state*));
(yy_buffer_stack_max) = num_to_alloc;
}
}
/** Setup the input buffer state to scan directly from a user-specified character buffer.
* @param base the character buffer
* @param size the size in bytes of the character buffer
*
* @return the newly allocated buffer state object.
*/
YY_BUFFER_STATE defargsYY_scan_buffer (char * base, yy_size_t size )
{
YY_BUFFER_STATE b;
if ( size < 2 ||
base[size-2] != YY_END_OF_BUFFER_CHAR ||
base[size-1] != YY_END_OF_BUFFER_CHAR )
/* They forgot to leave room for the EOB's. */
return 0;
b = (YY_BUFFER_STATE) defargsYYalloc(sizeof( struct yy_buffer_state ) );
if ( ! b )
YY_FATAL_ERROR( "out of dynamic memory in defargsYY_scan_buffer()" );
b->yy_buf_size = size - 2; /* "- 2" to take care of EOB's */
b->yy_buf_pos = b->yy_ch_buf = base;
b->yy_is_our_buffer = 0;
b->yy_input_file = 0;
b->yy_n_chars = b->yy_buf_size;
b->yy_is_interactive = 0;
b->yy_at_bol = 1;
b->yy_fill_buffer = 0;
b->yy_buffer_status = YY_BUFFER_NEW;
defargsYY_switch_to_buffer(b );
return b;
}
/** Setup the input buffer state to scan a string. The next call to defargsYYlex() will
* scan from a @e copy of @a str.
* @param yystr a NUL-terminated string to scan
*
* @return the newly allocated buffer state object.
* @note If you want to scan bytes that may contain NUL values, then use
* defargsYY_scan_bytes() instead.
*/
YY_BUFFER_STATE defargsYY_scan_string (yyconst char * yystr )
{
return defargsYY_scan_bytes(yystr,strlen(yystr) );
}
/** Setup the input buffer state to scan the given bytes. The next call to defargsYYlex() will
* scan from a @e copy of @a bytes.
* @param bytes the byte buffer to scan
* @param len the number of bytes in the buffer pointed to by @a bytes.
*
* @return the newly allocated buffer state object.
*/
YY_BUFFER_STATE defargsYY_scan_bytes (yyconst char * yybytes, int _yybytes_len )
{
YY_BUFFER_STATE b;
char *buf;
yy_size_t n;
int i;
/* Get memory for full buffer, including space for trailing EOB's. */
n = _yybytes_len + 2;
buf = (char *) defargsYYalloc(n );
if ( ! buf )
YY_FATAL_ERROR( "out of dynamic memory in defargsYY_scan_bytes()" );
for ( i = 0; i < _yybytes_len; ++i )
buf[i] = yybytes[i];
buf[_yybytes_len] = buf[_yybytes_len+1] = YY_END_OF_BUFFER_CHAR;
b = defargsYY_scan_buffer(buf,n );
if ( ! b )
YY_FATAL_ERROR( "bad buffer in defargsYY_scan_bytes()" );
/* It's okay to grow etc. this buffer, and we should throw it
* away when we're done.
*/
b->yy_is_our_buffer = 1;
return b;
}
#ifndef YY_EXIT_FAILURE
#define YY_EXIT_FAILURE 2
#endif
static void yy_fatal_error (yyconst char* msg )
{
(void) fprintf( stderr, "%s\n", msg );
exit( YY_EXIT_FAILURE );
}
/* Redefine yyless() so it works in section 3 code. */
#undef yyless
#define yyless(n) \
do \
{ \
/* Undo effects of setting up defargsYYtext. */ \
int yyless_macro_arg = (n); \
YY_LESS_LINENO(yyless_macro_arg);\
defargsYYtext[defargsYYleng] = (yy_hold_char); \
(yy_c_buf_p) = defargsYYtext + yyless_macro_arg; \
(yy_hold_char) = *(yy_c_buf_p); \
*(yy_c_buf_p) = '\0'; \
defargsYYleng = yyless_macro_arg; \
} \
while ( 0 )
/* Accessor methods (get/set functions) to struct members. */
/** Get the current line number.
*
*/
int defargsYYget_lineno (void)
{
return defargsYYlineno;
}
/** Get the input stream.
*
*/
FILE *defargsYYget_in (void)
{
return defargsYYin;
}
/** Get the output stream.
*
*/
FILE *defargsYYget_out (void)
{
return defargsYYout;
}
/** Get the length of the current token.
*
*/
int defargsYYget_leng (void)
{
return defargsYYleng;
}
/** Get the current token.
*
*/
char *defargsYYget_text (void)
{
return defargsYYtext;
}
/** Set the current line number.
* @param line_number
*
*/
void defargsYYset_lineno (int line_number )
{
defargsYYlineno = line_number;
}
/** Set the input stream. This does not discard the current
* input buffer.
* @param in_str A readable stream.
*
* @see defargsYY_switch_to_buffer
*/
void defargsYYset_in (FILE * in_str )
{
defargsYYin = in_str ;
}
void defargsYYset_out (FILE * out_str )
{
defargsYYout = out_str ;
}
int defargsYYget_debug (void)
{
return defargsYY_flex_debug;
}
void defargsYYset_debug (int bdebug )
{
defargsYY_flex_debug = bdebug ;
}
static int yy_init_globals (void)
{
/* Initialization is the same as for the non-reentrant scanner.
* This function is called from defargsYYlex_destroy(), so don't allocate here.
*/
(yy_buffer_stack) = 0;
(yy_buffer_stack_top) = 0;
(yy_buffer_stack_max) = 0;
(yy_c_buf_p) = (char *) 0;
(yy_init) = 0;
(yy_start) = 0;
(yy_state_buf) = 0;
(yy_state_ptr) = 0;
(yy_full_match) = 0;
(yy_lp) = 0;
/* Defined in main.c */
#ifdef YY_STDINIT
defargsYYin = stdin;
defargsYYout = stdout;
#else
defargsYYin = (FILE *) 0;
defargsYYout = (FILE *) 0;
#endif
/* For future reference: Set errno on error, since we are called by
* defargsYYlex_init()
*/
return 0;
}
/* defargsYYlex_destroy is for both reentrant and non-reentrant scanners. */
int defargsYYlex_destroy (void)
{
/* Pop the buffer stack, destroying each element. */
while(YY_CURRENT_BUFFER){
defargsYY_delete_buffer(YY_CURRENT_BUFFER );
YY_CURRENT_BUFFER_LVALUE = NULL;
defargsYYpop_buffer_state();
}
/* Destroy the stack itself. */
defargsYYfree((yy_buffer_stack) );
(yy_buffer_stack) = NULL;
defargsYYfree ( (yy_state_buf) );
(yy_state_buf) = NULL;
/* Reset the globals. This is important in a non-reentrant scanner so the next time
* defargsYYlex() is called, initialization will occur. */
yy_init_globals( );
return 0;
}
/*
* Internal utility routines.
*/
#ifndef yytext_ptr
static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
{
register int i;
for ( i = 0; i < n; ++i )
s1[i] = s2[i];
}
#endif
#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * s )
{
register int n;
for ( n = 0; s[n]; ++n )
;
return n;
}
#endif
void *defargsYYalloc (yy_size_t size )
{
return (void *) malloc( size );
}
void *defargsYYrealloc (void * ptr, yy_size_t size )
{
/* The cast to (char *) in the following accommodates both
* implementations that use char* generic pointers, and those
* that use void* generic pointers. It works with the latter
* because both ANSI C and C++ allow castless assignment from
* any pointer type to void*, and deal with argument conversions
* as though doing an assignment.
*/
return (void *) realloc( (char *) ptr, size );
}
void defargsYYfree (void * ptr )
{
free( (char *) ptr ); /* see defargsYYrealloc() for (char *) cast */
}
#define YYTABLES_NAME "yytables"
#line 515 "defargs.l"
/* ----------------------------------------------------------------------------
*/
/*! Converts an argument string into an ArgumentList.
 * \param[in] argsString the string of arguments to parse.
 * \param[out] al a reference to the resulting argument list pointer.
 * \param[out] extraTypeChars points to a string to which trailing characters
 * for complex types are written
*/
void stringToArgumentList(const char *argsString,ArgumentList* al,QCString *extraTypeChars)
{
if (al==0) return;
if (argsString==0) return;
printlex(defargsYY_flex_debug, TRUE, __FILE__, NULL);
g_copyArgValue=0;
g_curArgDocs.resize(0);
g_curArgAttrib.resize(0);
g_curArgArray.resize(0);
g_extraTypeChars.resize(0);
g_argRoundCount = 0;
g_argSharpCount = 0;
g_argCurlyCount = 0;
g_lastDocChar = 0;
g_inputString = argsString;
g_inputPosition = 0;
g_curArgTypeName.resize(0);
g_curArgDefValue.resize(0);
g_curArgName.resize(0);
g_argList = al;
defargsYYrestart( defargsYYin );
BEGIN( Start );
defargsYYlex();
if (extraTypeChars) *extraTypeChars=g_extraTypeChars;
//printf("stringToArgumentList(%s) result=%s\n",argsString,argListToString(al).data());
printlex(defargsYY_flex_debug, FALSE, __FILE__, NULL);
}
#if !defined(YY_FLEX_SUBMINOR_VERSION)
extern "C" { // some bogus code to keep the compiler happy
void defargsYYdummy() { yy_flex_realloc(0,0); }
}
#endif<|fim▁end|> | |
<|file_name|>check.py<|end_file_name|><|fim▁begin|>from importlib import import_module
from inspect import getdoc<|fim▁hole|> print 'Has __all__?', hasattr(mod, '__all__')
print 'Has __doc__?', hasattr(mod, '__doc__')
print 'doc: ', getdoc(mod)
if __name__=='__main__':
attribs('cairo')
attribs('zope')
attribs('A.B.C')
import hacked
class Object(object):
pass
opt = Object()
opt.ignore_errors = False
a, d = hacked.get_all_attr_has_docstr('/home/ali/ws-pydev/apidocfilter/A/B',
'/home/ali/ws-pydev/apidocfilter/A/B/C',
opt)
print(a)
print(d)<|fim▁end|> |
def attribs(name):
mod = import_module(name)
print name |
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|># Copyright 2015, Province of British Columbia
# License: https://github.com/bcgov/ckanext-bcgov/blob/master/license
from ckan.common import c, _
import pylons.config as config
import ckan.lib.base as base
import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
from routes.mapper import SubMapper
from ckanext.bcgov.util.util import (get_edc_tags,
edc_type_label,
get_state_values,
get_username,
get_user_orgs,
get_user_role_orgs,
get_user_orgs_id,
get_user_toporgs,
get_organization_branches,
get_all_orgs
)
from ckanext.bcgov.util.helpers import (get_suborg_sector,
get_user_dataset_num,
get_package_data,
is_license_open,
get_record_type_label,
get_suborgs,
record_is_viewable,
get_facets_selected,
get_facets_unselected,
get_sectors_list,
get_dataset_type,
get_organizations,
get_organization_title,
get_espg_id,
get_edc_org,
get_iso_topic_values,
get_eas_login_url,
get_fqdn,
get_environment_name,
get_version,
get_bcgov_commit_id,
resource_prefix,
)
abort = base.abort
class SchemaPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IConfigurer)
plugins.implements(plugins.IRoutes, inherit=True)
plugins.implements(plugins.ITemplateHelpers, inherit=False)
plugins.implements(plugins.IPackageController, inherit=True)
plugins.implements(plugins.IFacets, inherit=True)
plugins.implements(plugins.IActions, inherit=True)
def get_helpers(self):
return {
"dataset_type" : get_dataset_type,
"edc_tags" : get_edc_tags,
"edc_orgs" : get_organizations,
"edc_org_branches" : get_organization_branches,
"edc_org_title" : get_organization_title,
"edc_type_label" : edc_type_label,
"edc_state_values" : get_state_values,
"edc_username": get_username,
"get_sector" : get_suborg_sector,
"get_user_orgs" : get_user_orgs,
"get_user_orgs_id" : get_user_orgs_id,
"get_user_toporgs": get_user_toporgs,
"get_suborg_sector" : get_suborg_sector,
"get_user_dataset_num" : get_user_dataset_num,
"get_edc_package" : get_package_data,
"is_license_open" : is_license_open,
"record_type_label" : get_record_type_label,
"get_suborgs": get_suborgs,
"record_is_viewable": record_is_viewable,
"get_espg_id" : get_espg_id,
"get_user_role_orgs" : get_user_role_orgs,
"get_all_orgs" : get_all_orgs,
"get_facets_selected": get_facets_selected,
"get_facets_unselected" : get_facets_unselected,
"get_sectors_list": get_sectors_list,
"get_edc_org" : get_edc_org,
"get_iso_topic_values" : get_iso_topic_values,
"get_eas_login_url": get_eas_login_url,
"get_fqdn": get_fqdn,
"get_environment_name": get_environment_name,
"get_version": get_version,
"get_bcgov_commit_id": get_bcgov_commit_id,
"googleanalytics_resource_prefix": resource_prefix,
}
def update_config(self, config):
toolkit.add_public_directory(config, 'public')
toolkit.add_template_directory(config, 'templates')
toolkit.add_resource('fanstatic', 'edc_resource')
toolkit.add_resource('public/scripts', 'theme_scripts')
#Customizing action mapping
def before_map(self, map):
from routes.mapper import SubMapper<|fim▁hole|> user_controller = 'ckanext.bcgov.controllers.user:EDCUserController'
org_controller = 'ckanext.bcgov.controllers.organization:EDCOrganizationController'
site_map_controller = 'ckanext.bcgov.controllers.site_map:GsaSitemapController'
api_controller = 'ckanext.bcgov.controllers.api:EDCApiController'
# map.redirect('/', '/dataset')
map.connect('package_index', '/', controller=package_controller, action='index')
map.connect('/dataset/add', controller=package_controller, action='typeSelect')
with SubMapper(map, controller=package_controller) as m:
m.connect('add dataset', '/dataset/new', action='new')
#m.connect('dataset_edit', '/dataset/edit/{id}', action='edc_edit',ckan_icon='edit')
m.connect('search', '/dataset', action='search', highlight_actions='index search')
m.connect('dataset_read', '/dataset/{id}', action='read', ckan_icon='sitemap')
m.connect('duplicate', '/dataset/duplicate/{id}', action='duplicate')
m.connect('/dataset/{id}/resource/{resource_id}', action='resource_read')
m.connect('/dataset/{id}/resource_delete/{resource_id}', action='resource_delete')
m.connect('/authorization-error', action='auth_error')
m.connect('resource_edit', '/dataset/{id}/resource_edit/{resource_id}', action='resource_edit', ckan_icon='edit')
m.connect('new_resource', '/dataset/new_resource/{id}', action='new_resource')
with SubMapper(map, controller=user_controller) as m:
m.connect('user_dashboard_unpublished', '/dashboard/unpublished',
action='dashboard_unpublished', ckan_icon='group')
m.connect('/user/edit', action='edit')
m.connect('/user/activity/{id}/{offset}', action='activity')
m.connect('user_activity_stream', '/user/activity/{id}',
action='activity', ckan_icon='time')
m.connect('user_dashboard', '/dashboard', action='dashboard',
ckan_icon='list')
m.connect('user_dashboard_datasets', '/dashboard/datasets',
action='dashboard_datasets', ckan_icon='sitemap')
m.connect('user_dashboard_organizations', '/dashboard/organizations',
action='dashboard_organizations', ckan_icon='building')
m.connect('/dashboard/{offset}', action='dashboard')
m.connect('user_follow', '/user/follow/{id}', action='follow')
m.connect('/user/unfollow/{id}', action='unfollow')
m.connect('user_followers', '/user/followers/{id:.*}',
action='followers', ckan_icon='group')
m.connect('user_edit', '/user/edit/{id:.*}', action='edit',
ckan_icon='cog')
m.connect('user_delete', '/user/delete/{id}', action='delete')
m.connect('/user/reset/{id:.*}', action='perform_reset')
m.connect('register', '/user/register', action='register')
m.connect('login', '/user/login', action='login')
m.connect('/user/_logout', action='logout')
m.connect('/user/logged_in', action='logged_in')
m.connect('/user/logged_out', action='logged_out')
m.connect('/user/logged_out_redirect', action='logged_out_page')
m.connect('/user/reset', action='request_reset')
m.connect('/user/me', action='me')
m.connect('/user/set_lang/{lang}', action='set_lang')
m.connect('user_datasets', '/user/{id:.*}', action='read',
ckan_icon='sitemap')
m.connect('user_index', '/user', action='index')
with SubMapper(map, controller=org_controller) as m:
m.connect('organizations_index', '/organization', action='index')
m.connect('/organization/list', action='list')
m.connect('/organization/new', action='new')
m.connect('/organization/{action}/{id}',
requirements=dict(action='|'.join([
'delete',
'admins',
'member_new',
'member_delete',
'history'
])))
m.connect('organization_activity', '/organization/activity/{id}',
action='activity', ckan_icon='time')
m.connect('organization_about', '/organization/about/{id}',
action='about', ckan_icon='info-sign')
m.connect('organization_read', '/organization/{id}', action='read',
ckan_icon='sitemap')
m.connect('organization_edit', '/organization/edit/{id}',
action='edit', ckan_icon='edit')
m.connect('organization_members', '/organization/members/{id}',
action='members', ckan_icon='group')
m.connect('organization_bulk_process',
'/organization/bulk_process/{id}',
action='bulk_process', ckan_icon='sitemap')
map.connect('sitemap','/sitemap.html', controller=site_map_controller, action='view')
map.connect('sitemap','/sitemap.xml', controller=site_map_controller, action='read')
with SubMapper(map, controller=api_controller, path_prefix='/api{ver:/1|/2|/3|}',
ver='/1') as m:
m.connect('/i18n/{lang}', action='i18n_js_translations')
m.connect('/')
GET_POST = dict(method=['GET', 'POST'])
m.connect('/action/organization_list_related', action='organization_list_related', conditions=GET_POST)
m.connect('/action/{logic_function}', action='action', conditions=GET_POST)
map.connect('/admin/trash', controller='admin', action='trash')
map.connect('ckanadmin_trash', '/admin/trash', controller='admin',
action='trash', ckan_icon='trash')
return map
def after_map(self, map):
return map;
def before_index(self, pkg_dict):
'''
Makes the sort by name case insensitive.
Note that the search index must be rebuild for the first time in order for the changes to take affect.
'''
title = pkg_dict['title']
if title:
#Assign title to title_string with all characters switched to lower case.
pkg_dict['title_string'] = title.lower()
res_format = pkg_dict.get('res_format', [])
if 'other' in res_format:
# custom download (other) supports a number of formats
res_format.remove('other')
res_format.extend(['shp', 'fgdb', 'e00'])
return pkg_dict
def before_search(self, search_params):
'''
Customizes package search and applies filters based on the dataset metadata-visibility
and user roles.
'''
#Change the default sort order when no query passed
if not search_params.get('q') and search_params.get('sort') in (None, 'rank'):
search_params['sort'] = 'record_publish_date desc, metadata_modified desc'
#Change the query filter depending on the user
if 'fq' in search_params:
fq = search_params['fq']
else:
fq = ''
#need to append solr param q.op to force an AND query
if 'q' in search_params:
q = search_params['q']
if q !='':
q = '{!lucene q.op=AND}' + q
search_params['q'] = q
else:
q = ''
try :
user_name = c.user or 'visitor'
# There are no restrictions for sysadmin
            if c.userobj and c.userobj.sysadmin:
fq += ' '
else:
if user_name != 'visitor':
fq += ' +(edc_state:("PUBLISHED" OR "PENDING ARCHIVE")'
#IDIR users can also see private records of their organizations
user_id = c.userobj.id
#Get the list of orgs that the user is an admin or editor of
user_orgs = ['"' + org.id + '"' for org in get_user_orgs(user_id, 'admin')]
user_orgs += ['"' + org.id + '"' for org in get_user_orgs(user_id, 'editor')]
if user_orgs != []:
fq += ' OR ' + 'owner_org:(' + ' OR '.join(user_orgs) + ')'
fq += ')'
#Public user can only view public and published records
else:
fq += ' +(edc_state:("PUBLISHED" OR "PENDING ARCHIVE") AND metadata_visibility:("Public"))'
except Exception:
if 'fq' in search_params:
fq = search_params['fq']
else:
fq = ''
fq += ' +edc_state:("PUBLISHED" OR "PENDING ARCHIVE") +metadata_visibility:("Public")'
search_params['fq'] = fq
return search_params
def before_view(self, pkg_dict):
# CITZEDC808
if not record_is_viewable(pkg_dict, c.userobj):
abort(401, _('Unauthorized to read package %s') % pkg_dict.get("title"))
return pkg_dict
def dataset_facets(self, facet_dict, package_type):
'''
Customizes search facet list.
'''
from collections import OrderedDict
facet_dict = OrderedDict()
#Add dataset types and organization sectors to the facet list
facet_dict['license_id'] = _('License')
facet_dict['sector'] = _('Sectors')
facet_dict['type'] = _('Dataset types')
facet_dict['res_format'] = _('Format')
facet_dict['organization'] = _('Organizations')
facet_dict['download_audience'] = _('Download permission')
if c.userobj and c.userobj.sysadmin:
facet_dict['edc_state'] = _('States')
return facet_dict
def group_facets(self, facet_dict, group_type, package_type):
'''
Use the same facets for filtering datasets within group pages
'''
return self.dataset_facets(facet_dict, package_type)
def get_actions(self):
import ckanext.bcgov.logic.action as edc_action
return {'edc_package_update' : edc_action.edc_package_update,
'edc_package_update_bcgw' : edc_action.edc_package_update_bcgw,
'package_update' : edc_action.package_update,
'package_autocomplete' : edc_action.package_autocomplete }
class EDCDisqusPlugin(plugins.SingletonPlugin):
# Declare that this class implements IConfigurer.
plugins.implements(plugins.IConfigurer)
# Declare that this plugin will implement ITemplateHelpers.
plugins.implements(plugins.ITemplateHelpers)
plugins.implements(plugins.IRoutes, inherit=True)
def update_config(self, config):
# Add this plugin's templates dir to CKAN's extra_template_paths, so
# that CKAN will use this plugin's custom templates.
# 'templates' is the path to the templates dir, relative to this
# plugin.py file.
toolkit.add_template_directory(config, 'templates')
# Add this plugin's public dir to CKAN's extra_public_paths, so
# that CKAN will use this plugin's custom static files.
toolkit.add_public_directory(config, 'public')
toolkit.add_resource('fanstatic', 'edc_resource')
def before_map(self, map):
disqus_controller = 'ckanext.bcgov.controllers.disqus:DisqusController'
with SubMapper(map, controller=disqus_controller) as m:
m.connect('/disqus/posts/create', action='disqusPostCreate')
with SubMapper(map, controller=disqus_controller) as m:
m.connect('/disqus/threads/get', action='disqusGetThread')
with SubMapper(map, controller=disqus_controller) as m:
m.connect('/disqus/posts/list', action='disqusGetPostsByThread')
return map
def comments_block(self):
''' Adds Disqus Comments to the page.'''
# we need to create an identifier
c = plugins.toolkit.c
identifier = ''
try:
if c.current_package_id:
identifier = c.current_package_id
elif c.id:
identifier = c.id
else:
# cannot make an identifier
identifier = ''
except:
identifier = ''
data = {'identifier' : identifier, 'site_url': config.get('ckan.site_url') }
return plugins.toolkit.render_snippet('package/comments_block.html', data)
def disqus_get_forum_name(self):
return config.get('edcdisqus.forum_name') or 'bccatalogue'
def get_helpers(self):
return { 'comments_block' : self.comments_block, 'disqus_get_forum_name': self.disqus_get_forum_name }<|fim▁end|> | package_controller = 'ckanext.bcgov.controllers.package:EDCPackageController' |
<|file_name|>WEDInterface.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) 2002-2012 Croteam Ltd.
This program is free software; you can redistribute it and/or modify
it under the terms of version 2 of the GNU General Public License as published by
the Free Software Foundation
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */
#include "StdAfx.h"
extern CGame *_pGame;
extern INDEX gam_iQuickStartDifficulty;
extern INDEX gam_iQuickStartMode;
extern INDEX gam_iStartDifficulty;
extern INDEX gam_iStartMode;
// initialize game and load settings
void CGame::Initialize(const CTFileName &fnGameSettings)
{
gm_fnSaveFileName = fnGameSettings;
InitInternal();
}
// save settings and cleanup
void CGame::End(void)
{
EndInternal();
}
// automatically manage input enable/disable toggling
static BOOL _bInputEnabled = FALSE;
void UpdateInputEnabledState(CViewPort *pvp)
{
// input should be enabled if application is active
// and no menu is active and no console is active
BOOL bShouldBeEnabled = _pGame->gm_csConsoleState==CS_OFF && _pGame->gm_csComputerState==CS_OFF;
// if should be turned off
if (!bShouldBeEnabled && _bInputEnabled) {
// disable it
_pInput->DisableInput();
// remember new state
_bInputEnabled = FALSE;
}
// if should be turned on
if (bShouldBeEnabled && !_bInputEnabled) {
// enable it
_pInput->EnableInput(pvp);
// remember new state
_bInputEnabled = TRUE;
}
}
// automatically manage pause toggling<|fim▁hole|>
static void UpdatePauseState(void)
{
BOOL bShouldPause =
_pGame->gm_csConsoleState ==CS_ON || _pGame->gm_csConsoleState ==CS_TURNINGON || _pGame->gm_csConsoleState ==CS_TURNINGOFF ||
_pGame->gm_csComputerState==CS_ON || _pGame->gm_csComputerState==CS_TURNINGON || _pGame->gm_csComputerState==CS_TURNINGOFF;
_pNetwork->SetLocalPause(bShouldPause);
}
// run a quicktest game from within editor
void CGame::QuickTest(const CTFileName &fnMapName,
CDrawPort *pdp, CViewPort *pvp)
{
#ifdef PLATFORM_WIN32
UINT uiMessengerMsg = RegisterWindowMessage("Croteam Messenger: Incoming Message");
#else
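  // non-Windows builds have no RegisterWindowMessage(), so a fixed sentinel id stands in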
UINT uiMessengerMsg = 0x7337d00d;
#endif
EnableLoadingHook(pdp);
// quick start game with the world
gm_strNetworkProvider = "Local";
gm_aiStartLocalPlayers[0] = gm_iWEDSinglePlayer;
gm_aiStartLocalPlayers[1] = -1;
gm_aiStartLocalPlayers[2] = -1;
gm_aiStartLocalPlayers[3] = -1;
gm_CurrentSplitScreenCfg = CGame::SSC_PLAY1;
// set properties for a quick start session
CSessionProperties sp;
SetQuickStartSession(sp);
// start the game
if( !NewGame( fnMapName, fnMapName, sp)) {
DisableLoadingHook();
return;
}
// enable input
_pInput->EnableInput(pvp);
  // initially, game is running
BOOL bRunning = TRUE;
// while it is still running
while( bRunning)
{
// while there are any messages in the message queue
MSG msg;
while( PeekMessage( &msg, NULL, 0, 0, PM_REMOVE)) {
// if it is not a mouse message
if( !(msg.message>=WM_MOUSEFIRST && msg.message<=WM_MOUSELAST)) {
// if not system key messages
if( !(msg.message==WM_KEYDOWN && msg.wParam==VK_F10
||msg.message==WM_SYSKEYDOWN)) {
// dispatch it
TranslateMessage(&msg);
}
// if paint message
if( msg.message==WM_PAINT) {
// dispatch it
DispatchMessage(&msg);
}
}
// if should stop
if ((msg.message==WM_QUIT)
||(msg.message==WM_CLOSE)
||(msg.message==WM_KEYDOWN && msg.wParam==VK_ESCAPE)
||(msg.message==WM_ACTIVATE)
||(msg.message==WM_CANCELMODE)
||(msg.message==WM_KILLFOCUS)
||(msg.message==WM_ACTIVATEAPP)) {
// stop running
bRunning = FALSE;
break;
}
if (msg.message==uiMessengerMsg)
{
if(!_pNetwork->IsPaused())
{
// pause it
_pNetwork->TogglePause();
}
char *pachrTemp=getenv("TEMP");
if( pachrTemp!=NULL)
{
FILE *pfileMessage=fopen(CTString(pachrTemp)+"Messenger.msg","r");
if( pfileMessage!=NULL)
{
char achrMessage[1024];
char *pachrMessage=fgets( achrMessage, 1024-1, pfileMessage);
if( pachrMessage!=NULL)
{
CPrintF("%s",pachrMessage);
}
}
}
}
// if pause pressed
if (msg.message==WM_KEYDOWN && msg.wParam==VK_PAUSE &&
_pGame->gm_csConsoleState==CS_OFF && _pGame->gm_csComputerState==CS_OFF) {
// toggle pause
_pNetwork->TogglePause();
}
if(msg.message==WM_KEYDOWN &&
// !!! FIXME: rcg11162001 This sucks.
#ifdef PLATFORM_UNIX
(msg.unicode == '~'
#else
(MapVirtualKey(msg.wParam, 0)==41 // scan code for '~'
#endif
||msg.wParam==VK_F1)) {
if (_pGame->gm_csConsoleState==CS_OFF || _pGame->gm_csConsoleState==CS_TURNINGOFF) {
_pGame->gm_csConsoleState = CS_TURNINGON;
} else {
_pGame->gm_csConsoleState = CS_TURNINGOFF;
}
}
extern INDEX con_bTalk;
if (con_bTalk && _pGame->gm_csConsoleState==CS_OFF) {
con_bTalk = FALSE;
_pGame->gm_csConsoleState = CS_TALK;
}
if (msg.message==WM_KEYDOWN) {
ConsoleKeyDown(msg);
if (_pGame->gm_csConsoleState!=CS_ON) {
ComputerKeyDown(msg);
}
} else if (msg.message==WM_KEYUP) {
// special handler for talk (not to invoke return key bind)
if( msg.wParam==VK_RETURN && _pGame->gm_csConsoleState==CS_TALK) _pGame->gm_csConsoleState = CS_OFF;
} else if (msg.message==WM_CHAR) {
ConsoleChar(msg);
}
if (msg.message==WM_LBUTTONDOWN
||msg.message==WM_RBUTTONDOWN
||msg.message==WM_LBUTTONDBLCLK
||msg.message==WM_RBUTTONDBLCLK
||msg.message==WM_LBUTTONUP
||msg.message==WM_RBUTTONUP) {
if (_pGame->gm_csConsoleState!=CS_ON) {
ComputerKeyDown(msg);
}
}
}
// get real cursor position
if (_pGame->gm_csComputerState != CS_OFF) {
POINT pt;
::GetCursorPos(&pt);
::ScreenToClient(pvp->vp_hWnd, &pt);
ComputerMouseMove(pt.x, pt.y);
}
UpdatePauseState();
UpdateInputEnabledState(pvp);
// if playing a demo and it is finished
if (_pNetwork->IsDemoPlayFinished()) {
// stop running
bRunning = FALSE;
}
// do the main game loop
GameMainLoop();
// redraw the view
if (pdp->Lock()) {
// if current view preferences will not clear the background, clear it here
if( _wrpWorldRenderPrefs.GetPolygonsFillType() == CWorldRenderPrefs::FT_NONE) {
// clear background
pdp->Fill(C_BLACK| CT_OPAQUE);
pdp->FillZBuffer(ZBUF_BACK);
}
// redraw view
if (_pGame->gm_csComputerState != CS_ON) {
GameRedrawView(pdp, (_pGame->gm_csConsoleState==CS_ON)?0:GRV_SHOWEXTRAS);
}
ComputerRender(pdp);
ConsoleRender(pdp);
pdp->Unlock();
// show it
pvp->SwapBuffers();
}
}
if (_pGame->gm_csConsoleState != CS_OFF) {
_pGame->gm_csConsoleState = CS_TURNINGOFF;
}
if (_pGame->gm_csComputerState != CS_OFF) {
_pGame->gm_csComputerState = CS_TURNINGOFF;
cmp_ppenPlayer = NULL;
}
_pInput->DisableInput();
StopGame();
DisableLoadingHook();
}<|fim▁end|> | |
<|file_name|>tcmalloc_debug.cc<|end_file_name|><|fim▁begin|><|fim▁hole|>#include <iostream>
using namespace std;
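// intentionally broken: memcpy() copies 32 bytes from a 12-byte string literal,
// overflowing any smaller destination buffer (exercises tcmalloc's debug heap checks)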
void foo(char* p){
memcpy(p, "01234567890", 32);
}
void foo2(char *p) {
for (int i = 0; i < 100; i++) {
cout << "start p[" << i << "]" << endl;
p[i] = 'a';
cout << "p[" << i << "] ok " << endl;
}
}
int main(int argc, char** argv){
char* p = new char[10];
//foo(p);
delete []p;
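  // use-after-free: p was released above, so foo2's writes land in freed memory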
foo2(p);
printf("p=%s\n", p);
return 0;
}<|fim▁end|> | #include <unistd.h>
#include <string.h>
#include <stdio.h> |
<|file_name|>GraphInferenceGrpcClientTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4j.graph;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.RandomUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.nd4j.common.tests.BaseND4JTest;
import org.nd4j.autodiff.execution.conf.ExecutorConfiguration;
import org.nd4j.autodiff.execution.conf.OutputMode;
import org.nd4j.autodiff.execution.input.Operands;
import org.nd4j.imports.graphmapper.tf.TFGraphMapper;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.remote.grpc.GraphInferenceGrpcClient;
import static org.junit.Assert.*;
@Slf4j
@Ignore
public class GraphInferenceGrpcClientTest extends BaseND4JTest {
@Test
public void testSimpleGraph_1() throws Exception {
val exp = Nd4j.create(new double[] {-0.95938617, -1.20301781, 1.22260064, 0.50172403, 0.59972949, 0.78568028, 0.31609724, 1.51674747, 0.68013491, -0.05227458, 0.25903158,1.13243439}, new long[]{3, 1, 4});
// configuring client
val client = new GraphInferenceGrpcClient("127.0.0.1", 40123);
val graphId = RandomUtils.nextLong(0, Long.MAX_VALUE);
// preparing and registering graph (it's optional, and graph might be embedded into Docker image
val tg = TFGraphMapper.importGraph(new ClassPathResource("tf_graphs/examples/expand_dim/frozen_model.pb").getInputStream());
assertNotNull(tg);
client.registerGraph(graphId, tg, ExecutorConfiguration.builder().outputMode(OutputMode.IMPLICIT).build());
//defining input
val input0 = Nd4j.create(new double[] {0.09753360, 0.76124972, 0.24693797, 0.13813169, 0.33144656, 0.08299957, 0.67197708, 0.80659380, 0.98274191, 0.63566073, 0.21592326, 0.54902743}, new int[] {3, 4});
val operands = new Operands().addArgument("input_0", input0);
// sending request and getting result
val result = client.output(graphId, operands);
assertEquals(exp, result.getById("output"));
}
@Test
public void testSimpleGraph_2() throws Exception {
val exp = Nd4j.create(new double[] {-0.95938617, -1.20301781, 1.22260064, 0.50172403, 0.59972949, 0.78568028, 0.31609724, 1.51674747, 0.68013491, -0.05227458, 0.25903158,1.13243439}, new long[]{3, 1, 4});
// configuring client
val client = new GraphInferenceGrpcClient("127.0.0.1", 40123);
val graphId = RandomUtils.nextLong(0, Long.MAX_VALUE);
        // preparing and registering graph (it's optional, and the graph might be embedded into a Docker image)
val tg = TFGraphMapper.importGraph(new ClassPathResource("tf_graphs/examples/expand_dim/frozen_model.pb").getInputStream());
assertNotNull(tg);
client.registerGraph(graphId, tg, ExecutorConfiguration.builder().outputMode(OutputMode.IMPLICIT).build());
//defining input
val input0 = Nd4j.create(new double[] {0.09753360, 0.76124972, 0.24693797, 0.13813169, 0.33144656, 0.08299957, 0.67197708, 0.80659380, 0.98274191, 0.63566073, 0.21592326, 0.54902743}, new int[] {3, 4});
val operands = new Operands().addArgument(1, 0, input0);
<|fim▁hole|> val result = client.output(graphId, operands);
assertEquals(exp, result.getById("output"));
}
}<|fim▁end|> | // sending request and getting result |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
import models<|fim▁hole|><|fim▁end|> |
admin.site.register(models.Testimonial) |
<|file_name|>server.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-*- coding:utf-8 -*-
import logging, os.path
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
import tornado.gen
class Application(tornado.web.Application):
def __init__(self):
base_dir = os.path.dirname(__file__)
app_settings = {
"debug": True,
'static_path': os.path.join(base_dir, "static"),
}
tornado.web.Application.__init__(self, [
tornado.web.url(r"/", MainHandler, name="main"),
tornado.web.url(r"/live", WebSocketHandler, name="websocket"),
], **app_settings)
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.render('index.html')
class WebSocketHandler(tornado.websocket.WebSocketHandler):
    listeners = []
<|fim▁hole|> @tornado.gen.engine
def open(self):
        WebSocketHandler.listeners.append(self)
def on_close(self):
        if self in WebSocketHandler.listeners:
            WebSocketHandler.listeners.remove(self)
@tornado.gen.engine
def on_message(self, wsdata):
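        # broadcast each incoming frame to every connected client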
        for listener in WebSocketHandler.listeners:
            listener.write_message(wsdata)
@tornado.gen.coroutine
def main():
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(Application())
http_server.listen(8888)
logging.info("application running on http://localhost:8888")
if __name__ == "__main__":
tornado.ioloop.IOLoop.current().run_sync(main)
tornado.ioloop.IOLoop.current().start()<|fim▁end|> | def check_origin(self, origin):
return True
|
<|file_name|>authorize.js<|end_file_name|><|fim▁begin|>function showErrorMessage(errorMessage) {
$("#authorize-prompt")
.addClass("error-prompt")
.removeClass("success-prompt")
.html(errorMessage);
}
function showSuccessMessage(message) {
$("#authorize-prompt")
.removeClass("error-prompt")
.addClass("success-prompt")
.html(message);
}
function shake() {
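    // wiggle the login box 10px left and right of its resting offset (-150px)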
var l = 10;
var original = -150;
for( var i = 0; i < 8; i++ ) {
var computed;
        if (i % 2 === 1) {
computed = original - l;
} else {
computed = original + l;
}
$("#login-box").animate({
"left": computed + "px"
}, 100);
}
$("#login-box").animate({
"left": "-150px"
}, 50);
}
function handleAuthSuccess(data) {
showSuccessMessage(data.message);
$("#login-button").prop("disabled", true);
setTimeout(function() {
location.href = data.redirectUri;
}, 1000);
}
function handleAuthFailure(data) {
showErrorMessage(data.responseJSON.message);
shake();
}<|fim▁hole|>})
function handleGrantAuthorization() {
var csrf_token = $("#csrf_token").val();
var client_id = $("#client_id").val();
$.ajax("/oauth/authorize", {
"method": "POST",
"data": {
client_id,
csrf_token
},
"success": handleAuthSuccess,
"error": handleAuthFailure
});
}<|fim▁end|> |
$(function () {
$('[data-toggle="tooltip"]').tooltip() |
<|file_name|>directivesSpec.js<|end_file_name|><|fim▁begin|>'use strict';
/* jasmine specs for directives go here */
describe('directives', function () {
beforeEach(module('myApp.directives'));
describe('app-version', function () {
it('should print current version', function () {
module(function ($provide) {
$provide.value('version', 'TEST_VER');
});
inject(function ($compile, $rootScope) {
var element = $compile('<span app-version></span>')($rootScope);
expect(element.text()).toEqual('TEST_VER');
});
});
});
describe('Button directive', function(){
var $compile, $rootScope;
beforeEach(inject(function (_$rootScope_, _$compile_) {
$compile = _$compile_;
$rootScope = _$rootScope_;
}));
it('should have "btn" class to the button element', function(){
var element = $compile('<button></button>')($rootScope);
expect(element.hasClass('btn')).toBeTruthy();
})
});
<|fim▁hole|> beforeEach(inject(function ($compile, $rootScope) {
$scope = $rootScope;
            $scope.numPages = 5;
$scope.currentPage = 2;
element = $compile('<pagination num-pages="numPages" current-page="currentPage"></pagination>')($scope);
$scope.$digest();
lis = function () {
return element.find('li');
};
}));
it('has the number of the page as text in each page item', function () {
        for (var i = 0; i < $scope.numPages; i++) {
expect(lis.eq(i).text()).toEqual('' + i);
}
});
});
});<|fim▁end|> | describe('Pagination directive', function () {
var element, $scope, lis;
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Generated with ./mk_vsl_tag from Varnish headers: include/tbl/vsl_tags.h include/tbl/vsl_tags_http.h include/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/vapi/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags_http.h
mod tag_e;
pub mod message;
pub mod parser;
use std::fmt::{self, Debug, Display};
use quick_error::ResultExt;
use nom;
use quick_error::quick_error;
use bitflags::bitflags;
use crate::maybe_string::MaybeStr;
pub use self::tag_e::VSL_tag_e as VslRecordTag;
bitflags! {
pub struct Marker: u8 {
const VSL_CLIENTMARKER = 0b0000_0001;
const VSL_BACKENDMARKER = 0b0000_0010;
}
}
impl Display for Marker {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "[{}{}]",
if self.contains(Marker::VSL_CLIENTMARKER) { "C" } else { " " },
if self.contains(Marker::VSL_BACKENDMARKER) { "B" } else { " " })
}
}
pub type VslIdent = u32;
#[derive(Debug)]
struct VslRecordHeader {
tag: u8,
len: u16,
marker: Marker,
ident: VslIdent,
}
pub struct VslRecord<'b> {
pub tag: VslRecordTag,
pub marker: Marker,
pub ident: VslIdent,
pub data: &'b[u8],
}
quick_error! {
#[derive(Debug)]
pub enum VslRecordParseError {
Nom(nom_err: String, tag: VslRecordTag, record: String) {
context(record: &'a VslRecord<'a>, err: nom::Err<&'a [u8]>) -> (format!("{}", err), record.tag, format!("{}", record))<|fim▁hole|> }
}
impl<'b> VslRecord<'b> {
pub fn parse_data<T, P>(&'b self, parser: P) -> Result<T, VslRecordParseError> where
P: Fn(&'b [u8]) -> nom::IResult<&'b [u8], T> {
        // Note: need a type annotation for the u32 error type, as the output IResult has no Error
        // variant that would help infer it
let result: nom::IResult<_, Result<T, _>, u32> = opt_res!(self.data, complete!(parser));
        // unwrap here is safe: complete! eliminates the Incomplete variant and opt_res! the remaining Error variant
result.unwrap().1.context(self).map_err(From::from)
}
pub fn is_client(&self) -> bool {
self.marker.contains(Marker::VSL_CLIENTMARKER)
}
pub fn is_backend(&self) -> bool {
self.marker.contains(Marker::VSL_BACKENDMARKER)
}
}
impl<'b> Debug for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.debug_struct("VSL Record")
.field("tag", &self.tag)
.field("marker", &self.marker)
.field("ident", &self.ident)
.field("data", &MaybeStr::from_bytes(self.data))
.finish()
}
}
impl<'b> Display for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let tag = format!("{:?}", self.tag);
if f.alternate() {
write!(f, "{} {:5} {:18} {}", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
} else {
write!(f, "VSL record (marker: {} ident: {} tag: {} data: {:?})", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
}
}
}<|fim▁end|> | display("Nom parser failed on {}: {}", record, nom_err)
} |
<|file_name|>Stage.js<|end_file_name|><|fim▁begin|>Stage.prototype = Object.create(MovieClip.prototype);
function Stage(canvas_id, args) {
// private vars
args = args || {};
args._name = 'stage';
var self = this,
_frameRate = args.frameRate || 0,
_interval = null,
_canvas = document.getElementById(canvas_id),
_context = _canvas.getContext('2d'),
_displayState = args.displayState || 'dynamic',
_lastFrameTime = 0,
// private function declarations
_updateDisplay,
_render,
_resize;
Object.defineProperty(this, 'frameRate', {
get: function() {
return _frameRate;
},
set: function(fps) {
if (fps !== _frameRate) {
_frameRate = fps;
}
}
});
Object.defineProperty(this, 'displayState', {
get: function() {
return _displayState;
},
set: function(displayState) {
_displayState = displayState;
_updateDisplay();
}
});
_resize = function() {
// updating display
_updateDisplay();
self.trigger('onResize');
};
_render = function(time) {
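        // frame-rate gate: only render once enough time has elapsed (frameRate 0 means uncapped)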
if (time-_lastFrameTime >= 1000/_frameRate || _frameRate === 0) {
_lastFrameTime = time;
// clear canvas
_context.clearRect(0, 0, _canvas.width, _canvas.height);
// render new graphics
self.tickGraphics(_context, Mouse.event);
// run logic for tweens and such
self.trigger('tickLogic', null, true);
// calling on enter frame
self.trigger('onEnterFrame');
// clear input context
Mouse.clear();
Key.clear();
}
_interval = window.requestAnimationFrame(_render);
};
_updateDisplay = function() {
// code for making sure canvas resolution matches dpi
_canvas.width = _canvas.offsetWidth;
_canvas.height = _canvas.offsetHeight;
// logic for screen
if (_displayState == 'original') {
self._x = (_canvas.width - self._width)/2;
self._y = (_canvas.height - self._height)/2;
self._scaleX = 1;
self._scaleY = 1;
} else if (_displayState == 'stretch') {
self._x = 0;
self._y = 0;
self._scaleX = _canvas.width / self._width;
self._scaleY = _canvas.height / self._height;
} else if (_displayState == 'fit') {
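            // scale uniformly so the stage fits inside the canvas, centering along the unused axis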
self._x = 0;
self._y = 0;
self._scaleX = _canvas.width / self._width;
self._scaleY = _canvas.height / self._height;
if (self._scaleX > self._scaleY) {
self._scaleX = self._scaleY;
self._x = (_canvas.width - self._width*self._scaleX)/2;
} else {
self._scaleY = self._scaleX;
self._y = (_canvas.height - self._height*self._scaleY)/2;
}
} else if (_displayState == 'dynamic') {
// experimental
self._width = _canvas.offsetWidth;
self._height = _canvas.offsetHeight;
self._x = 0;
self._y = 0;
self._scaleX = 1;
self._scaleY = 1;
}
};
// public functions
this.onResize = null;
this.onBlur = null;
this.onFocus = null;
this.play = function() {
if (!self.isPlaying()) {
_interval = window.requestAnimationFrame(_render);
}
};
this.isPlaying = function() {
return _interval !== null;
};
this.stop = function() {
if (self.isPlaying()) {
window.cancelAnimationFrame(_interval);
//clearInterval(_interval);
_interval = null;
// render new graphics
self.tickGraphics(_context, Mouse.event);
}
};
// init extended class
args._graphic = 'stage';
MovieClip.call(this, args);
Mouse.register(_canvas);
// setting up handler for resize
window.addEventListener('resize', _resize);
// setting up handler for blur
window.addEventListener('blur', function(e) {
// trigger blur events
self.trigger('onBlur');
});
// setting up handler for focus
window.addEventListener('focus', function(e) {<|fim▁hole|>}<|fim▁end|> | // trigger focus events
self.trigger('onFocus');
});
_resize(); |
<|file_name|>view-full-height.js<|end_file_name|><|fim▁begin|>jQuery.each(param_obj, function (index, value) {
if (!isNaN(value)) {
param_obj[index] = parseInt(value);
}
});
function Portfolio_Gallery_Full_Height(id) {
var _this = this;
_this.container = jQuery('#' + id + '.view-full-height');
_this.hasLoading = _this.container.data("show-loading") == "on";
_this.optionsBlock = _this.container.parent().find('div[id^="huge_it_portfolio_options_"]');
_this.filtersBlock = _this.container.parent().find('div[id^="huge_it_portfolio_filters_"]');
_this.content = _this.container.parent();
_this.element = _this.container.find('.portelement');
_this.defaultBlockHeight = param_obj.ht_view1_block_height;
_this.defaultBlockWidth = param_obj.ht_view1_block_width;
_this.optionSets = _this.optionsBlock.find('.option-set');
_this.optionLinks = _this.optionSets.find('a');
_this.sortBy = _this.optionsBlock.find('#sort-by');
_this.filterButton = _this.filtersBlock.find('ul li');
if (_this.container.data('show-center') == 'on' && ( ( !_this.content.hasClass('sortingActive') && !_this.content.hasClass('filteringActive') )
|| ( _this.optionsBlock.data('sorting-position') == 'top' && _this.filtersBlock.data('filtering-position') == 'top' ) ||
( _this.optionsBlock.data('sorting-position') == 'top' && _this.filtersBlock.data('filtering-position') == '' ) || ( _this.optionsBlock.data('sorting-position') == '' && _this.filtersBlock.data('filtering-position') == 'top' ) )) {
_this.isCentered = _this.container.data("show-center") == "on";
}
_this.documentReady = function () {
_this.container.hugeitmicro({
itemSelector: _this.element,
masonry: {
columnWidth: _this.defaultBlockWidth + 20 + param_obj.ht_view1_element_border_width * 2
},
masonryHorizontal: {
rowHeight: 300 + 20
},
cellsByRow: {
columnWidth: 300 + 20,
rowHeight: 240
},
cellsByColumn: {
columnWidth: 300 + 20,
rowHeight: 240
},
getSortData: {
symbol: function ($elem) {
return $elem.attr('data-symbol');
},
category: function ($elem) {
return $elem.attr('data-category');
},
number: function ($elem) {
return parseInt($elem.find('.number').text(), 10);
},
weight: function ($elem) {
return parseFloat($elem.find('.weight').text().replace(/[\(\)]/g, ''));
},
id: function ($elem) {
return $elem.find('.id').text();
}
}
});
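        // note: setInterval without a delay is clamped to a few milliseconds, so the layout is refreshed continuously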
setInterval(function(){
_this.container.hugeitmicro('reLayout');
});
};
_this.manageLoading = function () {
if (_this.hasLoading) {
_this.container.css({'opacity': 1});
_this.optionsBlock.css({'opacity': 1});
_this.filtersBlock.css({'opacity': 1});
_this.content.find('div[id^="huge-it-container-loading-overlay_"]').css('display', 'none');
}
};
_this.showCenter = function () {
if (_this.isCentered) {
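        // work out how many fixed-width tiles fit across the container, then size it to exactly that many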
var count = _this.element.length;
var elementwidth = _this.defaultBlockWidth + 10 + param_obj.ht_view1_element_border_width * 2;
        var entirecontent = _this.content.width();
        var whole = ~~(entirecontent / elementwidth);
if (whole > count) whole = count;
if (whole == 0) {
return false;
}
else {
var sectionwidth = whole * elementwidth + (whole - 1) * 20;
}
_this.container.width(sectionwidth).css({
"margin": "0px auto",
"overflow": "hidden"
});
        console.log(elementwidth + " " + entirecontent + " " + whole + " " + sectionwidth);
}
};
_this.addEventListeners = function () {
_this.optionLinks.on('click', _this.optionsClick);
_this.optionsBlock.find('#shuffle a').on('click',_this.randomClick);
_this.filterButton.on('click', _this.filtersClick);
<|fim▁hole|> jQuery(window).resize(_this.resizeEvent);
};
_this.resizeEvent = function(){
_this.container.hugeitmicro('reLayout');
_this.showCenter();
};
_this.optionsClick = function () {
var $this = jQuery(this);
if ($this.hasClass('selected')) {
return false;
}
var $optionSet = $this.parents('.option-set');
$optionSet.find('.selected').removeClass('selected');
$this.addClass('selected');
var options = {},
key = $optionSet.attr('data-option-key'),
value = $this.attr('data-option-value');
value = value === 'false' ? false : value;
options[key] = value;
if (key === 'layoutMode' && typeof changeLayoutMode === 'function') {
changeLayoutMode($this, options)
} else {
_this.container.hugeitmicro(options);
}
return false;
};
_this.randomClick = function () {
_this.container.hugeitmicro('shuffle');
_this.sortBy.find('.selected').removeClass('selected');
_this.sortBy.find('[data-option-value="random"]').addClass('selected');
return false;
};
_this.filtersClick = function () {
_this.filterButton.each(function () {
jQuery(this).removeClass('active');
});
jQuery(this).addClass('active');
// get filter value from option value
var filterValue = jQuery(this).attr('rel');
// use filterFn if matches value
_this.container.hugeitmicro({filter: filterValue});
};
_this.init = function () {
_this.showCenter();
jQuery(window).load(_this.manageLoading);
_this.documentReady();
_this.addEventListeners();
};
this.init();
}
var portfolios = [];
jQuery(document).ready(function () {
jQuery(".huge_it_portfolio_container.view-full-height").each(function (i) {
var id = jQuery(this).attr('id');
portfolios[i] = new Portfolio_Gallery_Full_Height(id);
});
});<|fim▁end|> | |
<|file_name|>post-link.js<|end_file_name|><|fim▁begin|>import React from "react"
import { Link } from "gatsby"
import numberToColor from "../utils/number-to-color"
import "./post-link.css"
const year = node => new Date(node.frontmatter.date).getFullYear()
const PostLink = ({ post }) => (
<div className="post-link"><|fim▁hole|> </Link>
</div>
)
export default PostLink<|fim▁end|> | <Link to={post.frontmatter.path} style={{color: numberToColor(year(post))}}>
{post.frontmatter.title} |
<|file_name|>bootstrap-wysihtml5.sv-SE.js<|end_file_name|><|fim▁begin|>/**
* Swedish translation for bootstrap-wysihtml5
*/
(function($){
$.fn.wysihtml5.locale["sv-SE"] = {
font_styles: {
normal: "Normal Text",
h1: "Rubrik 1",
h2: "Rubrik 2",
h3: "Rubrik 3"
},
emphasis: {
bold: "Fet",
italic: "Kursiv",
underline: "Understruken"
},
lists: {
unordered: "Osorterad lista",
ordered: "Sorterad lista",
outdent: "Minska indrag",
indent: "Öka indrag"<|fim▁hole|> },
link: {
insert: "Lägg till länk",
cancel: "Avbryt"
},
image: {
insert: "Lägg till Bild",
cancel: "Avbryt"
},
html: {
edit: "Redigera HTML"
},
colours: {
black: "Svart",
silver: "Silver",
gray: "Grå",
maroon: "Kastaniebrun",
red: "Röd",
purple: "Lila",
green: "Grön",
olive: "Olivgrön",
navy: "Marinblå",
blue: "Blå",
orange: "Orange"
}
};
}(jQuery));<|fim▁end|> | |
<|file_name|>plugin.ts<|end_file_name|><|fim▁begin|>///<reference path="../bower_components/dt-gulp-util/gulp-util.d.ts" />
import File = require('vinyl');
import gutil = require('gulp-util');
import through = require('through2');
import Configuration = require('./Configuration');
import ConfigurationOptions = require('./interfaces/ConfigurationOptions');
import VinylCompiler = require('./VinylCompiler');
var PLUGIN_NAME = 'blink';
var PluginError = gutil.PluginError;
function plugin(options?: ConfigurationOptions): NodeJS.ReadWriteStream {
var compiler = new VinylCompiler(new Configuration(options));
return through.obj((file: File, enc: string, cb: Function) => {
if (file.isNull()) {
cb(null, file);
return;
}
if (file.isStream()) {
cb(new PluginError(PLUGIN_NAME, 'Streaming not supported'));<|fim▁hole|> try {
cb(null, compiler.compile(file));
} catch (err) {
cb(new PluginError(PLUGIN_NAME, err.message, {
showStack: true
}));
}
});
}
export = plugin;<|fim▁end|> | return;
}
|
<|file_name|>issue_372.rs<|end_file_name|><|fim▁begin|>//! Checks that `__typename` field queries okay (and not okay) on root types.
//! See [#372](https://github.com/graphql-rust/juniper/issues/372) for details.
use futures::stream;
use juniper::{
execute, graphql_object, graphql_subscription, graphql_value, graphql_vars,
resolve_into_stream, GraphQLError, RootNode,
};
pub struct Query;
#[graphql_object]
impl Query {
fn some() -> bool {
true
}
}
pub struct Mutation;
#[graphql_object]
impl Mutation {
fn another() -> bool {
false
}
}
pub struct Subscription;
#[graphql_subscription]
impl Subscription {
async fn another() -> stream::Empty<bool> {
stream::empty()
}
}
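// Per the GraphQL spec, a subscription operation must consist of exactly one
// root field resolving to a stream, so a root-level `__typename` selection is
// rejected at validation time -- which is what the tests below assert.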
#[tokio::test]
async fn implicit_query_typename() {
let query = r#"{ __typename }"#;
let schema = RootNode::new(Query, Mutation, Subscription);
assert_eq!(
execute(query, None, &schema, &graphql_vars! {}, &()).await,
Ok((graphql_value!({"__typename": "Query"}), vec![])),
);
}
#[tokio::test]
async fn query_typename() {
let query = r#"query { __typename }"#;
let schema = RootNode::new(Query, Mutation, Subscription);
assert_eq!(
execute(query, None, &schema, &graphql_vars! {}, &()).await,
Ok((graphql_value!({"__typename": "Query"}), vec![])),
);
}
#[tokio::test]
async fn explicit_query_typename() {
let query = r#"query Query { __typename }"#;
let schema = RootNode::new(Query, Mutation, Subscription);
assert_eq!(
execute(query, None, &schema, &graphql_vars! {}, &()).await,
Ok((graphql_value!({"__typename": "Query"}), vec![])),
);
}
#[tokio::test]
async fn mutation_typename() {
let query = r#"mutation { __typename }"#;
<|fim▁hole|> assert_eq!(
execute(query, None, &schema, &graphql_vars! {}, &()).await,
Ok((graphql_value!({"__typename": "Mutation"}), vec![])),
);
}
#[tokio::test]
async fn explicit_mutation_typename() {
let query = r#"mutation Mutation { __typename }"#;
let schema = RootNode::new(Query, Mutation, Subscription);
assert_eq!(
execute(query, None, &schema, &graphql_vars! {}, &()).await,
Ok((graphql_value!({"__typename": "Mutation"}), vec![])),
);
}
#[tokio::test]
async fn subscription_typename() {
let query = r#"subscription { __typename }"#;
let schema = RootNode::new(Query, Mutation, Subscription);
match resolve_into_stream(query, None, &schema, &graphql_vars! {}, &()).await {
Err(GraphQLError::ValidationError(mut errors)) => {
assert_eq!(errors.len(), 1);
let err = errors.pop().unwrap();
assert_eq!(
err.message(),
"`__typename` may not be included as a root field in a \
subscription operation",
);
assert_eq!(err.locations()[0].index(), 15);
assert_eq!(err.locations()[0].line(), 0);
assert_eq!(err.locations()[0].column(), 15);
}
_ => panic!("Expected ValidationError"),
};
}
#[tokio::test]
async fn explicit_subscription_typename() {
let query = r#"subscription Subscription { __typename }"#;
let schema = RootNode::new(Query, Mutation, Subscription);
match resolve_into_stream(query, None, &schema, &graphql_vars! {}, &()).await {
Err(GraphQLError::ValidationError(mut errors)) => {
assert_eq!(errors.len(), 1);
let err = errors.pop().unwrap();
assert_eq!(
err.message(),
"`__typename` may not be included as a root field in a \
subscription operation"
);
assert_eq!(err.locations()[0].index(), 28);
assert_eq!(err.locations()[0].line(), 0);
assert_eq!(err.locations()[0].column(), 28);
}
_ => panic!("Expected ValidationError"),
};
}<|fim▁end|> | let schema = RootNode::new(Query, Mutation, Subscription);
|
<|file_name|>base_action_rule.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv, orm
from tools.translate import _
from datetime import datetime
from datetime import timedelta
from tools.safe_eval import safe_eval
from tools import ustr
import pooler
import re
import time
import tools
def get_datetime(date_field):
'''Return a datetime from a date string or a datetime string'''
#complete date time if date_field contains only a date
date_split = date_field.split(' ')
if len(date_split) == 1:
date_field = date_split[0] + " 00:00:00"
return datetime.strptime(date_field[:19], '%Y-%m-%d %H:%M:%S')
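# e.g. get_datetime('2012-05-01') == datetime(2012, 5, 1, 0, 0)
# and  get_datetime('2012-05-01 13:37:00') == datetime(2012, 5, 1, 13, 37)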
class base_action_rule(osv.osv):
""" Base Action Rules """
_name = 'base.action.rule'
_description = 'Action Rules'
def _state_get(self, cr, uid, context=None):
""" Get State
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param context: A standard dictionary for contextual values """
return self.state_get(cr, uid, context=context)
def state_get(self, cr, uid, context=None):
""" Get State
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param context: A standard dictionary for contextual values """
return [('', '')]
def priority_get(self, cr, uid, context=None):
""" Get Priority
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param context: A standard dictionary for contextual values """
return [('', '')]
_columns = {
'name': fields.char('Rule Name', size=64, required=True),
'model_id': fields.many2one('ir.model', 'Object', required=True),
'create_date': fields.datetime('Create Date', readonly=1),
'active': fields.boolean('Active', help="If the active field is set to False,\
it will allow you to hide the rule without removing it."),
'sequence': fields.integer('Sequence', help="Gives the sequence order \
when displaying a list of rules."),
'trg_date_type': fields.selection([
('none', 'None'),
('create', 'Creation Date'),
('action_last', 'Last Action Date'),
('date', 'Date'),
('deadline', 'Deadline'),
], 'Trigger Date', size=16),
'trg_date_range': fields.integer('Delay after trigger date', \
help="Delay After Trigger Date,\
specifies you can put a negative number. If you need a delay before the \
trigger date, like sending a reminder 15 minutes before a meeting."),
'trg_date_range_type': fields.selection([('minutes', 'Minutes'), ('hour', 'Hours'), \
('day', 'Days'), ('month', 'Months')], 'Delay type'),
'trg_user_id': fields.many2one('res.users', 'Responsible'),
'trg_partner_id': fields.many2one('res.partner', 'Partner'),
'trg_partner_categ_id': fields.many2one('res.partner.category', 'Partner Category'),
'trg_state_from': fields.selection(_state_get, 'State', size=16),
'trg_state_to': fields.selection(_state_get, 'Button Pressed', size=16),
'act_method': fields.char('Call Object Method', size=64),
'act_user_id': fields.many2one('res.users', 'Set Responsible to'),
'act_state': fields.selection(_state_get, 'Set State to', size=16),
'act_email_cc': fields.char('Add Watchers (Cc)', size=250, help="\
These people will receive a copy of the future communication between partner \
and users by email"),
'act_remind_partner': fields.boolean('Remind Partner', help="Check \
this if you want the rule to send a reminder by email to the partner."),
'act_remind_user': fields.boolean('Remind Responsible', help="Check \
this if you want the rule to send a reminder by email to the user."),
'act_reply_to': fields.char('Reply-To', size=64),
'act_remind_attach': fields.boolean('Remind with Attachment', help="Check this if you want that all documents attached to the object be attached to the reminder email sent."),
'act_mail_to_user': fields.boolean('Mail to Responsible', help="Check\
this if you want the rule to send an email to the responsible person."),
'act_mail_to_watchers': fields.boolean('Mail to Watchers (CC)',
help="Check this if you want \
the rule to mark CC(mail to any other person defined in actions)."),
'act_mail_to_email': fields.char('Mail to these Emails', size=128, \
help="Email-id of the persons whom mail is to be sent"),
'act_mail_body': fields.text('Mail body', help="Content of mail"),
'regex_name': fields.char('Regex on Resource Name', size=128, help="Regular expression for matching name of the resource\
\ne.g.: 'urgent.*' will search for records having name starting with the string 'urgent'\
\nNote: This is case sensitive search."),
        'server_action_id': fields.many2one('ir.actions.server', 'Server Action', help="The server action to run, i.e. which action to take on which object when the rule's conditions match."),
'filter_id':fields.many2one('ir.filters', 'Filter', required=False),
        'act_email_from' : fields.char('Email From', size=64, required=False,
                help="Use a Python expression to select the field whose value will be used for the 'From' header of the email"),
        'act_email_to' : fields.char('Email To', size=64, required=False,
                help="Use a Python expression to select the field whose value will be used for the 'To' header of the email"),
'last_run': fields.datetime('Last Run', readonly=1),
}
_defaults = {
'active': lambda *a: True,
'trg_date_type': lambda *a: 'none',
'trg_date_range_type': lambda *a: 'day',
'act_mail_to_user': lambda *a: 0,
'act_remind_partner': lambda *a: 0,
'act_remind_user': lambda *a: 0,
'act_mail_to_watchers': lambda *a: 0,
}
_order = 'sequence'
def onchange_model_id(self, cr, uid, ids, name):
#This is not a good solution as it will affect the domain only on onchange
res = {'domain':{'filter_id':[]}}
if name:
model_name = self.pool.get('ir.model').read(cr, uid, [name], ['model'])
if model_name:
mod_name = model_name[0]['model']
res['domain'] = {'filter_id': [('model_id','=',mod_name)]}
else:
res['value'] = {'filter_id':False}
return res
def post_action(self, cr, uid, ids, model, context=None):
# Searching for action rules
cr.execute("SELECT model.model, rule.id FROM base_action_rule rule \
LEFT JOIN ir_model model on (model.id = rule.model_id) \
WHERE active")
res = cr.fetchall()
# Check if any rule matching with current object
for obj_name, rule_id in res:
if not (model == obj_name):
continue # TODO add this condition in the WHERE clause above.
else:
obj = self.pool.get(obj_name)
# If the rule doesn't involve a time condition, run it immediately
# Otherwise we let the scheduler run the action
if self.browse(cr, uid, rule_id, context=context).trg_date_type == 'none':
self._action(cr, uid, [rule_id], obj.browse(cr, uid, ids, context=context), context=context)
return True
def _create(self, old_create, model, context=None):
"""<|fim▁hole|> def wrapper(cr, uid, vals, context=context):
if context is None:
context = {}
new_id = old_create(cr, uid, vals, context=context)
if not context.get('action'):
self.post_action(cr, uid, [new_id], model, context=context)
return new_id
return wrapper
def _write(self, old_write, model, context=None):
"""
Return a wrapper around `old_write` calling both `old_write` and
`post_action`, in that order.
"""
def wrapper(cr, uid, ids, vals, context=context):
if context is None:
context = {}
if isinstance(ids, (str, int, long)):
ids = [ids]
old_write(cr, uid, ids, vals, context=context)
if not context.get('action'):
self.post_action(cr, uid, ids, model, context=context)
return True
return wrapper
def _register_hook(self, cr, uid, ids, context=None):
"""
Wrap every `create` and `write` methods of the models specified by
the rules (given by `ids`).
"""
for action_rule in self.browse(cr, uid, ids, context=context):
model = action_rule.model_id.model
obj_pool = self.pool.get(model)
if not hasattr(obj_pool, 'base_action_ruled'):
obj_pool.create = self._create(obj_pool.create, model, context=context)
obj_pool.write = self._write(obj_pool.write, model, context=context)
obj_pool.base_action_ruled = True
return True
def create(self, cr, uid, vals, context=None):
res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
self._register_hook(cr, uid, [res_id], context=context)
return res_id
def write(self, cr, uid, ids, vals, context=None):
super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
self._register_hook(cr, uid, ids, context=context)
return True
def _check(self, cr, uid, automatic=False, use_new_cursor=False, \
context=None):
"""
        This function is called by the scheduler.
"""
rule_pool = self.pool.get('base.action.rule')
rule_ids = rule_pool.search(cr, uid, [], context=context)
self._register_hook(cr, uid, rule_ids, context=context)
rules = self.browse(cr, uid, rule_ids, context=context)
for rule in rules:
model = rule.model_id.model
model_pool = self.pool.get(model)
last_run = False
if rule.last_run:
last_run = get_datetime(rule.last_run)
now = datetime.now()
for obj_id in model_pool.search(cr, uid, [], context=context):
obj = model_pool.browse(cr, uid, obj_id, context=context)
# Calculate when this action should next occur for this object
base = False
if rule.trg_date_type=='create' and hasattr(obj, 'create_date'):
base = obj.create_date
elif (rule.trg_date_type=='action_last'
and hasattr(obj, 'create_date')):
if hasattr(obj, 'date_action_last') and obj.date_action_last:
base = obj.date_action_last
else:
base = obj.create_date
elif (rule.trg_date_type=='deadline'
and hasattr(obj, 'date_deadline')
and obj.date_deadline):
base = obj.date_deadline
elif (rule.trg_date_type=='date'
and hasattr(obj, 'date')
and obj.date):
base = obj.date
if base:
fnct = {
'minutes': lambda interval: timedelta(minutes=interval),
'day': lambda interval: timedelta(days=interval),
'hour': lambda interval: timedelta(hours=interval),
                        # datetime.timedelta has no 'months' argument, so the
                        # original line raised TypeError; approximate a month
                        # as 30 days (dateutil.relativedelta would be exact).
                        'month': lambda interval: timedelta(days=30 * interval),
}
base = get_datetime(base)
delay = fnct[rule.trg_date_range_type](rule.trg_date_range)
action_date = base + delay
if (not last_run or (last_run <= action_date < now)):
self._action(cr, uid, [rule.id], [obj], context=context)
rule_pool.write(cr, uid, [rule.id], {'last_run': now},
context=context)
def format_body(self, body):
""" Foramat Action rule's body
@param self: The object pointer """
return body and tools.ustr(body) or ''
def format_mail(self, obj, body):
data = {
'object_id': obj.id,
'object_subject': hasattr(obj, 'name') and obj.name or False,
'object_date': hasattr(obj, 'date') and obj.date or False,
'object_description': hasattr(obj, 'description') and obj.description or False,
'object_user': hasattr(obj, 'user_id') and (obj.user_id and obj.user_id.name) or '/',
'object_user_email': hasattr(obj, 'user_id') and (obj.user_id and \
obj.user_id.user_email) or '/',
'object_user_phone': hasattr(obj, 'partner_address_id') and (obj.partner_address_id and \
obj.partner_address_id.phone) or '/',
'partner': hasattr(obj, 'partner_id') and (obj.partner_id and obj.partner_id.name) or '/',
'partner_email': hasattr(obj, 'partner_address_id') and (obj.partner_address_id and\
obj.partner_address_id.email) or '/',
}
return self.format_body(body % data)
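    # Mail bodies are plain %-templates over the keys built above, e.g.
    # "Case %(object_id)s (%(object_subject)s) assigned to %(object_user)s".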
def email_send(self, cr, uid, obj, emails, body, emailfrom=None, context=None):
""" send email
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
        @param emails: list of recipient email addresses
        @param emailfrom: sender address, or False to fall back to the default
@param context: A standard dictionary for contextual values """
if not emailfrom:
emailfrom = tools.config.get('email_from', False)
if context is None:
context = {}
mail_message = self.pool.get('mail.message')
body = self.format_mail(obj, body)
if not emailfrom:
if hasattr(obj, 'user_id') and obj.user_id and obj.user_id.user_email:
emailfrom = obj.user_id.user_email
name = '[%d] %s' % (obj.id, tools.ustr(obj.name))
emailfrom = tools.ustr(emailfrom)
reply_to = emailfrom
if not emailfrom:
raise osv.except_osv(_('Error!'),
_("No E-Mail ID Found for your Company address!"))
return mail_message.schedule_with_attach(cr, uid, emailfrom, emails, name, body, model='base.action.rule', reply_to=reply_to, res_id=obj.id)
def do_check(self, cr, uid, action, obj, context=None):
""" check Action
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param context: A standard dictionary for contextual values """
if context is None:
context = {}
ok = True
if action.filter_id:
if action.model_id.model == action.filter_id.model_id:
context.update(eval(action.filter_id.context))
obj_ids = obj._table.search(cr, uid, eval(action.filter_id.domain), context=context)
if not obj.id in obj_ids:
ok = False
else:
ok = False
if getattr(obj, 'user_id', False):
ok = ok and (not action.trg_user_id.id or action.trg_user_id.id==obj.user_id.id)
if getattr(obj, 'partner_id', False):
ok = ok and (not action.trg_partner_id.id or action.trg_partner_id.id==obj.partner_id.id)
ok = ok and (
not action.trg_partner_categ_id.id or
(
obj.partner_id.id and
(action.trg_partner_categ_id.id in map(lambda x: x.id, obj.partner_id.category_id or []))
)
)
state_to = context.get('state_to', False)
state = getattr(obj, 'state', False)
if state:
ok = ok and (not action.trg_state_from or action.trg_state_from==state)
if state_to:
ok = ok and (not action.trg_state_to or action.trg_state_to==state_to)
elif action.trg_state_to:
ok = False
reg_name = action.regex_name
result_name = True
if reg_name:
ptrn = re.compile(ustr(reg_name))
_result = ptrn.search(ustr(obj.name))
if not _result:
result_name = False
regex_n = not reg_name or result_name
ok = ok and regex_n
return ok
def do_action(self, cr, uid, action, model_obj, obj, context=None):
""" Do Action
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param action: pass action
@param model_obj: pass Model object
@param context: A standard dictionary for contextual values """
if context is None:
context = {}
if action.server_action_id:
context.update({'active_id': obj.id, 'active_ids': [obj.id], 'active_model': obj._name})
self.pool.get('ir.actions.server').run(cr, uid, [action.server_action_id.id], context)
write = {}
if hasattr(obj, 'user_id') and action.act_user_id:
obj.user_id = action.act_user_id
write['user_id'] = action.act_user_id.id
if hasattr(obj, 'date_action_last'):
write['date_action_last'] = time.strftime('%Y-%m-%d %H:%M:%S')
if hasattr(obj, 'state') and action.act_state:
obj.state = action.act_state
write['state'] = action.act_state
if hasattr(obj, 'categ_id') and action.act_categ_id:
obj.categ_id = action.act_categ_id
write['categ_id'] = action.act_categ_id.id
model_obj.write(cr, uid, [obj.id], write, context)
if hasattr(model_obj, 'remind_user') and action.act_remind_user:
model_obj.remind_user(cr, uid, [obj.id], context, attach=action.act_remind_attach)
if hasattr(model_obj, 'remind_partner') and action.act_remind_partner:
model_obj.remind_partner(cr, uid, [obj.id], context, attach=action.act_remind_attach)
if action.act_method:
            # call the method named by the rule; the original fetched the
            # literal attribute 'act_method', which cannot have been intended
            getattr(model_obj, action.act_method)(cr, uid, [obj.id], action, context)
emails = []
if hasattr(obj, 'user_id') and action.act_mail_to_user:
if obj.user_id:
emails.append(obj.user_id.user_email)
if action.act_mail_to_watchers:
emails += (action.act_email_cc or '').split(',')
if action.act_mail_to_email:
emails += (action.act_mail_to_email or '').split(',')
locals_for_emails = {
'user' : self.pool.get('res.users').browse(cr, uid, uid, context=context),
'obj' : obj,
}
if action.act_email_to:
emails.append(safe_eval(action.act_email_to, {}, locals_for_emails))
emails = filter(None, emails)
if len(emails) and action.act_mail_body:
emails = list(set(emails))
email_from = safe_eval(action.act_email_from, {}, locals_for_emails)
def to_email(text):
return re.findall(r'([^ ,<@]+@[^> ,]+)', text or '')
emails = to_email(','.join(filter(None, emails)))
email_froms = to_email(email_from)
if email_froms:
self.email_send(cr, uid, obj, emails, action.act_mail_body, emailfrom=email_froms[0])
return True
def _action(self, cr, uid, ids, objects, scrit=None, context=None):
""" Do Action
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Basic Action Rule’s IDs,
@param objects: pass objects
@param context: A standard dictionary for contextual values """
if context is None:
context = {}
context.update({'action': True})
if not scrit:
scrit = []
for action in self.browse(cr, uid, ids, context=context):
for obj in objects:
if self.do_check(cr, uid, action, obj, context=context):
model_obj = self.pool.get(action.model_id.model)
self.do_action(cr, uid, action, model_obj, obj, context=context)
context.update({'action': False})
return True
def _check_mail(self, cr, uid, ids, context=None):
""" Check Mail
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Action Rule’s IDs
@param context: A standard dictionary for contextual values """
empty = orm.browse_null()
rule_obj = self.pool.get('base.action.rule')
for rule in self.browse(cr, uid, ids, context=context):
if rule.act_mail_body:
try:
rule_obj.format_mail(empty, rule.act_mail_body)
except (ValueError, KeyError, TypeError):
return False
return True
_constraints = [
        (_check_mail, 'Error: The mail is not well formatted', ['act_mail_body']),
]
base_action_rule()
class ir_cron(osv.osv):
_inherit = 'ir.cron'
_init_done = False
def _poolJobs(self, db_name, check=False):
if not self._init_done:
self._init_done = True
try:
db = pooler.get_db(db_name)
except:
return False
cr = db.cursor()
try:
next = datetime.now().strftime('%Y-%m-%d %H:00:00')
# Putting nextcall always less than current time in order to call it every time
cr.execute('UPDATE ir_cron set nextcall = \'%s\' where numbercall<>0 and active and model=\'base.action.rule\' ' % (next))
finally:
cr.commit()
cr.close()
super(ir_cron, self)._poolJobs(db_name, check=check)
ir_cron()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | Return a wrapper around `old_create` calling both `old_create` and
`post_action`, in that order.
""" |
<|file_name|>upgrade042.py<|end_file_name|><|fim▁begin|>self.description = "Backup file relocation"
lp1 = pmpkg("bash")
lp1.files = ["etc/profile*"]
lp1.backup = ["etc/profile"]
self.addpkg2db("local", lp1)
p1 = pmpkg("bash", "1.0-2")
self.addpkg(p1)
lp2 = pmpkg("filesystem")
self.addpkg2db("local", lp2)
p2 = pmpkg("filesystem", "1.0-2")
p2.files = ["etc/profile**"]
p2.backup = ["etc/profile"]
p2.depends = [ "bash" ]
self.addpkg(p2)
<|fim▁hole|>self.filesystem = ["etc/profile"]
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_VERSION=bash|1.0-2")
self.addrule("PKG_VERSION=filesystem|1.0-2")
self.addrule("!FILE_PACSAVE=etc/profile")
self.addrule("FILE_PACNEW=etc/profile")
self.addrule("FILE_EXIST=etc/profile")<|fim▁end|> | self.args = "-U %s" % " ".join([p.filename() for p in (p1, p2)])
|
<|file_name|>issue-1696.rs<|end_file_name|><|fim▁begin|>// xfail-fast
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>
use std::hashmap::HashMap;
pub fn main() {
let mut m = HashMap::new();
m.insert("foo".as_bytes().to_owned(), "bar".as_bytes().to_owned());
error!(m);
}<|fim▁end|> | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms. |
<|file_name|>string_expressions.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the<|fim▁hole|>//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! String expressions
use crate::error::{DataFusionError, Result};
use arrow::array::{Array, ArrayRef, StringArray, StringBuilder};
macro_rules! downcast_vec {
($ARGS:expr, $ARRAY_TYPE:ident) => {{
$ARGS
.iter()
.map(|e| match e.as_any().downcast_ref::<$ARRAY_TYPE>() {
Some(array) => Ok(array),
_ => Err(DataFusionError::Internal("failed to downcast".to_string())),
})
}};
}
/// Concatenates the given string columns row-wise into a single `StringArray`.
/// A null in any input row makes the corresponding output row null.
pub fn concatenate(args: &[ArrayRef]) -> Result<StringArray> {
// downcast all arguments to strings
let args = downcast_vec!(args, StringArray).collect::<Result<Vec<&StringArray>>>()?;
// do not accept 0 arguments.
    if args.is_empty() {
return Err(DataFusionError::Internal(
"Concatenate was called with 0 arguments. It requires at least one."
.to_string(),
));
}
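    // Sketch with hypothetical rows: [("x", "y"), ("x", null)] -> ["xy", null].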
let mut builder = StringBuilder::new(args.len());
// for each entry in the array
for index in 0..args[0].len() {
let mut owned_string: String = "".to_owned();
// if any is null, the result is null
let mut is_null = false;
for arg in &args {
if arg.is_null(index) {
is_null = true;
break; // short-circuit as we already know the result
} else {
owned_string.push_str(&arg.value(index));
}
}
if is_null {
builder.append_null()?;
} else {
builder.append_value(&owned_string)?;
}
}
Ok(builder.finish())
}<|fim▁end|> | // "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at |
<|file_name|>default.py<|end_file_name|><|fim▁begin|># Microsoft Azure Linux Agent
#
# Copyright 2018 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import glob
import os.path
import re
import signal
import sys
import azurelinuxagent.common.conf as conf
import azurelinuxagent.common.utils.fileutil as fileutil
from azurelinuxagent.common import version
from azurelinuxagent.common.exception import ProtocolError
from azurelinuxagent.common.osutil import get_osutil
from azurelinuxagent.common.persist_firewall_rules import PersistFirewallRulesHandler
from azurelinuxagent.common.protocol.util import get_protocol_util
from azurelinuxagent.ga.exthandlers import HANDLER_COMPLETE_NAME_PATTERN
def read_input(message):
if sys.version_info[0] >= 3:
return input(message)
else:
# This is not defined in python3, and the linter will thus
# throw an undefined-variable<E0602> error on this line.
# Suppress it here.
return raw_input(message) # pylint: disable=E0602
class DeprovisionAction(object):
def __init__(self, func, args=None, kwargs=None):
if args is None:
args = []
if kwargs is None:
kwargs = {}
self.func = func
self.args = args
self.kwargs = kwargs
def invoke(self):
self.func(*self.args, **self.kwargs)
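# Actions are collected first and only invoked after the user confirms, e.g.
# DeprovisionAction(fileutil.rm_files, ['/tmp/example']).invoke()  # path is hypothetical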
class DeprovisionHandler(object):
def __init__(self):
self.osutil = get_osutil()
self.protocol_util = get_protocol_util()
self.actions_running = False
signal.signal(signal.SIGINT, self.handle_interrupt_signal)
def del_root_password(self, warnings, actions):
warnings.append("WARNING! root password will be disabled. "
"You will not be able to login as root.")
actions.append(DeprovisionAction(self.osutil.del_root_password))
def del_user(self, warnings, actions):
try:
ovfenv = self.protocol_util.get_ovf_env()
except ProtocolError:
warnings.append("WARNING! ovf-env.xml is not found.")
warnings.append("WARNING! Skip delete user.")
return
username = ovfenv.username
warnings.append(("WARNING! {0} account and entire home directory "
"will be deleted.").format(username))<|fim▁hole|> [username]))
def regen_ssh_host_key(self, warnings, actions):
warnings.append("WARNING! All SSH host key pairs will be deleted.")
actions.append(DeprovisionAction(fileutil.rm_files,
[conf.get_ssh_key_glob()]))
def stop_agent_service(self, warnings, actions):
warnings.append("WARNING! The waagent service will be stopped.")
actions.append(DeprovisionAction(self.osutil.stop_agent_service))
def del_dirs(self, warnings, actions): # pylint: disable=W0613
dirs = [conf.get_lib_dir(), conf.get_ext_log_dir()]
actions.append(DeprovisionAction(fileutil.rm_dirs, dirs))
def del_files(self, warnings, actions): # pylint: disable=W0613
files = ['/root/.bash_history', conf.get_agent_log_file()]
actions.append(DeprovisionAction(fileutil.rm_files, files))
# For OpenBSD
actions.append(DeprovisionAction(fileutil.rm_files,
["/etc/random.seed",
"/var/db/host.random",
"/etc/isakmpd/local.pub",
"/etc/isakmpd/private/local.key",
"/etc/iked/private/local.key",
"/etc/iked/local.pub"]))
def del_resolv(self, warnings, actions):
warnings.append("WARNING! /etc/resolv.conf will be deleted.")
files_to_del = ["/etc/resolv.conf"]
actions.append(DeprovisionAction(fileutil.rm_files, files_to_del))
def del_dhcp_lease(self, warnings, actions):
warnings.append("WARNING! Cached DHCP leases will be deleted.")
dirs_to_del = ["/var/lib/dhclient", "/var/lib/dhcpcd", "/var/lib/dhcp"]
actions.append(DeprovisionAction(fileutil.rm_dirs, dirs_to_del))
# For FreeBSD and OpenBSD
actions.append(DeprovisionAction(fileutil.rm_files,
["/var/db/dhclient.leases.*"]))
# For FreeBSD, NM controlled
actions.append(DeprovisionAction(fileutil.rm_files,
["/var/lib/NetworkManager/dhclient-*.lease"]))
def del_ext_handler_files(self, warnings, actions): # pylint: disable=W0613
ext_dirs = [d for d in os.listdir(conf.get_lib_dir())
if os.path.isdir(os.path.join(conf.get_lib_dir(), d))
and re.match(HANDLER_COMPLETE_NAME_PATTERN, d) is not None
and not version.is_agent_path(d)]
for ext_dir in ext_dirs:
ext_base = os.path.join(conf.get_lib_dir(), ext_dir)
files = glob.glob(os.path.join(ext_base, 'status', '*.status'))
files += glob.glob(os.path.join(ext_base, 'config', '*.settings'))
files += glob.glob(os.path.join(ext_base, 'config', 'HandlerStatus'))
files += glob.glob(os.path.join(ext_base, 'mrseq'))
if len(files) > 0:
actions.append(DeprovisionAction(fileutil.rm_files, files))
def del_lib_dir_files(self, warnings, actions): # pylint: disable=W0613
known_files = [
'HostingEnvironmentConfig.xml',
'Incarnation',
'partition',
'Protocol',
'SharedConfig.xml',
'WireServerEndpoint'
]
known_files_glob = [
'Extensions.*.xml',
'ExtensionsConfig.*.xml',
'GoalState.*.xml'
]
lib_dir = conf.get_lib_dir()
files = [f for f in \
[os.path.join(lib_dir, kf) for kf in known_files] \
if os.path.isfile(f)]
for p in known_files_glob:
files += glob.glob(os.path.join(lib_dir, p))
if len(files) > 0:
actions.append(DeprovisionAction(fileutil.rm_files, files))
def reset_hostname(self, warnings, actions): # pylint: disable=W0613
localhost = ["localhost.localdomain"]
actions.append(DeprovisionAction(self.osutil.set_hostname,
localhost))
actions.append(DeprovisionAction(self.osutil.set_dhcp_hostname,
localhost))
def setup(self, deluser):
warnings = []
actions = []
self.stop_agent_service(warnings, actions)
if conf.get_regenerate_ssh_host_key():
self.regen_ssh_host_key(warnings, actions)
self.del_dhcp_lease(warnings, actions)
self.reset_hostname(warnings, actions)
if conf.get_delete_root_password():
self.del_root_password(warnings, actions)
self.del_dirs(warnings, actions)
self.del_files(warnings, actions)
self.del_resolv(warnings, actions)
if deluser:
self.del_user(warnings, actions)
self.del_persist_firewall_rules(actions)
return warnings, actions
def setup_changed_unique_id(self):
warnings = []
actions = []
self.del_dhcp_lease(warnings, actions)
self.del_lib_dir_files(warnings, actions)
self.del_ext_handler_files(warnings, actions)
self.del_persist_firewall_rules(actions)
return warnings, actions
def run(self, force=False, deluser=False):
warnings, actions = self.setup(deluser)
self.do_warnings(warnings)
if self.do_confirmation(force=force):
self.do_actions(actions)
def run_changed_unique_id(self):
'''
Clean-up files and directories that may interfere when the VM unique
identifier has changed.
While users *should* manually deprovision a VM, the files removed by
this routine will help keep the agent from getting confused
(since incarnation and extension settings, among other items, will
no longer be monotonically increasing).
'''
warnings, actions = self.setup_changed_unique_id()
self.do_warnings(warnings)
self.do_actions(actions)
def do_actions(self, actions):
self.actions_running = True
for action in actions:
action.invoke()
self.actions_running = False
def do_confirmation(self, force=False):
if force:
return True
confirm = read_input("Do you want to proceed (y/n)")
        return confirm.lower().startswith('y')
def do_warnings(self, warnings):
for warning in warnings:
print(warning)
def handle_interrupt_signal(self, signum, frame): # pylint: disable=W0613
if not self.actions_running:
print("Deprovision is interrupted.")
sys.exit(0)
print ('Deprovisioning may not be interrupted.')
return
@staticmethod
def del_persist_firewall_rules(actions):
agent_network_service_path = PersistFirewallRulesHandler.get_service_file_path()
actions.append(DeprovisionAction(fileutil.rm_files,
[agent_network_service_path, os.path.join(conf.get_lib_dir(),
PersistFirewallRulesHandler.BINARY_FILE_NAME)]))<|fim▁end|> | actions.append(DeprovisionAction(self.osutil.del_account, |
<|file_name|>persistable.rs<|end_file_name|><|fim▁begin|>use std::marker::PhantomData;
use expression::Expression;
use query_builder::{QueryBuilder, BuildQueryResult};
use query_source::{Table, Column};
use types::NativeSqlType;
/// Represents that a structure can be used to to insert a new row into the database.
/// Implementations can be automatically generated by
/// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name).
/// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than
/// one record.
pub trait Insertable<T: Table> {
type Columns: InsertableColumns<T>;
type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>;
fn columns() -> Self::Columns;
fn values(self) -> Self::Values;
}
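// Sketch (type names hypothetical): a struct tagged `#[insertable_into(users)]`
// gains an `Insertable<users::table>` impl, and the blanket impls below let a
// `&[NewUser]` or `&Vec<NewUser>` insert several rows in a single statement.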
pub trait InsertableColumns<T: Table> {
type SqlType: NativeSqlType;
fn names(&self) -> String;
}
impl<'a, T, U> Insertable<T> for &'a [U] where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: self,
_marker: PhantomData,
}
}
}
impl<'a, T, U> Insertable<T> for &'a Vec<U> where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: &*self,
_marker: PhantomData,
}
}
}
pub struct InsertValues<'a, T, U: 'a> {<|fim▁hole|> values: &'a [U],
_marker: PhantomData<T>,
}
impl<'a, T, U> Expression for InsertValues<'a, T, U> where
T: Table,
&'a U: Insertable<T>,
{
type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType;
fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
self.to_insert_sql(out)
}
fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
for (i, record) in self.values.into_iter().enumerate() {
if i != 0 {
out.push_sql(", ");
}
try!(record.values().to_insert_sql(out));
}
Ok(())
}
}
impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C {
type SqlType = <Self as Expression>::SqlType;
fn names(&self) -> String {
Self::name().to_string()
}
}<|fim▁end|> | |
<|file_name|>helper.go<|end_file_name|><|fim▁begin|>package mediaupload
import (
"errors"
"fmt"
"os"
log "github.com/Sirupsen/logrus"
"github.com/rafael84/go-spa/backend/cfg"
"github.com/rafael84/go-spa/backend/random"
"github.com/rafael84/go-spa/backend/storage/location"
"github.com/rafael84/go-spa/backend/storage/mediatype"
)
func MoveFile(location *location.Model, mediatype *mediatype.Model, srcPath string) (string, error) {
// create directories if necessary
dir := fmt.Sprintf("%s/%s/%s", cfg.Media.Root, location.StaticPath, mediatype.Name)<|fim▁hole|> err := os.MkdirAll(dir, 0755)
if err != nil {
log.Errorf("Unable to create directory: %s", err)
return "", errors.New("Could not process uploaded file")
}
// generate filename randomly
filename, err := random.New(16)
if err != nil {
log.Errorf("Unable to generate filename: %s", err)
return "", errors.New("Could not process uploaded file")
}
dstPath := fmt.Sprintf("%s/%s", dir, filename)
// move file to its destination
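	// NOTE: os.Rename fails with EXDEV when srcPath and dstPath are on
	// different filesystems; a copy-then-remove fallback would be needed there.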
err = os.Rename(srcPath, dstPath)
if err != nil {
log.Errorf("Could not move file %s", err)
return "", errors.New("Could not process uploaded file")
}
return dstPath, nil
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).<|fim▁hole|><|fim▁end|> | from . import account_invoice
from . import account_invoice_refund |
<|file_name|>_job.py<|end_file_name|><|fim▁begin|># -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2014 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
======
Jobs
======
Jobs have been entered into the scheduler once. They may be even finished
already.
"""
if __doc__: # pragma: no cover
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import collections as _collections
import itertools as _it
from .. import _graph
from .. import interfaces as _interfaces
from .. import _lock
#: Exception raised on cycles, when a todo DAG is resolved
DependencyCycle = _graph.DependencyCycle
#: Job ID sequence
#:
#: :Type: callable
_gen_id = _it.count(1).next
def last_job_id():
"""
Determine the largest job ID assigned until now
:Return: The ID. It's ``0``, if no job ID was assigned until now (job IDs
start with ``1``)
:Rtype: ``id``
"""
# this inspects the counter iterable by calling pickling methods and
# retrieving the next value from there and then subtracting one.
# __reduce__ returns the factory ('count') and the argument tuple
# containing the initial value (advanced with each call to next())
# pylint: disable = no-member
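    # Example (hypothetical state): after three _gen_id() calls the counter's
    # next value is 4, __reduce__() returns (itertools.count, (4,)), and this
    # yields 4 - 1 == 3.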
return _gen_id.__self__.__reduce__()[1][0] - 1
class Job(object):
"""
Job after is been scheduled.
:See: `JobInterface`
"""
__implements__ = [_interfaces.JobInterface]
def __init__(self, job_id, desc, group, locks, importance, not_before,
extra, predecessors, attempts):
"""
Initialization
:Parameters:
`job_id` : ``int``
Job ID
`desc` : `TodoDescription`
Job description
`group` : ``str``
Job Group
`locks` : iterable
List of locks that need to be aquired (``(`LockInterface`, ...)``)
`importance` : ``int``
Job importance
`not_before` : various
execute job not before this time. Special formats are allowed:
``int``
Number of seconds from now (delay)
``datetime.datetime``
a specific point in time (server time). Use UTC if you can. For
naive date times, UTC is assumed.
If omitted or ``None``, ``0`` is assumed.
`extra` : ``dict``
Extra job data
`predecessors` : iterable
List of jobs to be run successfully before this one
(``(int, ...)``)
`attempts` : ``list``
execution attempts (``[ExecutionAttemptInterface, ...]``)
"""
self.id = job_id
self.desc = desc
self.group = group
self.locks = _lock.validate(locks)
self.locks_waiting = None
self.importance = importance
self.extra = extra
self.predecessors = set()
self.predecessors_waiting = None<|fim▁hole|> for item in predecessors or ():
self.depend_on(item)
def depend_on(self, job_id):
"""
Add predecessor job ID
Duplicates are silently ignored.
:See: `interfaces.JobInterface.depend_on`
"""
assert self.predecessors_waiting is None
try:
job_id = int(job_id)
except TypeError:
raise ValueError("Invalid job_id: %r" % (job_id,))
if job_id < 1 or job_id >= self.id:
raise ValueError("Invalid job_id: %r" % (job_id,))
self.predecessors.add(job_id)
def job_from_todo(todo):
"""
Construct Job from Todo
:Parameters:
`todo` : `Todo`
Todo to construct from
:Return: New job instance
:Rtype: `JobInterface`
"""
return Job(
_gen_id(), todo.desc, todo.group, todo.locks, todo.importance,
todo.not_before, {}, set(), []
)
def joblist_from_todo(todo):
"""
Construct a list of jobs from Todo graph
:Parameters:
`todo` : `Todo`
todo to be inspected.
:Return: List of jobs (``[JobInterface, ...]``)
:Rtype: ``list``
"""
jobs, todos, virtuals = [], {}, {}
toinspect = _collections.deque([(todo, None)])
graph = _graph.DependencyGraph()
# 1) fill the dependency graph with the todo nodes (detects cycles, too)
try:
while toinspect:
todo, parent = toinspect.pop()
todo_id = id(todo)
if todo_id in todos:
virtual_id, pre, _ = todos[todo_id]
else:
pre = []
virtual_id = len(virtuals)
todos[todo_id] = virtual_id, pre, todo
virtuals[virtual_id] = todo_id
for parent_id in todo.predecessors():
graph.add((False, parent_id), (True, virtual_id))
pre.append((False, parent_id))
for succ in todo.successors():
toinspect.appendleft((succ, (True, virtual_id)))
if parent is not None:
graph.add(parent, (True, virtual_id))
pre.append(parent)
else:
graph.add((False, None), (True, virtual_id))
except DependencyCycle as e:
# remap to our input (todos and not some weird virtual IDs)
raise DependencyCycle([
todos[virtuals[tup[1]]][2] for tup in e.args[0]
])
# 2) resolve the graph (create topological order)
id_mapping = {}
for is_virtual, virtual_id in graph.resolve():
if is_virtual:
_, pres, todo = todos[virtuals[virtual_id]]
job = job_from_todo(todo)
for is_virtual, pre in pres:
if is_virtual:
pre = id_mapping[pre]
job.depend_on(pre)
id_mapping[virtual_id] = job.id
jobs.append(job)
return jobs<|fim▁end|> | self.attempts = attempts
self.not_before = not_before |
<|file_name|>kernel_density_estimation.py<|end_file_name|><|fim▁begin|>#<|fim▁hole|># SPDX-License-Identifier: BSD-2-Clause
#
from numba import njit, prange
import numpy as np
import argparse
import time
def kde(X):
b = 0.5
points = np.array([-1.0, 2.0, 5.0])
N = points.shape[0]
n = X.shape[0]
exps = 0
# "prange" in a normal function is identical to "range"
for i in prange(n):
p = X[i]
d = (-(p-points)**2)/(2*b**2)
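        # log-sum-exp shift: log(sum(exp(d))) == m + log(sum(exp(d - m))) for
        # any m; np.max(d) is the conventional stabilizer, though the identity
        # also holds for the np.min(d) used here.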
m = np.min(d)
exps += m-np.log(b*N)+np.log(np.sum(np.exp(d-m)))
return exps
def main():
parser = argparse.ArgumentParser(description='Kernel-Density')
parser.add_argument('--size', dest='size', type=int, default=10000000)
parser.add_argument('--iterations', dest='iterations', type=int, default=20)
args = parser.parse_args()
size = args.size
iterations = args.iterations
np.random.seed(0)
kde(np.random.ranf(10))
print("size:", size)
X = np.random.ranf(size)
t1 = time.time()
for _ in range(iterations):
res = kde(X)
t = time.time()-t1
print("checksum:", res)
print("SELFTIMED:", t)
if __name__ == '__main__':
main()<|fim▁end|> | # Copyright (c) 2017 Intel Corporation |
<|file_name|>sendfn-spawn-with-fn-arg.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// except according to those terms.
use std::cell::Cell;
use std::task;
pub fn main() { test05(); }
fn test05_start(f: ~fn(int)) {
f(22);
}
fn test05() {
let three = ~3;
let fn_to_send: ~fn(int) = |n| {
error2!("{}", *three + n); // will copy x into the closure
assert_eq!(*three, 3);
};
let fn_to_send = Cell::new(fn_to_send);
task::spawn(|| {
test05_start(fn_to_send.take());
});
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed |
<|file_name|>obj_rod.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import numpy as np
import mirheo as mir
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--axes', type=float, nargs=3)
parser.add_argument('--coords', type=str)
parser.add_argument('--vis', action='store_true', default=False)
parser.add_argument('--drag', type=float, default=0.0)
args = parser.parse_args()
ranks = (1, 1, 1)
domain = [16, 16, 16]
dt = 1e-3
t_end = 10.0
t_dump_every = 1.0
L = 14.0
num_segments = 10
mass = 1.0
u = mir.Mirheo(ranks, tuple(domain), debug_level=3, log_filename='log', no_splash=True)
# rod
com_q_rod = [[ 0.5 * domain[0],
0.5 * domain[1],
0.5 * domain[2] - L/2,
1.0, 0.0, 0.0, 0.0]]
def center_line(s): return (0, 0, (0.5-s) * L)
def torsion(s): return 0.0
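# The parametric centre line maps s in [0, 1] onto a straight segment of
# length L along z; zero torsion keeps the rod untwisted initially.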
def length(a, b):
return np.sqrt(
(a[0] - b[0])**2 +
(a[1] - b[1])**2 +
(a[2] - b[2])**2)
h = 1.0 / num_segments
l0 = length(center_line(h), center_line(0))
a0 = l0/2
pv_rod = mir.ParticleVectors.RodVector('rod', mass, num_segments)
ic_rod = mir.InitialConditions.Rod(com_q_rod, center_line, torsion, a0)
# ellipsoid
axes = tuple(args.axes)
com_q_ell = [[0.5 * domain[0],
0.5 * domain[1],
0.5 * domain[2] + axes[2],
1., 0, 0, 0]]
coords = np.loadtxt(args.coords).tolist()
if args.vis:
import trimesh
ell = trimesh.creation.icosphere(subdivisions=2, radius = 1.0)
for i in range(3):
ell.vertices[:,i] *= axes[i]
mesh = mir.ParticleVectors.Mesh(ell.vertices.tolist(), ell.faces.tolist())
pv_ell = mir.ParticleVectors.RigidEllipsoidVector('ellipsoid', mass, object_size=len(coords), semi_axes=axes, mesh=mesh)
else:
pv_ell = mir.ParticleVectors.RigidEllipsoidVector('ellipsoid', mass, object_size=len(coords), semi_axes=axes)
ic_ell = mir.InitialConditions.Rigid(com_q_ell, coords)<|fim▁hole|>
u.registerParticleVector(pv_ell, ic_ell)
u.registerParticleVector(pv_rod, ic_rod)
u.registerIntegrator(vv_ell)
u.setIntegrator(vv_ell, pv_ell)
# interactions
prms = {
"a0" : a0,
"l0" : l0,
"k_s_center" : 100.0,
"k_s_frame" : 100.0,
"k_bending" : (10.0, 0.0, 10.0),
"k_twist" : 10.0,
"tau0" : 0,
"kappa0" : (0., 0.)
}
int_rod = mir.Interactions.RodForces("rod_forces", **prms);
u.registerInteraction(int_rod)
u.setInteraction(int_rod, pv_rod, pv_rod)
anchor=(0.0, 0.0, -axes[2])
torque = 0.1
k_bound = 100.0
int_bind = mir.Interactions.ObjRodBinding("binding", torque, anchor, k_bound);
u.registerInteraction(int_bind)
u.setInteraction(int_bind, pv_ell, pv_rod)
vv_rod = mir.Integrators.VelocityVerlet('vv_rod')
u.registerIntegrator(vv_rod)
u.setIntegrator(vv_rod, pv_rod)
if args.drag > 0.0:
u.registerPlugins(mir.Plugins.createParticleDrag('rod_drag', pv_rod, args.drag))
if args.vis:
dump_every = int (t_dump_every/dt)
u.registerPlugins(mir.Plugins.createDumpParticles('rod_dump', pv_rod, dump_every, [], 'h5/rod_particles-'))
u.registerPlugins(mir.Plugins.createDumpMesh("mesh_dump", pv_ell, dump_every, path="ply/"))
u.run(int(t_end / dt), dt=dt)
if pv_rod is not None:
pos_rod = pv_rod.getCoordinates()
pos_ell = pv_ell.getCoordinates()
np.savetxt("pos.txt", np.vstack((pos_rod, pos_ell)))
del u
# nTEST: bindings.obj_rod.one
# cd bindings
# rm -rf h5 pos*txt
# f="pos.txt"
# rho=8.0; ax=2.0; ay=1.0; az=1.0
# cp ../../data/ellipsoid_coords_${rho}_${ax}_${ay}_${az}.txt $f
# mir.run --runargs "-n 2" ./obj_rod.py --axes $ax $ay $az --coords $f --vis
# cat pos.txt > pos.out.txt<|fim▁end|> | vv_ell = mir.Integrators.RigidVelocityVerlet("vv_ell") |
<|file_name|>reverse_xxd_dump_from_cc.py<|end_file_name|><|fim▁begin|># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Reverses xxd dump from to binary file
This script is used to convert models from C++ source file (dumped with xxd) to
the binary model weight file and analyze it with model visualizer like Netron
(https://github.com/lutzroeder/netron) or load the model in TensorFlow Python
API
to evaluate the results in Python.
The command to dump binary file to C++ source file looks like
xxd -i model_data.tflite > model_data.cc<|fim▁hole|>
Example usage:
python reverse_xxd_dump_from_cc.py \
--input_cc_file=model_data.cc \
--output_tflite_file=model_data.tflite
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from tensorflow.lite.tools import flatbuffer_utils
from tensorflow.python.platform import app
def main(_):
"""Application run loop."""
parser = argparse.ArgumentParser(
description='Reverses xxd dump from to binary file')
parser.add_argument(
'--input_cc_file',
type=str,
required=True,
help='Full path name to the input cc file.')
parser.add_argument(
'--output_tflite_file',
type=str,
required=True,
help='Full path name to the stripped output tflite file.')
args = parser.parse_args()
# Read the model from xxd output C++ source file
model = flatbuffer_utils.xxd_output_to_object(args.input_cc_file)
# Write the model
flatbuffer_utils.write_model(model, args.output_tflite_file)
if __name__ == '__main__':
app.run(main=main, argv=sys.argv[:1])<|fim▁end|> | |
<|file_name|>mp.go<|end_file_name|><|fim▁begin|>package mp
type MP struct {
*Server
*Client
CorpClient *Client
}
func New(id, appID, appSecret, token, aesKey string, urlPrefix ...string) *MP {
client := NewClient(appID, appSecret, true)
server := NewServer(token, aesKey, urlPrefix...)
server.SetClient(client)
server.SetID(id)
server.SetAppID(appID)
return &MP{
Server: server,
Client: client,<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>benchmark.py<|end_file_name|><|fim▁begin|>"""
Benchmarking and performance tests.
"""
import pytest
from pluggy import (_multicall, _legacymulticall, HookImpl, HookspecMarker,
HookimplMarker)
hookspec = HookspecMarker("example")
hookimpl = HookimplMarker("example")
def MC(methods, kwargs, callertype, firstresult=False):
hookfuncs = []
for method in methods:
f = HookImpl(None, "<temp>", method, method.example_impl)
hookfuncs.append(f)
return callertype(hookfuncs, kwargs, {"firstresult": firstresult})
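# MC builds a HookImpl for each plain function and dispatches through the
# given caller (_multicall or _legacymulticall), roughly mirroring what
# pluggy's PluginManager does when a hook fires.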
@hookimpl
def hook(arg1, arg2, arg3):
return arg1, arg2, arg3
@hookimpl(hookwrapper=True)
def wrapper(arg1, arg2, arg3):
yield
@pytest.fixture(
params=[10, 100],
ids="hooks={}".format,
)
def hooks(request):
return [hook for i in range(request.param)]
@pytest.fixture(
params=[10, 100],
ids="wrappers={}".format,
)
def wrappers(request):<|fim▁hole|>
@pytest.fixture(
params=[_multicall, _legacymulticall],
ids=lambda item: item.__name__
)
def callertype(request):
return request.param
def inner_exec(methods, callertype):
return MC(methods, {'arg1': 1, 'arg2': 2, 'arg3': 3}, callertype)
def test_hook_and_wrappers_speed(benchmark, hooks, wrappers, callertype):
benchmark(inner_exec, hooks + wrappers, callertype)<|fim▁end|> | return [wrapper for i in range(request.param)]
|
<|file_name|>SuperTypeWildcards.java<|end_file_name|><|fim▁begin|>package generics;
//: generics/SuperTypeWildcards.java
import java.util.*;
public class SuperTypeWildcards {<|fim▁hole|> }
} ///:~<|fim▁end|> | static void writeTo(List<? super Apple> apples) {
apples.add(new Apple());
apples.add(new Jonathan());
// apples.add(new Fruit()); // Error |
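    // List<? super Apple> matches List<Apple>, List<Fruit>, List<Object>, etc.
    // Adding an Apple (or its subtype Jonathan) is safe for all of them, but a
    // plain Fruit is not: the actual list might be a List<Apple>.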
<|file_name|>map.js<|end_file_name|><|fim▁begin|>'use strict';
function NavalMap(canvasId, imageMapUrl, imageCompassUrl, config) {
this.canvas = document.getElementById(canvasId);
this.imageMap = new Image();
this.imageCompass = new Image();
this.config = config;
this.itemsLoaded = false;
this.nationsLoaded = false;
this.shopsLoaded = false;
this.portsLoaded = false;
this.imageMapLoaded = false;
this.imageCompassLoaded = false;
this.init(imageMapUrl, imageCompassUrl);
}
NavalMap.prototype.init = function init(imageMapUrl, imageCompassUrl) {
var self = this;
this.loadEverything(imageMapUrl, imageCompassUrl, function () {
var stage = new createjs.Stage(self.canvas);
createjs.Touch.enable(stage);
stage.enableMouseOver(5);
stage.tickEnabled = false;
//createjs.Ticker.framerate = 60;
createjs.Ticker.timingMode = createjs.Ticker.RAF;
self.map = new Map(self.canvas, stage, self.imageMap, self.imageCompass, self.config);
});
};
NavalMap.prototype.loadImageMap = function loadImageMap(url, cb) {
this.imageMap.src = url;
var self = this;
this.imageMap.onload = function () {
self.imageMapLoaded = true;
        if (self.checkEverythingIsLoaded()) {
if(cb) {
cb();
}
}
};
};
NavalMap.prototype.loadImageCompass = function loadImageCompass(url, cb) {
this.imageCompass.src = url;
var self = this;
this.imageCompass.onload = function () {
self.imageCompassLoaded = true;
        if (self.checkEverythingIsLoaded()) {
if(cb) {
cb();
}
}
};
};
NavalMap.prototype.checkEverythingIsLoaded = function () {
return this.itemsLoaded && this.nationsLoaded && this.shopsLoaded && this.portsLoaded && this.imageMapLoaded && this.imageCompassLoaded;
};
NavalMap.prototype.loadItems = function(cb) {
var self = this;
$.getScript("items.php").done(function(){
self.itemsLoaded = true;
        if (self.checkEverythingIsLoaded()) {
if(cb) {
cb();
}
}
});
};
NavalMap.prototype.loadNations = function(cb) {
var self = this;
$.getScript("nations.php").done(function(){
self.nationsLoaded = true;
        if (self.checkEverythingIsLoaded()) {
if(cb) {
cb();
}
}
});
};
NavalMap.prototype.loadShops = function(cb) {
var self = this;
$.getScript("shops.php").done(function(){
self.shopsLoaded = true;
if (self.checkEverethingIsLoaded()) {
if(cb) {
cb();
}
}
});
};
NavalMap.prototype.loadPorts = function(cb) {
var self = this;
$.getScript("ports.php").done(function(){
self.portsLoaded = true;
        if (self.checkEverythingIsLoaded()) {
if(cb) {
cb();
}
}
});
};
NavalMap.prototype.loadEverything = function loadEverything(urlMap, urlCompass, cb) {
this.loadImageMap(urlMap, cb);
this.loadImageCompass(urlCompass, cb);
this.loadShops(cb);
this.loadItems(cb);
this.loadPorts(cb);
this.loadNations(cb);
};
function Map(canvas, stage, imageMap, imageCompass, config) {
this.canvas = canvas;
this.config = config;
this.stage = stage;
this.globalContainer = new createjs.Container();
this.mapContainer = new createjs.Container();
this.unmodifiedMapContainer = {};
this.compass = new Compass(imageCompass, config);
this.update = false;
this.alreadyZooming = false;
this.gpsCursor = undefined;
this.statistics = {};
this.fpsLabel = new createjs.Text("-- fps", "bold 18px Arial", "black");
this.init(imageMap);
}
Map.prototype.init = function (imageMap) {
this.stage.addChild(this.globalContainer);
this.stage.addChild(this.fpsLabel);
this.fpsLabel.x = 240;<|fim▁hole|> this.globalContainer.addChild(this.mapContainer);
this.globalContainer.addChild(this.compass);
this.mapContainer.addChild(new createjs.Bitmap(imageMap));
this.mapContainer.hasBeenDblClicked = false;
this.initContainerMap();
this.resizeCanvas(this);
this.createAllEvents();
var self = this;
Nations.Nations.forEach(function(nation) {
self.statistics[nation.Name] = 0;
});
this.addPorts();
this.stage.update();
self.tickEvent();
setTimeout(function() {
$("#progress-bar-load").hide();
$(".top-nav").removeClass('hide');
$("#port-information").removeClass('hide');
$("#how-to-use").removeClass('hide');
},600);
//this.update = true;
};
Map.prototype.initContainerMap = function () {
this.setScale(this.config.map.scale);
this.centerTo(this.config.map.x, this.config.map.y);
var self = this;
this.mapContainer.addLine = function (x, y) {
var shape = new createjs.Shape();
self.mapContainer.lineIndex = self.mapContainer.children.length;
self.mapContainer.addChild(shape);
shape.graphics.setStrokeStyle(3, "round").beginStroke('#3d3d3d').moveTo((self.compass.x - self.mapContainer.x) / self.mapContainer.scale, (self.compass.y - self.mapContainer.y) / self.mapContainer.scale).lineTo(x, y);
};
this.mapContainer.removeLine = function () {
if (self.mapContainer.lineIndex) {
self.mapContainer.removeChildAt(self.mapContainer.lineIndex);
}
};
//this.globalContainer.cursor = "default";
};
Map.prototype.populateStatistics = function () {
var stats = $("#ports-number");
$.each(this.statistics, function(name, number) {
stats.append('<strong>' + name + ' : </strong>' + number + '<br>');
})
};
Map.prototype.setScale = function (scale) {
this.mapContainer.scale = this.mapContainer.scaleX = this.mapContainer.scaleY = scale;
};
Map.prototype.zoom = function (increment) {
this.setScale(this.mapContainer.scale + increment);
};
Map.prototype.addPorts = function () {
var self = this;
setTimeout(function() {
Ports.forEach(function (port, idx) {
var circle = new createjs.Shape();
circle.graphics.beginFill(self.config.color[port.Nation]).drawCircle(0, 0, 5);
circle.x = (port.sourcePosition.x + self.config.portsOffset.x) * self.config.portsOffset.ratio;
circle.y = (port.sourcePosition.y + self.config.portsOffset.y) * self.config.portsOffset.ratio;
circle.cursor = "pointer";
circle.idx = idx;
self.statistics[getNationFromIdx(port.Nation).Name] += 1;
circle.on("click", function () {
var currPort = Ports[this.idx];
$('#port-title').text(currPort.Name);
$('#nation').text(getNationFromIdx(currPort.Nation).Name);
var timer = currPort.ConquestFlagTimeSlot + 'h - ' + (currPort.ConquestFlagTimeSlot + 2) + "h";
$('#timer').text(currPort.ConquestFlagTimeSlot == -1?'No Timer':timer);
$('#capital').text(currPort.Capital?'yes':'no');
$('#regional').text(currPort.Regional?'yes':'no');
$('#shallow').text(currPort.Depth == 1?'yes':'no');
$('#capturer').text(currPort.Capturer);
var produces = Shops[this.idx].ResourcesProduced;
var consumes = Shops[this.idx].ResourcesConsumed;
$('#produces-list').html('');
$('#consumes-list').html('');
produces.forEach(function (produce) {
var item = getItemTemplateFromId(produce.Key);
$('#produces-list').append('<li class="list-group-item">'+item.Name+' : '+ produce.Value+'</li>');
});
consumes.forEach(function (consume) {
var item = getItemTemplateFromId(consume.Key);
$('#consumes-list').append('<li class="list-group-item">'+item.Name+' : '+ consume.Value+'</li>');
});
});
circle.cache(-5, -5, 10, 10);
self.mapContainer.addChild(circle);
});
self.update = true;
self.stage.tick();
self.populateStatistics();
},200);
};
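// Keeps the map point currently under screen position (x, y) fixed while the
// scale changes, so zooming stays anchored at the mouse cursor.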
Map.prototype.keepMapUnderPos = function (x, y) {
var mapPos = this.getMapPosFromWindowPos(x, y);
this.globalContainer.x = x - this.mapContainer.scale * mapPos.x;
this.globalContainer.y = y - this.mapContainer.scale * mapPos.y;
};
Map.prototype.keepCompassUnderCurrentPos = function () {
var mapPos = this.getMapPosFromWindowPos(this.compass.x + this.unmodifiedMapContainer.x, this.compass.y + this.unmodifiedMapContainer.y);
this.compass.x = mapPos.x * this.mapContainer.scale;
this.compass.y = mapPos.y * this.mapContainer.scale;
};
Map.prototype.centerTo = function (x, y) {
this.globalContainer.x = this.canvas.width / 2 - this.mapContainer.scale * x;
this.globalContainer.y = this.canvas.height / 2 - this.mapContainer.scale * y;
};
Map.prototype.getNewWindowPosFromMapPos = function (x, y) {
return {
x: x * this.mapContainer.scale + this.mapContainer.x - this.globalContainer.x,
y: y * this.mapContainer.scale + this.mapContainer.y - this.globalContainer.y
}
};
Map.prototype.getMapPosFromGpsPos = function(x , y) {
return {
x: Math.round(x * this.config.gps.ratio + this.config.gps.x),
y: Math.round(-(y * this.config.gps.ratio - this.config.gps.y))
}
};
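// The inverse conversion (window position back to in-game GPS coordinates)
// is done inline in clickEvent below.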
Map.prototype.getMapPosFromWindowPos = function (x, y) {
return {
x: (x - this.unmodifiedMapContainer.x) / this.unmodifiedMapContainer.scale,
y: (y - this.unmodifiedMapContainer.y) / this.unmodifiedMapContainer.scale
};
};
Map.prototype.gps = function (x, y) {
if (this.gpsCursor) {
this.mapContainer.removeChild(this.gpsCursor);
}
this.gpsCursor = new createjs.Shape();
this.gpsCursor.graphics.setStrokeStyle(2).beginStroke("OrangeRed").drawCircle(0,0,30);
var mapPos = this.getMapPosFromGpsPos(x, y);
this.gpsCursor.x = mapPos.x + (Math.random() > 0.5 ? Math.floor((Math.random() * 10 * 13 / 10)) : - Math.floor((Math.random() * 10 * 13 / 10)));
this.gpsCursor.y = mapPos.y + (Math.random() > 0.5 ? Math.floor((Math.random() * 10 * 13 / 10)) : - Math.floor((Math.random() * 10 * 13 / 10)));
this.mapContainer.addChild(this.gpsCursor);
this.centerTo(mapPos.x, mapPos.y);
this.update = true;
};
Map.prototype.gpsSubmitEvent = function () {
var self = this;
$("#gpsForm").submit(function (event) {
event.preventDefault();
self.gps($('#xGps').val(), $('#yGps').val());
});
};
Map.prototype.createAllEvents = function () {
this.resizeCanvasEvent();
this.gpsSubmitEvent();
this.mouseDownEvent();
this.clickEvent();
this.pressMoveEvent();
//this.pressUpEvent();
this.dblClickEvent();
this.mouseWheelEvent();
};
Map.prototype.dblClickEvent = function () {
var self = this;
this.globalContainer.on("dblclick", function (evt) {
if (this.hasBeenDblClicked) {
self.mapContainer.addLine((evt.stageX - self.globalContainer.x) / self.mapContainer.scale, (evt.stageY - self.globalContainer.y) / self.mapContainer.scale);
this.hasBeenDblClicked = false;
} else {
self.mapContainer.removeLine();
self.compass.x = (evt.stageX - self.globalContainer.x);
self.compass.y = (evt.stageY - self.globalContainer.y);
this.hasBeenDblClicked = true;
}
self.update = true;
});
};
Map.prototype.clickEvent = function () {
var self = this;
this.globalContainer.on("click", function (evt) {
var mapPos = self.getMapPosFromWindowPos(evt.stageX, evt.stageY);
var gpsPos = {
x: Math.round((mapPos.x - self.config.gps.x) / self.config.gps.ratio),
y: Math.round(-(mapPos.y - self.config.gps.y) / self.config.gps.ratio)
};
$('#cursorX').text(gpsPos.x);
$('#cursorY').text(gpsPos.y);
});
};
Map.prototype.mouseDownEvent = function () {
this.globalContainer.on("mousedown", function (evt) {
this.offset = {x: this.x - evt.stageX, y: this.y - evt.stageY};
//this.cursor = "move";
});
};
Map.prototype.pressMoveEvent = function () {
var self = this;
this.globalContainer.on("pressmove", function (evt) {
this.x = evt.stageX + this.offset.x;
this.y = evt.stageY + this.offset.y;
//this.cursor = "move";
self.update = true;
});
};
Map.prototype.pressUpEvent = function () {
var self = this;
this.globalContainer.on("pressup", function (evt) {
this.cursor = "default";
//self.update = true;
});
};
Map.prototype.mouseWheelEvent = function () {
var self = this;
$('#canvas').mousewheel(function (event) {
if (!self.alreadyZooming) {
self.alreadyZooming = true;
setTimeout(function () {
self.alreadyZooming = false;
}, 45);
if (event.deltaY == 1) {
if (self.mapContainer.scale < 1.8) {
self.zoom(0.1);
self.keepMapUnderPos(event.pageX, event.pageY);
self.keepCompassUnderCurrentPos();
}
} else if (event.deltaY == -1) {
if (self.mapContainer.scale > 0.4) {
self.zoom(-0.1);
self.keepMapUnderPos(event.pageX, event.pageY);
self.keepCompassUnderCurrentPos();
}
}
self.update = true;
}
});
};
Map.prototype.resizeCanvasEvent = function () {
var self = this;
window.addEventListener('resize', function(){self.resizeCanvas(self)}, false);
};
Map.prototype.resizeCanvas = function (self) {
self.canvas.width = window.innerWidth;
self.canvas.height = window.innerHeight;
self.update = true;
};
Map.prototype.tickEvent = function () {
var self = this;
createjs.Ticker.addEventListener("tick", function (event) {
self.fpsLabel.text = Math.round(createjs.Ticker.getMeasuredFPS()) + " fps";
if (self.update) {
self.copyMapContainer();
self.update = false; // only update once
self.stage.update(event);
}
});
};
Map.prototype.copyMapContainer = function () {
this.unmodifiedMapContainer = {
x: this.globalContainer.x,
y: this.globalContainer.y,
scale: this.mapContainer.scale
}
};
function Compass(imageCompass, config) {
this.addChild(new createjs.Bitmap(imageCompass).setTransform(-imageCompass.width / 2, -imageCompass.height / 2));
this.setScale(config.compass.scale);
this.x = config.compass.x;
this.y = config.compass.y;
}
Compass.prototype = new createjs.Container();
Compass.prototype.constructor = Compass;
Compass.prototype.setScale = function (scale) {
this.scale = this.scaleX = this.scaleY = scale;
};<|fim▁end|> | this.fpsLabel.y = 10; |
<|file_name|>clientset_generated.go<|end_file_name|><|fim▁begin|>/*
* Copyright 2018-2019, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
clientset "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned"
adminv1beta1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/admin/v1beta1"
fakeadminv1beta1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/admin/v1beta1/fake"
adminv1beta2 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/admin/v1beta2"
fakeadminv1beta2 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/admin/v1beta2/fake"
enmassev1beta1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/enmasse/v1beta1"
fakeenmassev1beta1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/enmasse/v1beta1/fake"
iotv1alpha1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/iot/v1alpha1"
fakeiotv1alpha1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/iot/v1alpha1/fake"
userv1beta1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/user/v1beta1"
fakeuserv1beta1 "github.com/enmasseproject/enmasse/pkg/client/clientset/versioned/typed/user/v1beta1/fake"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/discovery"
fakediscovery "k8s.io/client-go/discovery/fake"
"k8s.io/client-go/testing"
)
// NewSimpleClientset returns a clientset that will respond with the provided objects.
// It's backed by a very simple object tracker that processes creates, updates and deletions as-is,
// without applying any validations and/or defaults. It shouldn't be considered a replacement
// for a real clientset and is mostly useful in simple unit tests.
func NewSimpleClientset(objects ...runtime.Object) *Clientset {
o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder())
for _, obj := range objects {
if err := o.Add(obj); err != nil {
panic(err)
}
}
cs := &Clientset{tracker: o}<|fim▁hole|> gvr := action.GetResource()
ns := action.GetNamespace()
watch, err := o.Watch(gvr, ns)
if err != nil {
return false, nil, err
}
return true, watch, nil
})
return cs
}
// Clientset implements clientset.Interface. Meant to be embedded into a
// struct to get a default implementation. This makes faking out just the method
// you want to test easier.
type Clientset struct {
testing.Fake
discovery *fakediscovery.FakeDiscovery
tracker testing.ObjectTracker
}
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
return c.discovery
}
func (c *Clientset) Tracker() testing.ObjectTracker {
return c.tracker
}
var _ clientset.Interface = &Clientset{}
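// Example usage in a test (illustrative):
//
//	cs := NewSimpleClientset()
//	_ = cs.AdminV1beta1() // typed client backed by the in-memory tracker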
// AdminV1beta1 retrieves the AdminV1beta1Client
func (c *Clientset) AdminV1beta1() adminv1beta1.AdminV1beta1Interface {
return &fakeadminv1beta1.FakeAdminV1beta1{Fake: &c.Fake}
}
// AdminV1beta2 retrieves the AdminV1beta2Client
func (c *Clientset) AdminV1beta2() adminv1beta2.AdminV1beta2Interface {
return &fakeadminv1beta2.FakeAdminV1beta2{Fake: &c.Fake}
}
// EnmasseV1beta1 retrieves the EnmasseV1beta1Client
func (c *Clientset) EnmasseV1beta1() enmassev1beta1.EnmasseV1beta1Interface {
return &fakeenmassev1beta1.FakeEnmasseV1beta1{Fake: &c.Fake}
}
// IotV1alpha1 retrieves the IotV1alpha1Client
func (c *Clientset) IotV1alpha1() iotv1alpha1.IotV1alpha1Interface {
return &fakeiotv1alpha1.FakeIotV1alpha1{Fake: &c.Fake}
}
// UserV1beta1 retrieves the UserV1beta1Client
func (c *Clientset) UserV1beta1() userv1beta1.UserV1beta1Interface {
return &fakeuserv1beta1.FakeUserV1beta1{Fake: &c.Fake}
}<|fim▁end|> | cs.discovery = &fakediscovery.FakeDiscovery{Fake: &cs.Fake}
cs.AddReactor("*", "*", testing.ObjectReaction(o))
cs.AddWatchReactor("*", func(action testing.Action) (handled bool, ret watch.Interface, err error) { |
<|file_name|>test_interdomain_pair.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008,2009, Yale Laboratory of Networked Systems
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Yale University nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <boost/test/unit_test.hpp>
#include <boost/test/floating_point_comparison.hpp>
#include <stdexcept>
#include "interdomain_pair.h"
BOOST_AUTO_TEST_CASE ( interdomain_dest_create_empty )
{
InterdomainDest d;
BOOST_CHECK_THROW(d.get_asn(), std::exception);
BOOST_CHECK_THROW(d.get_ip(), std::exception);
}
BOOST_AUTO_TEST_CASE ( interdomain_dest_create_asn )
{
InterdomainDest d(100);
BOOST_CHECK_THROW(d.get_ip(), std::exception);
BOOST_CHECK_EQUAL(d.get_asn(), 100);
}
BOOST_AUTO_TEST_CASE ( interdomain_dest_create_ip )
{
p4p::IPPrefix* ip = p4p::IPPrefix::from_text("127.0.0.1", 8);
InterdomainDest d(*ip);
BOOST_CHECK_THROW(d.get_asn(), std::exception);
BOOST_CHECK_EQUAL(d.get_ip().get_address_text(), "127.0.0.0");
BOOST_CHECK_EQUAL(d.get_ip().get_length(), 8);
delete ip;
}
BOOST_AUTO_TEST_CASE ( interdomain_dest_empty_less )
{
InterdomainDest d1;
InterdomainDest d2(100);
BOOST_CHECK_THROW(d1 < d2, std::exception);
}
BOOST_AUTO_TEST_CASE ( interdomain_dest_asn_compare )
{
InterdomainDest d1(100);
InterdomainDest d2(200);
BOOST_CHECK(d1 < d2);
BOOST_CHECK(d2 > d1);
BOOST_CHECK(d1 != d2);
BOOST_CHECK(d2 != d1);
BOOST_CHECK(!(d1 == d2));
BOOST_CHECK(!(d2 == d1));
}
BOOST_AUTO_TEST_CASE ( interdomain_dest_ip_compare )
{
p4p::IPPrefix* ip1 = p4p::IPPrefix::from_text("127.0.0.1", 8);
p4p::IPPrefix* ip2 = p4p::IPPrefix::from_text("128.0.0.1", 8);
InterdomainDest d1(*ip1);
InterdomainDest d2(*ip2);
BOOST_CHECK(d1 < d2);
BOOST_CHECK(d2 > d1);
BOOST_CHECK(d1 != d2);
BOOST_CHECK(d2 != d1);
BOOST_CHECK(!(d1 == d2));
BOOST_CHECK(!(d2 == d1));
delete ip1;
delete ip2;
}
BOOST_AUTO_TEST_CASE ( interdomain_dest_ip_asn_compare )
{
p4p::IPPrefix* ip = p4p::IPPrefix::from_text("127.0.0.1", 8);
InterdomainDest d1(100);
InterdomainDest d2(*ip);
BOOST_CHECK(d1 < d2 || d1 > d2);
    BOOST_CHECK(d2 < d1 || d2 > d1);
BOOST_CHECK(d1 != d2);
BOOST_CHECK(d2 != d1);
BOOST_CHECK(!(d1 == d2));
BOOST_CHECK(!(d2 == d1));
<|fim▁hole|><|fim▁end|> | delete ip;
} |
<|file_name|>lex_b_tree_sets_length_range.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools;
use malachite_base::bools::exhaustive::exhaustive_bools;
use malachite_base::nevers::nevers;
use malachite_base::sets::exhaustive::lex_b_tree_sets_length_range;
use malachite_base::tuples::exhaustive::exhaustive_units;
use std::collections::BTreeSet;
use std::fmt::Debug;
fn lex_b_tree_sets_length_range_small_helper<I: Clone + Iterator>(
a: u64,
b: u64,
xs: I,
out_len: usize,
out: &[BTreeSet<I::Item>],
) where
I::Item: Clone + Debug + Eq + Ord,
{
let xss = lex_b_tree_sets_length_range(a, b, xs);<|fim▁hole|>}
#[test]
fn test_lex_b_tree_sets_length_range() {
lex_b_tree_sets_length_range_small_helper(0, 5, nevers(), 1, &[btreeset! {}]);
lex_b_tree_sets_length_range_small_helper(6, 10, nevers(), 0, &[]);
lex_b_tree_sets_length_range_small_helper(
0,
5,
exhaustive_units(),
2,
&[btreeset! {}, btreeset! {()}],
);
lex_b_tree_sets_length_range_small_helper(1, 0, exhaustive_bools(), 0, &[]);
lex_b_tree_sets_length_range_small_helper(1, 1, exhaustive_bools(), 0, &[]);
lex_b_tree_sets_length_range_small_helper(
0,
2,
exhaustive_bools(),
3,
&[btreeset! {}, btreeset! {false}, btreeset! {true}],
);
lex_b_tree_sets_length_range_small_helper(
2,
4,
exhaustive_bools(),
1,
&[btreeset! {false, true}],
);
lex_b_tree_sets_length_range_small_helper(
1,
2,
'a'..='c',
3,
&[btreeset! {'a'}, btreeset! {'b'}, btreeset! {'c'}],
);
}<|fim▁end|> | let xss_prefix = xss.clone().take(20).collect_vec();
    assert_eq!(xss_prefix.as_slice(), out);
assert_eq!(xss.count(), out_len); |
<|file_name|>test_cursor_fetchmany.py<|end_file_name|><|fim▁begin|>import ctds
from .base import TestExternalDatabase
class TestCursorFetchMany(TestExternalDatabase):
'''Unit tests related to the Cursor.fetchmany() method.
'''
def test___doc__(self):
self.assertEqual(
ctds.Cursor.fetchmany.__doc__,
'''\
fetchmany(size=self.arraysize)
Fetch the next set of rows of a query result, returning a sequence of<|fim▁hole|>
:pep:`0249#fetchmany`
:return: A sequence of result rows.
:rtype: ctds.RowList
'''
)
def test_closed(self):
with self.connect() as connection:
cursor = connection.cursor()
cursor.close()
try:
cursor.fetchmany()
except ctds.InterfaceError as ex:
self.assertEqual(str(ex), 'cursor closed')
else:
self.fail('.fetchmany() did not fail as expected') # pragma: nocover
def test_closed_connection(self): # pylint: disable=invalid-name
connection = self.connect()
with connection.cursor() as cursor:
connection.close()
try:
cursor.fetchmany()
except ctds.InterfaceError as ex:
self.assertEqual(str(ex), 'connection closed')
else:
self.fail('.fetchmany() did not fail as expected') # pragma: nocover
def test_invalid_size(self):
with self.connect() as connection:
with connection.cursor() as cursor:
self.assertRaises(TypeError, cursor.fetchmany, size='123')
def test_premature(self):
with self.connect() as connection:
with connection.cursor() as cursor:
self.assertRaises(ctds.InterfaceError, cursor.fetchmany)
def test_fetchmany(self):
with self.connect() as connection:
with connection.cursor() as cursor:
cursor.execute(
'''
DECLARE @{0} TABLE(i INT);
INSERT INTO @{0}(i) VALUES (1),(2),(3);
SELECT * FROM @{0};
SELECT i * 2 FROM @{0};
'''.format(self.test_fetchmany.__name__)
)
self.assertEqual([tuple(row) for row in cursor.fetchmany()], [(1,)])
self.assertEqual([tuple(row) for row in cursor.fetchmany()], [(2,)])
self.assertEqual([tuple(row) for row in cursor.fetchmany()], [(3,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), True)
self.assertEqual([tuple(row) for row in cursor.fetchmany()], [(2,)])
self.assertEqual([tuple(row) for row in cursor.fetchmany()], [(4,)])
self.assertEqual([tuple(row) for row in cursor.fetchmany()], [(6,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), None)
self.assertRaises(ctds.InterfaceError, cursor.fetchmany)
cursor.arraysize = 3
cursor.execute(
'''
DECLARE @{0} TABLE(i INT);
INSERT INTO @{0}(i) VALUES (1),(2),(3);
SELECT * FROM @{0};
SELECT i * 2 FROM @{0};
'''.format(self.test_fetchmany.__name__)
)
self.assertEqual([tuple(row) for row in cursor.fetchmany(3)], [(1,), (2,), (3,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), True)
self.assertEqual([tuple(row) for row in cursor.fetchmany(3)], [(2,), (4,), (6,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), None)
self.assertRaises(ctds.InterfaceError, cursor.fetchmany)
def test_size(self):
with self.connect() as connection:
with connection.cursor() as cursor:
cursor.execute(
'''
DECLARE @{0} TABLE(i INT);
INSERT INTO @{0}(i) VALUES (1),(2),(3);
SELECT * FROM @{0};
SELECT i * 2 FROM @{0};
'''.format(self.test_size.__name__)
)
self.assertEqual([tuple(row) for row in cursor.fetchmany(3)], [(1,), (2,), (3,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), True)
self.assertEqual([tuple(row) for row in cursor.fetchmany(3)], [(2,), (4,), (6,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), None)
self.assertRaises(ctds.InterfaceError, cursor.fetchmany)
def test_empty_resultset(self):
with self.connect() as connection:
with connection.cursor() as cursor:
cursor.execute(
'''
DECLARE @{0} TABLE(i INT);
INSERT INTO @{0}(i) VALUES (1),(2),(3);
SELECT i FROM @{0} WHERE i < 0;
'''.format(self.test_empty_resultset.__name__)
)
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), None)
def test_multiple_resultsets(self):
with self.connect() as connection:
with connection.cursor() as cursor:
cursor.execute(
'''
DECLARE @{0} TABLE(i INT);
INSERT INTO @{0}(i) VALUES (1),(2),(3);
SELECT i FROM @{0} WHERE i < 0;
SELECT i AS j FROM @{0} WHERE i > 2;
SELECT i AS k FROM @{0} WHERE i > 3;
SELECT i AS ii FROM @{0};
'''.format(self.test_multiple_resultsets.__name__)
)
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), True)
self.assertEqual([tuple(row) for row in cursor.fetchmany(3)], [(3,)])
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), True)
self.assertEqual(list(cursor.fetchmany()), [])
self.assertEqual(cursor.nextset(), True)
self.assertEqual([tuple(row) for row in cursor.fetchmany(3)], [(1,), (2,), (3,)])
self.assertEqual(cursor.nextset(), None)<|fim▁end|> | sequences. An empty sequence is returned when no more rows are available. |
<|file_name|>create.go<|end_file_name|><|fim▁begin|>package clusterreleaseversion
import (
"context"
"github.com/giantswarm/apiextensions/v3/pkg/annotation"
"github.com/giantswarm/conditions/pkg/conditions"
"github.com/giantswarm/microerror"
azopannotation "github.com/giantswarm/azure-operator/v5/pkg/annotation"
"github.com/giantswarm/azure-operator/v5/service/controller/key"
)
func (r *Resource) EnsureCreated(ctx context.Context, cr interface{}) error {
var err error
cluster, err := key.ToCluster(cr)
if err != nil {
return microerror.Mask(err)
}
updateReleaseVersion := false
if conditions.IsCreatingTrue(&cluster) {
updateReleaseVersion, err = r.isCreationCompleted(ctx, &cluster)
if err != nil {
return microerror.Mask(err)
}
} else if conditions.IsUpgradingTrue(&cluster) {
updateReleaseVersion, err = r.isUpgradeCompleted(ctx, &cluster)
if err != nil {
return microerror.Mask(err)
}
}
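	// Record the release that has now been fully rolled out and drop the
	// transient node-pools upgrade marker once the transition is finished.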
if updateReleaseVersion {
cluster.Annotations[annotation.LastDeployedReleaseVersion] = key.ReleaseVersion(&cluster)
if _, isUpgradingToNodePoolsSet := cluster.GetAnnotations()[azopannotation.UpgradingToNodePools]; isUpgradingToNodePoolsSet {
delete(cluster.Annotations, azopannotation.UpgradingToNodePools)
}
err = r.ctrlClient.Update(ctx, &cluster)
if err != nil {
return microerror.Mask(err)
}
}
<|fim▁hole|><|fim▁end|> | return nil
} |
<|file_name|>config.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> public secret: string;
public algorithm: string;
public host: string;
public masterKey: string;
constructor() {
this.port = process.env.PORT || 8080;
this.mongodb = 'mongodb://testuser:CFT^[email protected]:61495/multiplitest';
this.secret = 'test';
this.algorithm = 'aes-256-ctr';
this.host = '';
this.masterKey = '';
}
}<|fim▁end|> | export default class Config {
public port: number;
public mongodb: string; |
<|file_name|>fix_raw_input.py<|end_file_name|><|fim▁begin|>"""Fixer that changes raw_input(...) into input(...)."""
# Author: Andre Roberge
<|fim▁hole|>from .. import fixer_base
from ..fixer_util import Name
class FixRawInput(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< name='raw_input' trailer< '(' [any] ')' > any* >
"""
def transform(self, node, results):
name = results["name"]
name.replace(Name("input", prefix=name.prefix))<|fim▁end|> |
# Local imports
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate toml;
use std::{env, process};
use std::io::prelude::*;
use std::io::{BufReader, ErrorKind};
use std::collections::BTreeMap;
use std::fs::File;
use toml::Value;
static OVERRIDES_PATH : &'static str = ".multirust/overrides";
static SETTINGS_PATH : &'static str = ".rustup/settings.toml";
static OLD_SETTINGS_PATH : &'static str = ".multirust/settings.toml";
enum OverridesDatabase {<|fim▁hole|>
impl OverridesDatabase {
pub fn get(&self, key: &str) -> Option<&str> {
use OverridesDatabase::*;
match *self {
Plain(ref db) => db.get(key).map(|s| &s[..]),
Toml(ref db) => {
db.get(key).map(|v| v.as_str().expect("Expected value is not a string."))
}
}
}
}
fn with_date<'a>(short: &'a str, toolchain: &'a str) -> Option<&'a str> {
let date_start = short.len() + 1;
let date_end = short.len() + 3 + 4 + 2 + 2;
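    // The "-YYYY-MM-DD" suffix is 11 bytes: three hyphens, a 4-digit year,
    // and 2 digits each for month and day.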
let char_range = toolchain.chars()
.skip(date_start)
.take(4)
.all(char::is_numeric);
if toolchain.len() > date_start && char_range {
Some(&toolchain[0..date_end])
} else {
None
}
}
fn clean_toolchain_name(toolchain: &str) -> &str {
static SHORTNAMES : &'static [&'static str] = &["stable", "nightly", "beta"];
for short in SHORTNAMES {
if toolchain.starts_with(short) {
return match with_date(short, toolchain) {
Some(s) => s,
None => short
}
}
}
toolchain
}
fn plain_overrides_file(f: File) {
let overrides = BufReader::new(f);
let mut overrides_map = BTreeMap::new();
for line in overrides.lines() {
let line = line.expect("No valid line found");
let mut s = line.split(';');
let path = s.next().expect("No path in line");
let toolchain = s.next().expect("No toolchain in line");
overrides_map.insert(path.into(), toolchain.into());
}
let database = OverridesDatabase::Plain(overrides_map);
toolchain(database);
}
fn settings_toml(mut settings: File) -> Result<(), ()> {
let mut content = String::new();
settings.read_to_string(&mut content).expect("Can't read settings file");
let database = content.parse::<Value>().map_err(|_| ())?;
let database = database.get("overrides").cloned()
.and_then(|overrides| overrides.as_table().cloned())
.and_then(|database| Some(OverridesDatabase::Toml(database)))
.ok_or(())?;
toolchain(database);
Ok(())
}
fn toolchain(database: OverridesDatabase) {
let mut cwd = match env::current_dir() {
Ok(cwd) => cwd,
Err(_) => return,
};
loop {
let path = format!("{}", cwd.display());
if let Some(toolchain) = database.get(&path) {
println!("{}", clean_toolchain_name(toolchain));
return;
}
if !cwd.pop() {
break;
}
}
println!("default");
}
fn main() {
let home = env::home_dir().expect("Impossible to get your home dir!");
let mut overrides_path = home.clone();
overrides_path.push(OVERRIDES_PATH);
match File::open(&overrides_path) {
Ok(f) => {
plain_overrides_file(f);
process::exit(0);
},
Err(ref e) if e.kind() == ErrorKind::NotFound => { /* ignored */ },
Err(_) => {
println!("default");
process::exit(0);
}
}
let mut settings_path = home.clone();
settings_path.push(SETTINGS_PATH);
if let Ok(f) = File::open(&settings_path) {
settings_toml(f).unwrap_or_else(|_| println!("default"));
process::exit(0);
}
let mut settings_path = home.clone();
settings_path.push(OLD_SETTINGS_PATH);
if let Ok(f) = File::open(&settings_path) {
settings_toml(f).unwrap_or_else(|_| println!("default"));
process::exit(0);
}
println!("default");
}
#[cfg(test)]
mod test {
use super::clean_toolchain_name;
#[test]
fn simple_name() {
assert_eq!("nightly", clean_toolchain_name("nightly-x86_64-unknown-linux-gnu"));
assert_eq!("nightly", clean_toolchain_name("nightly"));
}
#[test]
fn name_with_date() {
assert_eq!("nightly-2016-06-05", clean_toolchain_name("nightly-2016-06-05-x86_64-unknown-linux-gnu"));
}
}<|fim▁end|> | Plain(BTreeMap<String, String>),
Toml(BTreeMap<String, toml::Value>),
} |
<|file_name|>ClimbStairs70.py<|end_file_name|><|fim▁begin|>class Solution(object):<|fim▁hole|> :type n: int
:rtype: int
"""
nums = [0 for _ in xrange(n + 1)]
for i in xrange(1, n + 1):
if i == 1:
nums[1] = 1
elif i == 2:
nums[2] = 2
else:
nums[i] = nums[i - 1] + nums[i - 2]
return nums[n]<|fim▁end|> | def climbStairs(self, n):
""" |
<|file_name|>IntelHexWriter.java<|end_file_name|><|fim▁begin|>package ru.trolsoft.utils.files;
import ru.trolsoft.utils.StrUtils;
import java.io.*;
import java.util.Random;
/**
* Created on 07/02/17.<|fim▁hole|> *
*/
private int segmentAddress = 0;
public IntelHexWriter(Writer writer) {
if (writer instanceof BufferedWriter) {
this.writer = writer;
} else {
this.writer = new BufferedWriter(writer);
}
}
// public IntelHexWriter(String fileName) throws IOException {
// this(new FileWriter(fileName));
// }
    public void addData(int offset, byte[] data, int bytesPerLine) throws IOException {
if (data.length == 0) {
return;
}
//System.out.println("::" + data.length);
byte buf[] = new byte[bytesPerLine];
int pos = 0;
int bytesToAdd = data.length;
while (bytesToAdd > 0) {
if (offset % bytesPerLine != 0) { // can be true for first line if offset doesn't aligned
buf = new byte[bytesPerLine - offset % bytesPerLine];
} else if (bytesToAdd < bytesPerLine) { // last line
buf = new byte[bytesToAdd];
} else if (buf.length != bytesPerLine) {
buf = new byte[bytesPerLine];
}
System.arraycopy(data, pos, buf, 0, buf.length);
// Goto next segment if no more space available in current
if (offset + buf.length - 1 > segmentAddress + 0xffff) {
int nextSegment = ((offset + bytesPerLine) >> 4) << 4;
addSegmentRecord(nextSegment);
segmentAddress = nextSegment;
}
addDataRecord(offset & 0xffff, buf);
bytesToAdd -= buf.length;
offset += buf.length;
pos += buf.length;
}
}
private void addSegmentRecord(int offset) throws IOException {
int paragraph = offset >> 4;
int hi = (paragraph >> 8) & 0xff;
int lo = paragraph & 0xff;
int crc = 2 + 2 + hi + lo;
crc = (-crc) & 0xff;
String rec = ":02000002" + hex(hi) + hex(lo) + hex(crc);
write(rec);
        // Example records: ":02 0000 02 10 00 EC" (extended segment address, type 02)
        // and ":02 0000 04 00 01 F9" (extended linear address, type 04).
}
private void addEofRecord() throws IOException {
write(":00000001FF");
}
private void write(String s) throws IOException {
writer.write(s);
//writer.write(0x0d);
writer.write(0x0a);
}
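    // Intel HEX data record layout: ":LLAAAATT<data>CC" -- LL = byte count,
    // AAAA = 16-bit offset, TT = record type (00 for data) and CC = the
    // two's-complement checksum of all preceding record bytes.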
private void addDataRecord(int offset, byte[] data) throws IOException {
int hi = (offset >> 8) & 0xff;
int lo = offset & 0xff;
int crc = data.length + hi + lo;
String rec = ":" + hex(data.length) + hex(hi) + hex(lo) + "00";
for (byte d : data) {
rec += hex(d);
crc += d;
}
crc = (-crc) & 0xff;
rec += hex(crc);
write(rec);
}
private static String hex(int b) {
return StrUtils.byteToHexStr((byte)b);
}
public void done() throws IOException {
addEofRecord();
writer.flush();
}
public static void main(String ... args) throws IOException {
IntelHexWriter w = new IntelHexWriter(new OutputStreamWriter(System.out));
// w.addDataRecord(0x0190, new byte[] {0x56, 0x45, 0x52, 0x53, 0x49, 0x4F, 0x4E, 0x0D, 0x0A, 0x00, 0x0D, 0x0A, 0x41,
// 0x54, 0x0D, 0x0A});
byte[] data = new byte[Math.abs(new Random().nextInt() % 1024)];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (i % 0xff);
}
w.addData(0x10000 - 0x100, data, 16);
w.done();
}
}<|fim▁end|> | */
public class IntelHexWriter {
private final Writer writer;
/** |
<|file_name|>test_do_authorize_user_for_task_by_email.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import unittest
from werkzeug.exceptions import Forbidden, NotFound
from tests.logic_t.layer.LogicLayer.util import generate_ll
class AuthorizeUserForTaskByEmailTest(unittest.TestCase):
def setUp(self):
self.ll = generate_ll()
self.pl = self.ll.pl
def test_authorizes_user(self):
# given a task
task = self.pl.create_task('task')
self.pl.add(task)
# and a user to authorize
user = self.pl.create_user('[email protected]')
self.pl.add(user)
# and an admin user to attempt the authorization
admin = self.pl.create_user('[email protected]', is_admin=True)
self.pl.add(admin)
self.pl.commit()
# precondition: the user is not authorized for the task
self.assertNotIn(user, task.users)
self.assertNotIn(task, user.tasks)
# when
result = self.ll.do_authorize_user_for_task_by_email(task.id,
user.email, admin)
# then the user is now authorized for the task
self.assertIn(user, task.users)
self.assertIn(task, user.tasks)
# and the task is returned
self.assertIs(result, task)
def test_task_id_none_raises(self):
# given a user to try to authorize
user = self.pl.create_user('[email protected]')
self.pl.add(user)
# and an admin user to attempt the authorization
admin = self.pl.create_user('[email protected]', is_admin=True)
self.pl.add(admin)
self.pl.commit()
# precondition: there are no tasks
self.assertEqual(0, self.pl.count_tasks())
# and the user is not authorized for anything
self.assertEqual(0, len(user.tasks))
# expect
self.assertRaises(
ValueError,
self.ll.do_authorize_user_for_task_by_email,
None, user.email, admin)
# and the user was not authorized for anything
self.assertEqual(0, len(user.tasks))
def test_task_not_found_raises(self):
# given a user to try to authorize
user = self.pl.create_user('[email protected]')
self.pl.add(user)
# and an admin user to attempt the authorization
admin = self.pl.create_user('[email protected]', is_admin=True)
self.pl.add(admin)
self.pl.commit()
# precondition: there are no tasks
self.assertEqual(0, self.pl.count_tasks())
# and the user is not authorized for anything
self.assertEqual(0, len(user.tasks))
# expect
self.assertRaises(
NotFound,
self.ll.do_authorize_user_for_task_by_email,
1, user.email, admin)
# and the user was not authorized for anything
self.assertEqual(0, len(user.tasks))
def test_email_none_raises(self):
# given a task
task = self.pl.create_task('task')
self.pl.add(task)
# and an admin user to attempt the authorization
admin = self.pl.create_user('[email protected]', is_admin=True)
self.pl.add(admin)
self.pl.commit()
# precondition no users are authorized for the task
self.assertEqual(0, len(task.users))
# expect
self.assertRaises(
ValueError,
self.ll.do_authorize_user_for_task_by_email,
task.id, None, admin)
# and no users are authorized for the task
self.assertEqual(0, len(task.users))
def test_email_empty_raises(self):
# given a task
task = self.pl.create_task('task')
self.pl.add(task)
# and an admin user to attempt the authorization
admin = self.pl.create_user('[email protected]', is_admin=True)
self.pl.add(admin)
self.pl.commit()
# precondition no users are authorized for the task
self.assertEqual(0, len(task.users))
# expect
self.assertRaises(
ValueError,
self.ll.do_authorize_user_for_task_by_email,
task.id, '', admin)
# and no users are authorized for the task
self.assertEqual(0, len(task.users))<|fim▁hole|> def test_email_not_found_raises(self):
# given a task
task = self.pl.create_task('task')
self.pl.add(task)
# and an admin user to attempt the authorization
admin = self.pl.create_user('[email protected]', is_admin=True)
self.pl.add(admin)
self.pl.commit()
# precondition: there are no users with that email address
self.assertEqual(0, self.pl.count_users(email_in=['[email protected]']))
# and no users are authorized for the task
self.assertEqual(0, len(task.users))
# expect
self.assertRaises(
NotFound,
self.ll.do_authorize_user_for_task_by_email,
task.id, '[email protected]', admin)
# and no users are authorized for the task
self.assertEqual(0, len(task.users))
def test_current_user_not_allowed_raises(self):
# given a task
task = self.pl.create_task('task')
self.pl.add(task)
# and a user to authorize
user = self.pl.create_user('[email protected]')
self.pl.add(user)
# and a non-admin user to attempt the authorization
non_admin = self.pl.create_user('[email protected]', is_admin=False)
self.pl.add(non_admin)
self.pl.commit()
# precondition: the current_user is not authorized or admin
self.assertNotIn(non_admin, task.users)
self.assertNotIn(task, non_admin.tasks)
self.assertFalse(non_admin.is_admin)
# when
self.assertRaises(
Forbidden,
self.ll.do_authorize_user_for_task_by_email,
task.id, user.email, non_admin)
# and no users are authorized for the task
self.assertEqual(0, len(task.users))
def test_current_user_is_authorized_non_admin_then_authorizes_user(self):
# given a task
task = self.pl.create_task('task')
self.pl.add(task)
# and a user to authorize
user = self.pl.create_user('[email protected]')
self.pl.add(user)
# and a non-admin user to attempt the authorization
non_admin = self.pl.create_user('[email protected]', is_admin=False)
self.pl.add(non_admin)
task.users.add(non_admin)
self.pl.commit()
# precondition: the current_user is authorized for the task
self.assertIn(non_admin, task.users)
self.assertIn(task, non_admin.tasks)
# and the current_user is not an admin
self.assertFalse(non_admin.is_admin)
# when
result = self.ll.do_authorize_user_for_task_by_email(
task.id, user.email, non_admin)
# then the user is now authorized for the task
self.assertIn(user, task.users)
self.assertIn(task, user.tasks)
# and the task is returned
self.assertIs(result, task)<|fim▁end|> | |
<|file_name|>policytree.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from pyparsing import *
from charm.toolbox.node import *
import string
objStack = []
def createAttribute(s, loc, toks):
if toks[0] == '!':
newtoks = ""
for i in toks:
newtoks += i
return BinNode(newtoks)
return BinNode(toks[0]) # create
# convert 'attr < value' to a binary tree based on 'or' and 'and'
def parseNumConditional(s, loc, toks):
print("print: %s" % toks)
return BinNode(toks[0])
def printStuff(s, loc, toks):
print("print: %s" % toks)
return toks
def pushFirst( s, loc, toks ):
objStack.append( toks[0] )
def createTree(op, node1, node2):
if(op == "or"):
node = BinNode(OpType.OR)
elif(op == "and"):
node = BinNode(OpType.AND)
else:
return None
node.addSubNode(node1, node2)
return node
class PolicyParser:
def __init__(self, verbose=False):
self.finalPol = self.getBNF()
self.verbose = verbose
def getBNF(self):
# supported operators => (OR, AND, <
OperatorOR = Literal("OR").setParseAction(downcaseTokens) | Literal("or")
OperatorAND = Literal("AND").setParseAction(downcaseTokens) | Literal("and")
Operator = OperatorAND | OperatorOR
lpar = Literal("(").suppress()
rpar = Literal(")").suppress()
BinOperator = Literal("<=") | Literal(">=") | Literal("==") | Word("<>", max=1)
# describes an individual leaf node
leafNode = (Optional("!") + Word(alphanums+'-_./\?!@#$^&*%')).setParseAction( createAttribute )
# describes expressions such as (attr < value)
leafConditional = (Word(alphanums) + BinOperator + Word(nums)).setParseAction( parseNumConditional )
# describes the node concept
node = leafConditional | leafNode
expr = Forward()
term = Forward()<|fim▁hole|> expr << term + ZeroOrMore((Operator + term).setParseAction( pushFirst ))
finalPol = expr#.setParseAction( printStuff )
return finalPol
def evalStack(self, stack):
op = stack.pop()
if op in ["or", "and"]:
op2 = self.evalStack(stack)
op1 = self.evalStack(stack)
return createTree(op, op1, op2)
else:
# Node value (attribute)
return op
def parse(self, string):
global objStack
del objStack[:]
self.finalPol.parseString(string)
return self.evalStack(objStack)
def findDuplicates(self, tree, _dict):
if tree.left: self.findDuplicates(tree.left, _dict)
if tree.right: self.findDuplicates(tree.right, _dict)
if tree.getNodeType() == OpType.ATTR:
key = tree.getAttribute()
if _dict.get(key) == None: _dict[ key ] = 1
else: _dict[ key ] += 1
def labelDuplicates(self, tree, _dictLabel):
if tree.left: self.labelDuplicates(tree.left, _dictLabel)
if tree.right: self.labelDuplicates(tree.right, _dictLabel)
if tree.getNodeType() == OpType.ATTR:
key = tree.getAttribute()
if _dictLabel.get(key) != None:
tree.index = _dictLabel[ key ]
_dictLabel[ key ] += 1
def prune(self, tree, attributes):
"""given policy tree and attributes, determine whether the attributes satisfy the policy.
if not enough attributes to satisfy policy, return None otherwise, a pruned list of
attributes to potentially recover the associated secret.
"""
(policySatisfied, prunedList) = self.requiredAttributes(tree, attributes)
# print("pruned attrs: ", prunedList)
# if prunedList:
# for i in prunedList:
# print("node: ", i)
if not policySatisfied:
return policySatisfied
return prunedList
def requiredAttributes(self, tree, attrList):
""" determines the required attributes to satisfy policy tree and returns a list of BinNode
objects."""
        if tree is None: return (False, None)
Left = tree.getLeft()
Right = tree.getRight()
if Left: resultLeft, leftAttr = self.requiredAttributes(Left, attrList)
if Right: resultRight, rightAttr = self.requiredAttributes(Right, attrList)
if(tree.getNodeType() == OpType.OR):
# never return both attributes, basically the first one that matches from left to right
if resultLeft: sendThis = leftAttr
elif resultRight: sendThis = rightAttr
else: sendThis = None
result = (resultLeft or resultRight)
if result == False: return (False, sendThis)
return (True, sendThis)
if(tree.getNodeType() == OpType.AND):
if resultLeft and resultRight: sendThis = leftAttr + rightAttr
elif resultLeft: sendThis = leftAttr
elif resultRight: sendThis = rightAttr
else: sendThis = None
result = (resultLeft and resultRight)
if result == False: return (False, sendThis)
return (True, sendThis)
elif(tree.getNodeType() == OpType.ATTR):
if(tree.getAttribute() in attrList):
return (True, [tree])
else:
return (False, None)
return
if __name__ == "__main__":
# policy parser test cases
parser = PolicyParser()
attrs = ['1', '3']
print("Attrs in user set: ", attrs)
tree1 = parser.parse("(1 or 2) and (2 and 3))")
print("case 1: ", tree1, ", pruned: ", parser.prune(tree1, attrs))
tree2 = parser.parse("1 or (2 and 3)")
print("case 2: ", tree2, ", pruned: ", parser.prune(tree2, attrs))
tree3 = parser.parse("(1 or 2) and (4 or 3)")
print("case 3: ", tree3, ", pruned: ", parser.prune(tree3, attrs))<|fim▁end|> | atom = lpar + expr + rpar | (node).setParseAction( pushFirst )
term = atom + ZeroOrMore((Operator + term).setParseAction( pushFirst )) |
<|file_name|>SubAccount2.go<|end_file_name|><|fim▁begin|>package iso20022
// The subaccount of the safekeeping account<|fim▁hole|>
// Unique and unambiguous identification for the account between the account owner and the account servicer.
Identification *Max35Text `xml:"Id"`
}
func (s *SubAccount2) SetIdentification(value string) {
s.Identification = (*Max35Text)(&value)
}<|fim▁end|> | type SubAccount2 struct { |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from django.apps import AppConfig
from django.db.models import signals
from django.contrib.auth import get_user_model
def populate_users(sender, **kwargs):
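    # Idempotently seed ten demo staff accounts; hooked up to post_migrate below.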
User = get_user_model()
for i in range(10):
username = "user_{}".format(i+1)
email = "{}@example.com".format(username)
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User.objects.create_user(username=username,
email=email,
is_staff=True,
password="demo1234")
class PresenceConfig(AppConfig):
name = 'apps.presence'
<|fim▁hole|> signals.post_migrate.connect(populate_users, sender=self)<|fim▁end|> | def ready(self): |
<|file_name|>inferredIndexerOnNamespaceImport.js<|end_file_name|><|fim▁begin|>//// [tests/cases/compiler/inferredIndexerOnNamespaceImport.ts] ////
//// [foo.ts]
export const x = 3;
export const y = 5;
//// [bar.ts]
import * as foo from "./foo";
function f(map: { [k: string]: number }) {
// ...
}
f(foo);
//// [foo.js]
"use strict";
<|fim▁hole|>exports.x = 3;
exports.y = 5;
//// [bar.js]
"use strict";
exports.__esModule = true;
var foo = require("./foo");
function f(map) {
// ...
}
f(foo);<|fim▁end|> | exports.__esModule = true;
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>use std::collections::BTreeMap;
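/// Inverts a score -> names map into a lowercased name -> score map,
/// e.g. {1: ["A", "B"]} becomes {"a": 1, "b": 1}.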
pub fn transform (input: &BTreeMap<i32, Vec<String>>) -> BTreeMap<String, i32> {
let mut output = BTreeMap::new();<|fim▁hole|> output.insert(name.to_lowercase(), *value);
}
}
output
}<|fim▁end|> |
for (value, names) in input {
for name in names { |
<|file_name|>InProcessMessagingSkeleton.js<|end_file_name|><|fim▁begin|>/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
define("joynr/messaging/inprocess/InProcessMessagingSkeleton", [], function() {
/**
* @name InProcessMessagingSkeleton
* @constructor
*/
function InProcessMessagingSkeleton() {
var onReceive;
/**
* @name InProcessMessagingSkeleton#receiveMessage<|fim▁hole|> * @param {JoynrMessage} joynrMessage
* @returns {Object} A+ promise object
*/
this.receiveMessage = function receiveMessage(joynrMessage) {
return onReceive(joynrMessage);
};
/**
* A setter for the callback function that will receive the incoming messages
*
* @name InProcessMessagingSkeleton#registerListener
* @function
*
* @param {Function} newOnReceive the function that is called with the incoming JoynrMessage
*/
this.registerListener = function registerListener(newOnReceive) {
onReceive = newOnReceive;
};
}
return InProcessMessagingSkeleton;
});<|fim▁end|> | * @function
* |
<|file_name|>brcd_fc_san_lookup_service.py<|end_file_name|><|fim▁begin|># (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_utils import excutils
import paramiko
from cinder import exception
from cinder.i18n import _, _LE
from cinder.openstack.common import log as logging
from cinder import utils
from cinder.zonemanager.drivers.brocade import brcd_fabric_opts as fabric_opts
import cinder.zonemanager.drivers.brocade.fc_zone_constants as zone_constant
from cinder.zonemanager import fc_san_lookup_service as fc_service
LOG = logging.getLogger(__name__)
class BrcdFCSanLookupService(fc_service.FCSanLookupService):
"""The SAN lookup service that talks to Brocade switches.
Version History:
1.0.0 - Initial version
"""
VERSION = "1.0.0"
def __init__(self, **kwargs):
"""Initializing the client."""
super(BrcdFCSanLookupService, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.create_configuration()
self.client = self.create_ssh_client(**kwargs)
def create_configuration(self):
"""Configuration specific to SAN context values."""
config = self.configuration
fabric_names = [x.strip() for x in config.fc_fabric_names.split(',')]
LOG.debug('Fabric Names: %s', fabric_names)
# There can be more than one SAN in the network and we need to
# get credentials for each for SAN context lookup later.
if len(fabric_names) > 0:
self.fabric_configs = fabric_opts.load_fabric_configurations(
fabric_names)
def create_ssh_client(self, **kwargs):
ssh_client = paramiko.SSHClient()
known_hosts_file = kwargs.get('known_hosts_file', None)<|fim▁hole|> if known_hosts_file is None:
ssh_client.load_system_host_keys()
else:
ssh_client.load_host_keys(known_hosts_file)
missing_key_policy = kwargs.get('missing_key_policy', None)
if missing_key_policy is None:
missing_key_policy = paramiko.WarningPolicy()
ssh_client.set_missing_host_key_policy(missing_key_policy)
return ssh_client
def get_device_mapping_from_network(self,
initiator_wwn_list,
target_wwn_list):
"""Provides the initiator/target map for available SAN contexts.
Looks up nameserver of each fc SAN configured to find logged in devices
and returns a map of initiator and target port WWNs for each fabric.
:param initiator_wwn_list: List of initiator port WWN
:param target_wwn_list: List of target port WWN
:returns List -- device wwn map in following format
{
<San name>: {
'initiator_port_wwn_list':
('200000051e55a100', '200000051e55a121'..)
'target_port_wwn_list':
('100000051e55a100', '100000051e55a121'..)
}
}
:raises Exception when connection to fabric is failed
"""
device_map = {}
formatted_target_list = []
formatted_initiator_list = []
fabric_map = {}
fabric_names = self.configuration.fc_fabric_names
fabrics = None
if not fabric_names:
raise exception.InvalidParameterValue(
err=_("Missing Fibre Channel SAN configuration "
"param - fc_fabric_names"))
fabrics = [x.strip() for x in fabric_names.split(',')]
LOG.debug("FC Fabric List: %s", fabrics)
if fabrics:
for t in target_wwn_list:
formatted_target_list.append(self.get_formatted_wwn(t))
for i in initiator_wwn_list:
formatted_initiator_list.append(self.
get_formatted_wwn(i))
for fabric_name in fabrics:
fabric_ip = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_address')
fabric_user = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_user')
fabric_pwd = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_password')
fabric_port = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_port')
# Get name server data from fabric and find the targets
# logged in
nsinfo = ''
try:
LOG.debug("Getting name server data for "
"fabric %s", fabric_ip)
self.client.connect(
fabric_ip, fabric_port, fabric_user, fabric_pwd)
nsinfo = self.get_nameserver_info()
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting name server info from"
" fabric %s") % fabric_ip)
except Exception as e:
msg = _("SSH connection failed "
"for %(fabric)s with error: %(err)s"
) % {'fabric': fabric_ip, 'err': e}
LOG.error(msg)
raise exception.FCSanLookupServiceException(message=msg)
finally:
self.client.close()
LOG.debug("Lookup service:nsinfo-%s", nsinfo)
LOG.debug("Lookup service:initiator list from "
"caller-%s", formatted_initiator_list)
LOG.debug("Lookup service:target list from "
"caller-%s", formatted_target_list)
visible_targets = filter(lambda x: x in formatted_target_list,
nsinfo)
visible_initiators = filter(lambda x: x in
formatted_initiator_list, nsinfo)
if visible_targets:
LOG.debug("Filtered targets is: %s", visible_targets)
# getting rid of the : before returning
for idx, elem in enumerate(visible_targets):
elem = str(elem).replace(':', '')
visible_targets[idx] = elem
else:
LOG.debug("No targets are in the nameserver for SAN %s",
fabric_name)
if visible_initiators:
                    # getting rid of the : before returning
for idx, elem in enumerate(visible_initiators):
elem = str(elem).replace(':', '')
visible_initiators[idx] = elem
else:
LOG.debug("No initiators are in the nameserver "
"for SAN %s", fabric_name)
fabric_map = {
'initiator_port_wwn_list': visible_initiators,
'target_port_wwn_list': visible_targets
}
device_map[fabric_name] = fabric_map
LOG.debug("Device map for SAN context: %s", device_map)
return device_map
def get_nameserver_info(self):
"""Get name server data from fabric.
This method will return the connected node port wwn list(local
and remote) for the given switch fabric
"""
cli_output = None
nsinfo_list = []
try:
cli_output = self._get_switch_data(zone_constant.NS_SHOW)
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting nsshow info for fabric"))
if cli_output:
nsinfo_list = self._parse_ns_output(cli_output)
try:
cli_output = self._get_switch_data(zone_constant.NS_CAM_SHOW)
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting nscamshow"))
if cli_output:
nsinfo_list.extend(self._parse_ns_output(cli_output))
LOG.debug("Connector returning nsinfo-%s", nsinfo_list)
return nsinfo_list
def _get_switch_data(self, cmd):
stdin, stdout, stderr = None, None, None
utils.check_ssh_injection([cmd])
try:
stdin, stdout, stderr = self.client.exec_command(cmd)
switch_data = stdout.readlines()
except paramiko.SSHException as e:
msg = (_("SSH Command failed with error '%(err)s' "
"'%(command)s'") % {'err': e,
'command': cmd})
LOG.error(msg)
raise exception.FCSanLookupServiceException(message=msg)
finally:
if (stdin):
stdin.flush()
stdin.close()
if (stdout):
stdout.close()
if (stderr):
stderr.close()
return switch_data
def _parse_ns_output(self, switch_data):
"""Parses name server data.
        Parses nameserver raw data and adds the device port WWNs to the list.
        :returns: list of device port WWNs from the ns info
"""
nsinfo_list = []
for line in switch_data:
if not(" NL " in line or " N " in line):
continue
linesplit = line.split(';')
if len(linesplit) > 2:
node_port_wwn = linesplit[2]
nsinfo_list.append(node_port_wwn)
else:
msg = _("Malformed nameserver string: %s") % line
LOG.error(msg)
raise exception.InvalidParameterValue(err=msg)
return nsinfo_list
def get_formatted_wwn(self, wwn_str):
"""Utility API that formats WWN to insert ':'."""
if (len(wwn_str) != 16):
return wwn_str.lower()
else:
return (':'.join([wwn_str[i:i + 2]
for i in range(0, len(wwn_str), 2)])).lower()<|fim▁end|> | |
<|file_name|>test_temperanotes.py<|end_file_name|><|fim▁begin|>import temperanotes
import pytest, bisect
@pytest.fixture
def idiot_temp():
temp = [1, 1.05, 1.1, 1.15, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9] # not a temperament, just a set of numbers for testing
assert len(temp) == 12 # need 12 notes for the chromatic scale
return temp
def test_note_names():
exclude = ['B#', 'Cb', 'E#', 'Fb']
assert len(temperanotes.note_names_sharp) == 12<|fim▁hole|> note_accidental = note + "#"
        if note_accidental not in exclude:
assert note_accidental in temperanotes.note_names_sharp
note_accidental = note + "b"
        if note_accidental not in exclude:
assert note_accidental in temperanotes.note_names_flat
def test_get_key_index():
assert temperanotes.get_key_index('A') == 0
assert temperanotes.get_key_index('C') == 3
assert temperanotes.get_key_index('F') == 8
assert temperanotes.get_key_index('F#') == 9
assert temperanotes.get_key_index('G#') == 11
assert temperanotes.get_key_index('Ab') == 11
def test_normal_octave_in_C(idiot_temp):
# when starting from C,
# A is the 10th semitone of the chromatic scale, i.e. idiot_temp[9]
expected_freq = [440.0 / idiot_temp[9] * i for i in idiot_temp]
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 0, notes_high = 12, key = 'C', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_normal_octave(idiot_temp):
expected_freq = [440.0 * i for i in idiot_temp]
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 0, notes_high = 12, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_lower_octave(idiot_temp):
expected_freq = [440.0 / 2 * i for i in idiot_temp]
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 12, notes_high = 0, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_one_octave_and_one_note(idiot_temp):
expected_freq = [440.0 * i for i in idiot_temp] + [440.0 * 2]
    assert len(expected_freq) == 13 # obvious, but making sure no simple bugs in the test itself
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 0, notes_high = 13, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_one_octave_and_one_note_per_direction(idiot_temp):
expected_freq_lo = [440.0 / 2 * i for i in idiot_temp]
expected_freq_hi = [440.0 * i for i in idiot_temp]
expected_freq = [440.0 / 4 * idiot_temp[-1]] + expected_freq_lo + expected_freq_hi + [440.0 * 2]
    assert len(expected_freq) == 24 + 2 # obvious, but making sure no simple bugs in the test itself
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 13, notes_high = 13, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_one_octave_and_half_per_direction(idiot_temp):
expected_freq_lolo = [440.0 / 4 * i for i in idiot_temp]
expected_freq_lo = [440.0 / 2 * i for i in idiot_temp]
expected_freq_hi = [440.0 * i for i in idiot_temp]
expected_freq_hihi = [440.0 * 2 * i for i in idiot_temp]
expected_freq = expected_freq_lolo[6:] + expected_freq_lo + expected_freq_hi + expected_freq_hihi[:6]
    assert len(expected_freq) == 48 - 12 # obvious, but making sure no simple bugs in the test itself
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 18, notes_high = 18, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_two_octaves(idiot_temp):
expected_freq_lo = [440.0 / 2 * i for i in idiot_temp]
expected_freq_hi = [440.0 * i for i in idiot_temp]
expected_freq = expected_freq_lo + expected_freq_hi
    assert len(expected_freq) == 24 # obvious, but making sure no simple bugs in the test itself
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 12, notes_high = 12, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_four_octaves(idiot_temp):
expected_freq_lolo = [440.0 / 4 * i for i in idiot_temp]
expected_freq_lo = [440.0 / 2 * i for i in idiot_temp]
expected_freq_hi = [440.0 * i for i in idiot_temp]
expected_freq_hihi = [440.0 * 2 * i for i in idiot_temp]
expected_freq = expected_freq_lolo + expected_freq_lo + expected_freq_hi + expected_freq_hihi
    assert len(expected_freq) == 48 # obvious, but making sure no simple bugs in the test itself
actual_freq = temperanotes.frequencies(temperament = idiot_temp, notes_low = 24, notes_high = 24, key = 'A', base_freq = 440.0, key_freq = 'A')
assert actual_freq == expected_freq
def test_equal_temp():
expected = [1., 2. ** (1./12), 2. ** (1./6), 2. ** (1./4), 2. ** (1./3), 2. ** (5./12), 2. ** (1./2), 2. ** (7./12), 2. ** (2./3), 2. ** (3./4), 2. ** (5./6), 2. ** (11./12)]
actual = temperanotes.equal_temperament()
assert actual == expected
def test_cents():
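    # cents(r) = 1200 * log2(r), so each equal-tempered semitone is exactly 100 cents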
expected = [100 * i for i in range(12)]
actual = temperanotes.to_cents(temperanotes.equal_temperament())
assert actual == expected
def test_read_temperament_nocents():
data = """#This is a comment
1
1.01 # this is another comment
1.3
1.4
# more comments
1.5
1.6
1.7
1.8
1.9
1.10
1.11
1.12"""
expected = [1, 1.01, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 1.10, 1.11, 1.12]
actual, cents = temperanotes.read_temperament(data)
assert actual == expected
assert len(cents) == 0
def test_read_temperament_withcents_and_math():
data = """#This is a comment
1, 100
sqrt(2), 200 # this is another comment
1.3, 4 ** (1/3) # 1.58 must round to 2
2 ** 1/12, 500
# more comments
1.5, 600
1.6, 700
1.7, 900
1.8, 1000
1.9, 2000 # comments can appear anywhere
1.10, 3000
1.11, 1
1.12, 7
# comments at the end"""
expected = [1, 1.4142135623730951, 1.3, 0.1666666666666666666666666, 1.5, 1.6, 1.7, 1.8, 1.9, 1.10, 1.11, 1.12]
actual, cents = temperanotes.read_temperament(data)
assert actual == expected
assert cents == [100, 200, 2, 500, 600, 700, 900, 1000, 2000, 3000, 1, 7]
def test_read_incorrect_temperaments():
data = 11 * "1, 100\n"
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
data = 13 * "1, 100\n"
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
def test_read_more_entries_cents():
data = (5 * "1, 100\n" +
2 * "2, 150, 200\n" + # additional data
5 * "7, 200\n")
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
def test_read_incorrect_cents():
data = (5 * "1, 100\n" +
2 * "2,\n" + # missing some cents (with comma)
5 * "7, 200\n")
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
def test_read_missing_cents():
data = (5 * "1, 100\n" +
2 * "2\n" + # missing some cents (without comma)
5 * "7, 200\n")
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
def test_read_file_with_errors():
data = (5 * "1, 100\n" +
2 * "foo_bar, 200\n" + # syntax error in frequencies
5 * "7, 700\n")
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
data = (5 * "1, 100\n" +
2 * "2, foo_bar\n" + # syntax error in cents
5 * "7, 700\n")
with pytest.raises(SystemExit):
temperanotes.read_temperament(data)
# not testing verify() since it's very simple
# not explicitly testing myeval() since it's implicitly tested in each read_temperament() invocation
def test_equal_piano():
piano = temperanotes.piano(temperanotes.equal_temperament())
index = bisect.bisect_left(piano, 440.) - 1
print "Index of the A-440", index, "(should be the 49th key or index 48)"
print "Value of index", index, "=", piano[index], "should be close to 440."
assert len(piano) == 88 # the piano has 88 keys
assert index == 48
    assert abs(piano[index] - 440.) < 0.01
def test_equal_midi():
midi = temperanotes.midi(temperanotes.equal_temperament())
index = bisect.bisect_left(midi, 440.) - 1
print "Index of the A-440", index, "(should be 69)"
print "Value of index", index, "=", midi[index], "should be close to 440."
assert len(midi) == 128 # the midi spec's 128 notes (0 to 127)
assert index == 69
assert midi[index] - 440. < 0.01<|fim▁end|> | assert len(temperanotes.note_names_flat) == 12
for note in "ABCDEFG":
assert note in temperanotes.note_names_sharp
assert note in temperanotes.note_names_flat |
<|file_name|>checkin.py<|end_file_name|><|fim▁begin|>import datetime
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from six import print_
from six.moves import input  # raw_input on Python 2, input on Python 3
import bigbro
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--store',
dest='store',
help='Watch log subdirectory'),
)
help = 'Log employee RFID check-ins.'
def handle(self, *args, **options):
print_('Note: RFID scanner must be set up for keyboard input (see README).')
print_('Waiting for RFID input. Press Ctrl+C to quit.')
date_today = datetime.datetime.now().strftime('%Y-%m-%d')
log_location = bigbro.log_location(options['store'], date_today, 'checkin')<|fim▁hole|> time_f = datetime.datetime.now().strftime(settings.LOG_TIME_FMT)
print_(time_f, rfid, sep='\t', file=outf)
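                    # each scan is appended as "<time>\t<rfid>", illustratively
                    # "14:02:33\t0005543921" when LOG_TIME_FMT is '%H:%M:%S'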
except KeyboardInterrupt:
print_('')
print_('Quitting...')
break<|fim▁end|> | with open(log_location, 'a') as outf:
while True:
try:
                    rfid = input()
<|file_name|>watcher.test.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as assert from 'assert';
import * as platform from 'vs/base/common/platform';
import { FileChangeType, FileChangesEvent } from 'vs/platform/files/common/files';
import uri from 'vs/base/common/uri';
import { IRawFileChange, toFileChangesEvent, normalize } from 'vs/workbench/services/files/node/watcher/common';
import { Event, Emitter } from 'vs/base/common/event';
class TestFileWatcher {
private readonly _onFileChanges: Emitter<FileChangesEvent>;
constructor() {
this._onFileChanges = new Emitter<FileChangesEvent>();
}
public get onFileChanges(): Event<FileChangesEvent> {
return this._onFileChanges.event;
}
public report(changes: IRawFileChange[]): void {
this.onRawFileEvents(changes);
}
private onRawFileEvents(events: IRawFileChange[]): void {
// Normalize
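		// normalize() coalesces related raw events before emitting, e.g. (illustrative):
		//   ADDED x, DELETED x  -> (dropped)   short-lived file
		//   DELETED x, ADDED x  -> UPDATED x   atomic-save pattern
		//   ADDED x, UPDATED x  -> ADDED x     update folded into the add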
let normalizedEvents = normalize(events);
// Emit through event emitter
if (normalizedEvents.length > 0) {
this._onFileChanges.fire(toFileChangesEvent(normalizedEvents));
}
}
}
enum Path {
UNIX,
WINDOWS,
UNC
}
suite('Watcher', () => {
test('watching - simple add/update/delete', function (done: () => void) {
const watch = new TestFileWatcher();
const added = uri.file('/users/data/src/added.txt');
const updated = uri.file('/users/data/src/updated.txt');
const deleted = uri.file('/users/data/src/deleted.txt');
const raw: IRawFileChange[] = [
{ path: added.fsPath, type: FileChangeType.ADDED },
{ path: updated.fsPath, type: FileChangeType.UPDATED },
{ path: deleted.fsPath, type: FileChangeType.DELETED },
];
watch.onFileChanges(e => {
assert.ok(e);
assert.equal(e.changes.length, 3);
assert.ok(e.contains(added, FileChangeType.ADDED));
assert.ok(e.contains(updated, FileChangeType.UPDATED));
assert.ok(e.contains(deleted, FileChangeType.DELETED));
done();
});
watch.report(raw);
});
let pathSpecs = platform.isWindows ? [Path.WINDOWS, Path.UNC] : [Path.UNIX];
pathSpecs.forEach((p) => {
		test('watching - delete only reported for top level folder (' + Path[p] + ')', function (done: () => void) {
const watch = new TestFileWatcher();
const deletedFolderA = uri.file(p === Path.UNIX ? '/users/data/src/todelete1' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\todelete1' : '\\\\localhost\\users\\data\\src\\todelete1');
const deletedFolderB = uri.file(p === Path.UNIX ? '/users/data/src/todelete2' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\todelete2' : '\\\\localhost\\users\\data\\src\\todelete2');
const deletedFolderBF1 = uri.file(p === Path.UNIX ? '/users/data/src/todelete2/file.txt' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\todelete2\\file.txt' : '\\\\localhost\\users\\data\\src\\todelete2\\file.txt');
const deletedFolderBF2 = uri.file(p === Path.UNIX ? '/users/data/src/todelete2/more/test.txt' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\todelete2\\more\\test.txt' : '\\\\localhost\\users\\data\\src\\todelete2\\more\\test.txt');
const deletedFolderBF3 = uri.file(p === Path.UNIX ? '/users/data/src/todelete2/super/bar/foo.txt' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\todelete2\\super\\bar\\foo.txt' : '\\\\localhost\\users\\data\\src\\todelete2\\super\\bar\\foo.txt');
const deletedFileA = uri.file(p === Path.UNIX ? '/users/data/src/deleteme.txt' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\deleteme.txt' : '\\\\localhost\\users\\data\\src\\deleteme.txt');
const addedFile = uri.file(p === Path.UNIX ? '/users/data/src/added.txt' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\added.txt' : '\\\\localhost\\users\\data\\src\\added.txt');
const updatedFile = uri.file(p === Path.UNIX ? '/users/data/src/updated.txt' : p === Path.WINDOWS ? 'C:\\users\\data\\src\\updated.txt' : '\\\\localhost\\users\\data\\src\\updated.txt');
const raw: IRawFileChange[] = [
{ path: deletedFolderA.fsPath, type: FileChangeType.DELETED },
{ path: deletedFolderB.fsPath, type: FileChangeType.DELETED },
{ path: deletedFolderBF1.fsPath, type: FileChangeType.DELETED },
{ path: deletedFolderBF2.fsPath, type: FileChangeType.DELETED },
{ path: deletedFolderBF3.fsPath, type: FileChangeType.DELETED },
{ path: deletedFileA.fsPath, type: FileChangeType.DELETED },
{ path: addedFile.fsPath, type: FileChangeType.ADDED },
{ path: updatedFile.fsPath, type: FileChangeType.UPDATED }
];
watch.onFileChanges(e => {
assert.ok(e);
assert.equal(e.changes.length, 5);
assert.ok(e.contains(deletedFolderA, FileChangeType.DELETED));
assert.ok(e.contains(deletedFolderB, FileChangeType.DELETED));
assert.ok(e.contains(deletedFileA, FileChangeType.DELETED));
assert.ok(e.contains(addedFile, FileChangeType.ADDED));
assert.ok(e.contains(updatedFile, FileChangeType.UPDATED));
done();
});
watch.report(raw);
});
});
test('watching - event normalization: ignore CREATE followed by DELETE', function (done: () => void) {
const watch = new TestFileWatcher();
const created = uri.file('/users/data/src/related');
const deleted = uri.file('/users/data/src/related');
const unrelated = uri.file('/users/data/src/unrelated');
const raw: IRawFileChange[] = [
{ path: created.fsPath, type: FileChangeType.ADDED },
{ path: deleted.fsPath, type: FileChangeType.DELETED },
{ path: unrelated.fsPath, type: FileChangeType.UPDATED },
];
watch.onFileChanges(e => {
assert.ok(e);
assert.equal(e.changes.length, 1);
assert.ok(e.contains(unrelated, FileChangeType.UPDATED));
done();
});
watch.report(raw);
});
test('watching - event normalization: flatten DELETE followed by CREATE into CHANGE', function (done: () => void) {
const watch = new TestFileWatcher();
const deleted = uri.file('/users/data/src/related');
const created = uri.file('/users/data/src/related');
const unrelated = uri.file('/users/data/src/unrelated');
const raw: IRawFileChange[] = [
{ path: deleted.fsPath, type: FileChangeType.DELETED },
{ path: created.fsPath, type: FileChangeType.ADDED },
{ path: unrelated.fsPath, type: FileChangeType.UPDATED },
];
watch.onFileChanges(e => {
assert.ok(e);
assert.equal(e.changes.length, 2);
assert.ok(e.contains(deleted, FileChangeType.UPDATED));
assert.ok(e.contains(unrelated, FileChangeType.UPDATED));
done();
});
watch.report(raw);
});
test('watching - event normalization: ignore UPDATE when CREATE received', function (done: () => void) {
const watch = new TestFileWatcher();
const created = uri.file('/users/data/src/related');
const updated = uri.file('/users/data/src/related');
const unrelated = uri.file('/users/data/src/unrelated');
const raw: IRawFileChange[] = [
{ path: created.fsPath, type: FileChangeType.ADDED },
{ path: updated.fsPath, type: FileChangeType.UPDATED },
{ path: unrelated.fsPath, type: FileChangeType.UPDATED },<|fim▁hole|> watch.onFileChanges(e => {
assert.ok(e);
assert.equal(e.changes.length, 2);
assert.ok(e.contains(created, FileChangeType.ADDED));
assert.ok(!e.contains(created, FileChangeType.UPDATED));
assert.ok(e.contains(unrelated, FileChangeType.UPDATED));
done();
});
watch.report(raw);
});
test('watching - event normalization: apply DELETE', function (done: () => void) {
const watch = new TestFileWatcher();
const updated = uri.file('/users/data/src/related');
const updated2 = uri.file('/users/data/src/related');
const deleted = uri.file('/users/data/src/related');
const unrelated = uri.file('/users/data/src/unrelated');
const raw: IRawFileChange[] = [
{ path: updated.fsPath, type: FileChangeType.UPDATED },
{ path: updated2.fsPath, type: FileChangeType.UPDATED },
{ path: unrelated.fsPath, type: FileChangeType.UPDATED },
{ path: updated.fsPath, type: FileChangeType.DELETED }
];
watch.onFileChanges(e => {
assert.ok(e);
assert.equal(e.changes.length, 2);
assert.ok(e.contains(deleted, FileChangeType.DELETED));
assert.ok(!e.contains(updated, FileChangeType.UPDATED));
assert.ok(e.contains(unrelated, FileChangeType.UPDATED));
done();
});
watch.report(raw);
});
});<|fim▁end|> | ];
|
<|file_name|>ack.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Pub/Sub topics publish command."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.pubsub import util
<|fim▁hole|>class Ack(base.Command):
"""Acknowledges one or more messages on the specified subscription.
Acknowledges one or more messages as having been successfully received.
If a delivered message is not acknowledged, Cloud Pub/Sub will attempt to
deliver it again.
"""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument('subscription',
help='Subscription name to ACK messages on.')
parser.add_argument('ackid', nargs='+',
help='One or more AckId to acknowledge.')
def Collection(self):
return util.SUBSCRIPTIONS_ACK_COLLECTION
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
Ack display dictionary with information about the acknowledged messages
and related subscription.
"""
msgs = self.context['pubsub_msgs']
pubsub = self.context['pubsub']
ack_req = msgs.PubsubProjectsSubscriptionsAcknowledgeRequest(
acknowledgeRequest=msgs.AcknowledgeRequest(ackIds=args.ackid),
subscription=util.SubscriptionFormat(args.subscription))
pubsub.projects_subscriptions.Acknowledge(ack_req)
# Using this dict, instead of returning the AcknowledgeRequest directly,
# to preserve the naming conventions for subscriptionId.
return {'subscriptionId': ack_req.subscription,
'ackIds': ack_req.acknowledgeRequest.ackIds}<|fim▁end|> | |
<|file_name|>ex1.js<|end_file_name|><|fim▁begin|>var DI = require('../');
function A(options) {
this.name = 'A';
this.options = options;
}
function B(options) {
if (! (this instanceof B)) {
return new B(options);
}
this.name = 'B';
this.options = options;
}
var di = new DI();
function S(msg) {
console.log(msg);
}
di.addConfig({
A: A,
B: B,
S: S
});
di.addConfig({
"my": {
"configData": {
"key1": "val1",
"static": {
'@static': '&S'
}
},
"a": {
"@class": "&A",
"test": "test1",
"data": "&my.configData"
},
"b": {
"@factory": "&B",
"a": "&my.a",
"c": {
"d": ["1", 2]
},<|fim▁hole|> "test": "test2"
}
}
});
var c = di.getContainer();
var a = c.get('my.a');
console.log(a.options);
"@class": "&A",
"test": "test3"
}, |
<|file_name|>Cap02_pagina_25_comp_interactiva.py<|end_file_name|><|fim▁begin|>'''
@author: Sergio Rojas
@contact: [email protected]
--------------------------<|fim▁hole|> Atribución-NoComercial-CompartirIgual 3.0 Venezuela (CC BY-NC-SA 3.0 VE)
http://creativecommons.org/licenses/by-nc-sa/3.0/ve/
Created on April 19, 2016
'''
print(3+5)
print(2-6)
print(2*7)
print(6/2)
print(1/3)
print(1.0/3)
print(((2 + 7*(234 -15)+673)*775)/(5+890.0 -(234+1)*5.0))
print(( (2.0 + 7*(234 - 15) + 673)*775 )/( 5+890.0 - (234+1)*5.0 ))
print(( (2.0 + 7*(234 - 15) + 673)*775 ) /( 5+890.0 - (234+1)*5.0 ))
print(2.5**3)
print(2.5**(3.2 + 2.1))
print(6.78**30)
print(8.647504884825773*1e+24 - 8.647504884825773*10**24)
print(1e+2)
print(1e2)
print(1e-2)
print(2e4)
print(4**(1./2.))
print(4**0.5)
print(8**(1./3.))
print(8**0.3333)<|fim▁end|> | Contenido bajo |
<|file_name|>account.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use crate::client::MDataInfo;
use crate::crypto::{shared_box, shared_secretbox, shared_sign};
use crate::errors::CoreError;
use crate::DIR_TAG;
use maidsafe_utilities::serialisation::{deserialise, serialise};
use routing::{FullId, XorName, XOR_NAME_LEN};
use rust_sodium::crypto::sign::Seed;
use rust_sodium::crypto::{box_, pwhash, secretbox, sign};
use tiny_keccak::sha3_256;
/// Representing the User Account information on the network.
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct Account {
/// The User Account Keys.
pub maid_keys: ClientKeys,
/// The user's access container.
pub access_container: MDataInfo,
/// The user's configuration directory.
pub config_root: MDataInfo,
/// Set to `true` when all root and standard containers
/// have been created successfully. `false` signifies that<|fim▁hole|> /// previous attempt might have failed - check on login.
pub root_dirs_created: bool,
}
impl Account {
/// Create new Account with a provided set of keys.
pub fn new(maid_keys: ClientKeys) -> Result<Self, CoreError> {
Ok(Account {
maid_keys,
access_container: MDataInfo::random_private(DIR_TAG)?,
config_root: MDataInfo::random_private(DIR_TAG)?,
root_dirs_created: false,
})
}
/// Symmetric encryption of Account using User's credentials.
/// Credentials are passed through key-derivation-function first
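    /// A round trip is expected to hold (sketch, cf. the `encryption` test below):
    /// `Account::decrypt(&account.encrypt(pw, pin)?, pw, pin)? == account`.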
pub fn encrypt(&self, password: &[u8], pin: &[u8]) -> Result<Vec<u8>, CoreError> {
let serialised_self = serialise(self)?;
let (key, nonce) = Self::generate_crypto_keys(password, pin)?;
Ok(secretbox::seal(&serialised_self, &nonce, &key))
}
/// Symmetric decryption of Account using User's credentials.
/// Credentials are passed through key-derivation-function first
pub fn decrypt(encrypted_self: &[u8], password: &[u8], pin: &[u8]) -> Result<Self, CoreError> {
let (key, nonce) = Self::generate_crypto_keys(password, pin)?;
let decrypted_self = secretbox::open(encrypted_self, &nonce, &key)
.map_err(|_| CoreError::SymmetricDecipherFailure)?;
Ok(deserialise(&decrypted_self)?)
}
/// Generate User's Identity for the network using supplied credentials in
/// a deterministic way. This is similar to the username in various places.
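    /// For example (illustrative), `generate_network_id(b"user1", b"0")` always
    /// yields the same `XorName`, while changing either argument yields a
    /// different one; see the `generate_network_id` test below.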
pub fn generate_network_id(keyword: &[u8], pin: &[u8]) -> Result<XorName, CoreError> {
let mut id = XorName([0; XOR_NAME_LEN]);
Self::derive_key(&mut id.0[..], keyword, pin)?;
Ok(id)
}
fn generate_crypto_keys(
password: &[u8],
pin: &[u8],
) -> Result<(secretbox::Key, secretbox::Nonce), CoreError> {
let mut output = [0; secretbox::KEYBYTES + secretbox::NONCEBYTES];
Self::derive_key(&mut output[..], password, pin)?;
// OK to unwrap here, as we guaranteed the slices have the correct length.
let key = unwrap!(secretbox::Key::from_slice(&output[..secretbox::KEYBYTES]));
let nonce = unwrap!(secretbox::Nonce::from_slice(&output[secretbox::KEYBYTES..]));
Ok((key, nonce))
}
fn derive_key(output: &mut [u8], input: &[u8], user_salt: &[u8]) -> Result<(), CoreError> {
let mut salt = pwhash::Salt([0; pwhash::SALTBYTES]);
{
let pwhash::Salt(ref mut salt_bytes) = salt;
if salt_bytes.len() == 32 {
let hashed_pin = sha3_256(user_salt);
for it in salt_bytes.iter_mut().enumerate() {
*it.1 = hashed_pin[it.0];
}
} else {
return Err(CoreError::UnsupportedSaltSizeForPwHash);
}
}
pwhash::derive_key(
output,
input,
&salt,
pwhash::OPSLIMIT_INTERACTIVE,
pwhash::MEMLIMIT_INTERACTIVE,
)
.map(|_| ())
.map_err(|_| CoreError::UnsuccessfulPwHash)
}
}
/// Client signing and encryption keypairs
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct ClientKeys {
/// Signing public key
pub sign_pk: sign::PublicKey,
/// Signing secret key
pub sign_sk: shared_sign::SecretKey,
/// Encryption public key
pub enc_pk: box_::PublicKey,
/// Encryption private key
pub enc_sk: shared_box::SecretKey,
/// Symmetric encryption key
pub enc_key: shared_secretbox::Key,
}
impl ClientKeys {
/// Construct new `ClientKeys`
pub fn new(seed: Option<&Seed>) -> Self {
let sign = match seed {
Some(s) => shared_sign::keypair_from_seed(s),
None => shared_sign::gen_keypair(),
};
let enc = shared_box::gen_keypair();
let enc_key = shared_secretbox::gen_key();
ClientKeys {
sign_pk: sign.0,
sign_sk: sign.1,
enc_pk: enc.0,
enc_sk: enc.1,
enc_key,
}
}
}
impl Default for ClientKeys {
fn default() -> Self {
Self::new(None)
}
}
impl Into<FullId> for ClientKeys {
fn into(self) -> FullId {
let enc_sk = (*self.enc_sk).clone();
let sign_sk = (*self.sign_sk).clone();
FullId::with_keys((self.enc_pk, enc_sk), (self.sign_pk, sign_sk))
}
}
#[cfg(test)]
mod tests {
use super::*;
use maidsafe_utilities::serialisation::{deserialise, serialise};
use std::u32;
// Test deterministically generating User's Identity for the network using supplied credentials.
#[test]
fn generate_network_id() {
let keyword1 = b"user1";
let user1_id1 = unwrap!(Account::generate_network_id(keyword1, b"0"));
let user1_id2 = unwrap!(Account::generate_network_id(keyword1, b"1234"));
let user1_id3 = unwrap!(Account::generate_network_id(
keyword1,
u32::MAX.to_string().as_bytes(),
));
assert_ne!(user1_id1, user1_id2);
assert_ne!(user1_id1, user1_id3);
assert_ne!(user1_id2, user1_id3);
assert_eq!(
user1_id1,
unwrap!(Account::generate_network_id(keyword1, b"0"))
);
assert_eq!(
user1_id2,
unwrap!(Account::generate_network_id(keyword1, b"1234"))
);
assert_eq!(
user1_id3,
unwrap!(Account::generate_network_id(
keyword1,
u32::MAX.to_string().as_bytes(),
))
);
let keyword2 = b"user2";
let user1_id = unwrap!(Account::generate_network_id(keyword1, b"248"));
let user2_id = unwrap!(Account::generate_network_id(keyword2, b"248"));
assert_ne!(user1_id, user2_id);
}
// Test deterministically generating cryptographic keys.
#[test]
fn generate_crypto_keys() {
let password1 = b"super great password";
let password2 = b"even better password";
let keys1 = unwrap!(Account::generate_crypto_keys(password1, b"0"));
let keys2 = unwrap!(Account::generate_crypto_keys(password1, b"1234"));
let keys3 = unwrap!(Account::generate_crypto_keys(
password1,
u32::MAX.to_string().as_bytes(),
));
assert_ne!(keys1, keys2);
assert_ne!(keys1, keys3);
assert_ne!(keys2, keys3);
let keys1 = unwrap!(Account::generate_crypto_keys(password1, b"0"));
let keys2 = unwrap!(Account::generate_crypto_keys(password2, b"0"));
assert_ne!(keys1, keys2);
let keys1 = unwrap!(Account::generate_crypto_keys(password1, b"0"));
let keys2 = unwrap!(Account::generate_crypto_keys(password1, b"0"));
assert_eq!(keys1, keys2);
}
// Test serialising and deserialising accounts.
#[test]
fn serialisation() {
let account = unwrap!(Account::new(ClientKeys::new(None)));
let encoded = unwrap!(serialise(&account));
let decoded: Account = unwrap!(deserialise(&encoded));
assert_eq!(decoded, account);
}
// Test encryption and decryption of accounts.
#[test]
fn encryption() {
let account = unwrap!(Account::new(ClientKeys::new(None)));
let password = b"impossible to guess";
let pin = b"1000";
let encrypted = unwrap!(account.encrypt(password, pin));
let encoded = unwrap!(serialise(&account));
assert!(!encrypted.is_empty());
assert_ne!(encrypted, encoded);
let decrypted = unwrap!(Account::decrypt(&encrypted, password, pin));
assert_eq!(account, decrypted);
}
}<|fim▁end|> | |
<|file_name|>techno-date-cell-test.js<|end_file_name|><|fim▁begin|>/* jshint expr:true */
import { expect } from 'chai';
import {<|fim▁hole|>import hbs from 'htmlbars-inline-precompile';
describeComponent(
'techno-date-cell',
'Integration: TechnoDateCellComponent',
{
integration: true
},
function() {
it('renders', function() {
this.set('date', new Date("1/1/1925"));
this.render(hbs`{{techno-date-cell value=date}}`);
expect(this.$()).to.have.length(1);
expect(this.$().find('td')).to.have.length(1);
expect(this.$().find('td:contains("1/1/1925")')).to.have.length(1);
});
}
);<|fim▁end|> | describeComponent,
it
} from 'ember-mocha'; |
<|file_name|>generic.py<|end_file_name|><|fim▁begin|># Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import datetime
import os
import sys
import re
import urllib2
import copy
import itertools
import operator
import collections
import sickbeard
from sickbeard import helpers, classes, logger, db
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT#, SEED_POLICY_TIME, SEED_POLICY_RATIO
from sickbeard import tvcache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from lib.hachoir_parser import createParser
from sickbeard.name_parser.parser import NameParser, InvalidNameException
from sickbeard import scene_numbering
from sickbeard.common import Overview
class GenericProvider:
NZB = "nzb"
TORRENT = "torrent"
def __init__(self, name):
# these need to be set in the subclass
self.providerType = None
self.name = name
self.url = ''
self.supportsBacklog = False
self.cache = tvcache.TVCache(self)
def getID(self):
return GenericProvider.makeID(self.name)
@staticmethod
def makeID(name):
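        # e.g. (illustrative): makeID(" NZBs.org ") -> "nzbs_org"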
        return re.sub(r"[^\w]", "_", name.strip().lower())
def imageName(self):
return self.getID() + '.png'
def _checkAuth(self):
return
def isActive(self):
if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS:
return self.isEnabled()
elif self.providerType == GenericProvider.TORRENT and sickbeard.USE_TORRENTS:
return self.isEnabled()
else:
return False
def isEnabled(self):
"""
This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
"""
return False
def getResult(self, episodes):
"""
Returns a result of the correct type for this provider
"""
if self.providerType == GenericProvider.NZB:
result = classes.NZBSearchResult(episodes)
elif self.providerType == GenericProvider.TORRENT:
result = classes.TorrentSearchResult(episodes)
else:
result = classes.SearchResult(episodes)
result.provider = self
return result
def getURL(self, url, post_data=None, headers=None):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
"""
if not headers:
headers = []
data = helpers.getURL(url, post_data, headers)
if not data:
logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
return None
return data
def downloadResult(self, result):
"""
Save the result to disk.
"""
logger.log(u"Downloading a result from " + self.name+" at " + result.url)
data = self.getURL(result.url)
if data == None:
return False
# use the appropriate watch folder
if self.providerType == GenericProvider.NZB:
saveDir = sickbeard.NZB_DIR
writeMode = 'w'
elif self.providerType == GenericProvider.TORRENT:
saveDir = sickbeard.TORRENT_DIR
writeMode = 'wb'
else:
return False
# use the result name as the filename
file_name = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
logger.log(u"Saving to " + file_name, logger.DEBUG)
try:
with open(file_name, writeMode) as fileOut:
fileOut.write(data)
helpers.chmodAsParent(file_name)
except EnvironmentError, e:
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
return False
# as long as it's a valid download then consider it a successful snatch
return self._verify_download(file_name)
def _verify_download(self, file_name=None):
"""
Checks the saved file to see if it was actually valid, if not then consider the download a failure.
"""
# primitive verification of torrents, just make sure we didn't get a text file or something
if self.providerType == GenericProvider.TORRENT:
parser = createParser(file_name)
if parser:
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except:
pass
if mime_type != 'application/x-bittorrent':
logger.log(u"Result is not a valid torrent file", logger.WARNING)
return False
return True
def searchRSS(self):
self._checkAuth()
self.cache.updateCache()
return self.cache.findNeededEpisodes()
def getQuality(self, item):
"""
Figures out the quality of the given RSS item node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns a Quality value obtained from the node's data
"""
(title, url) = self._get_title_and_url(item) # @UnusedVariable
quality = Quality.sceneQuality(title)
return quality
def _doSearch(self):
return []
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
return []
def _get_episode_search_strings(self, ep_obj):
return []
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = helpers.get_xml_text(item.find('title'))
if title:
title = title.replace(' ', '.')
url = helpers.get_xml_text(item.find('link'))
if url:
url = url.replace('&', '&')
return (title, url)
def findEpisode(self, episode, manualSearch=False):
self._checkAuth()
# XEM episode scene numbering
sceneEpisode = copy.copy(episode)
sceneEpisode.convertToSceneNumbering()
        logger.log(u'Searching "%s" for "%s" as "%s"'
                   % (self.name, episode.prettyName(), sceneEpisode.prettyName()))
self.cache.updateCache()
results = self.cache.searchCache(episode, manualSearch)
logger.log(u"Cache results: " + str(results), logger.DEBUG)
logger.log(u"manualSearch: " + str(manualSearch), logger.DEBUG)
# if we got some results then use them no matter what.
# OR
# return anyway unless we're doing a manual search
if results or not manualSearch:
return results
itemList = []
for cur_search_string in self._get_episode_search_strings(sceneEpisode):
itemList += self._doSearch(cur_search_string, show=episode.show)
for item in itemList:
(title, url) = self._get_title_and_url(item)
# parse the file name
try:
myParser = NameParser(False)
parse_result = myParser.parse(title, True)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue
if episode.show.air_by_date:
if parse_result.air_date != episode.airdate:
logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
continue
elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
continue
quality = self.getQuality(item)
if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
result = self.getResult([episode])
result.url = url
result.name = title
result.quality = quality
result.provider = self
result.content = None
results.append(result)
return results
def findSeasonResults(self, show, season):
itemList = []
results = {}
sceneSeasons = {}
searchSeason = False
# convert wanted seasons and episodes to XEM scene numbering
seasonEp = show.getAllEpisodes(season)
wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]
map(lambda x: x.convertToSceneNumbering(), wantedEp)
        for x in wantedEp:
            sceneSeasons.setdefault(x.season, []).append(x)
if wantedEp == seasonEp and not show.air_by_date:
searchSeason = True
for sceneSeason,sceneEpisodes in sceneSeasons.iteritems():
for curString in self._get_season_search_strings(show, str(sceneSeason), sceneEpisodes, searchSeason):
itemList += self._doSearch(curString)
for item in itemList:
(title, url) = self._get_title_and_url(item)
quality = self.getQuality(item)
# parse the file name
try:
myParser = NameParser(False)
parse_result = myParser.parse(title, True)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue
if not show.air_by_date:
# this check is meaningless for non-season searches
if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(season) + ", ignoring", logger.DEBUG)
continue
# we just use the existing info for normal searches
actual_season = season
actual_episodes = parse_result.episode_numbers
else:
if not parse_result.air_by_date:
logger.log(u"This is supposed to be an air-by-date search but the result "+title+" didn't parse as one, skipping it", logger.DEBUG)
continue
myDB = db.DBConnection()
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.tvdbid, parse_result.air_date.toordinal()])
if len(sql_results) != 1:
logger.log(u"Tried to look up the date for the episode "+title+" but the database didn't give proper results, skipping it", logger.WARNING)
continue
actual_season = int(sql_results[0]["season"])
actual_episodes = [int(sql_results[0]["episode"])]
# make sure we want the episode
wantEp = True
for epNo in actual_episodes:
if not show.wantEpisode(actual_season, epNo, quality):
wantEp = False
break
if not wantEp:
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
# make a result object
epObj = []
<|fim▁hole|> result = self.getResult(epObj)
result.url = url
result.name = title
result.quality = quality
result.provider = self
result.content = None
if len(epObj) == 1:
epNum = epObj[0].episode
elif len(epObj) > 1:
epNum = MULTI_EP_RESULT
logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers), logger.DEBUG)
elif len(epObj) == 0:
epNum = SEASON_RESULT
result.extraInfo = [show]
logger.log(u"Separating full season result to check for later", logger.DEBUG)
if epNum in results:
results[epNum].append(result)
else:
results[epNum] = [result]
return results
def findPropers(self, search_date=None):
results = self.cache.listPropers(search_date)
return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in results]
class NZBProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.providerType = GenericProvider.NZB
class TorrentProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.providerType = GenericProvider.TORRENT
# self.option = {SEED_POLICY_TIME : '',
# SEED_POLICY_RATIO: '',
# 'PROCESS_METHOD': ''
# }
# def get_provider_options(self):
# pass
#
# def set_provider_options(self):
# self.option[SEED_POLICY_TIME] + '|' + self.option[SEED_POLICY_RATIO] + '|' + self.option['PROCESS_METHOD']<|fim▁end|> | for curEp in actual_episodes:
epObj.append(show.getEpisode(actual_season, curEp))
|
<|file_name|>Stormpath.js<|end_file_name|><|fim▁begin|>import 'isomorphic-fetch'
import base64 from 'base-64'
import utf8 from 'utf8'
import Request from './Request'
function encodeAccount(username, password) {
let bytes = utf8.encode(`${ username }:${ password }`)
return base64.encode(bytes)
}
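// e.g. encodeAccount('user', 'pass') === 'dXNlcjpwYXNz' (base64 of "user:pass")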
export default class Stormpath {
constructor({ application, authentication } = {}) {
this.application = application
this.authentication = authentication
}
retrieveApplication() {
    let url = `https://api.stormpath.com/v1/applications/${ this.application }`
let options = { authentication: this.authentication }<|fim▁hole|> }
createAccount(payload) {
let url = `https://api.stormpath.com/v1/applications/${ this.application }/accounts`
let options = {
authentication: this.authentication,
payload: payload
}
return Request.post(url, options)
}
retrieveAccount(account) {
let url = `https://api.stormpath.com/v1/accounts/${ account }`
let options = {
authentication: this.authentication
}
return Request.get(url, options)
}
authenticateAccount({ username, password } = {}) {
let url = `https://api.stormpath.com/v1/applications/${ this.application }/loginAttempts`
let payload = {
type: 'basic',
value: encodeAccount(username, password)
}
let options = {
authentication: this.authentication,
payload: payload
}
return Request.post(url, options)
}
}
if (require.main === module) {
const credentials = {
application: 'zDhRIszpk93AwssJDXuPs',
authentication: {
username: '1HU99B538PG3SW50K5M2NPJBW',
password: '7ukbB9oDRjgyMEX/057SKtAwwLtOR3fbKvNQOp4i/uI'
}
}
const account = {
givenName: 'Denis',
surname: 'Storm',
username: 'DenisCarriere',
email: '[email protected]',
password: 'Denis44C',
customData: { number: 4 }
}
const stormpath = new Stormpath(credentials)
//stormpath.createAccount(account)
//stormpath.retrieveApplication()
stormpath.authenticateAccount(account)
stormpath.retrieveAccount('3NElH12QutCmRSi3e6PAmI')
.then(
data => console.log(data),
error => console.log(error)
)
}<|fim▁end|> | return Request.get(url, options) |
<|file_name|>email_parser.py<|end_file_name|><|fim▁begin|>from copy import deepcopy
from datetime import datetime
from datetime import timezone
from email.utils import mktime_tz
from email.utils import parsedate_tz
from io import BytesIO
from itertools import chain
from mimetypes import guess_type
from typing import Callable
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
from bs4 import BeautifulSoup
from PIL import Image
from pyzmail import PyzMessage
from pyzmail.parse import MailPart
from requests import Response
from requests import get as http_get
from opwen_email_server.config import MAX_HEIGHT_IMAGES
from opwen_email_server.config import MAX_WIDTH_IMAGES
from opwen_email_server.utils.log import LogMixin
from opwen_email_server.utils.serialization import to_base64
def _parse_body(message: PyzMessage, default_charset: str = 'ascii') -> str:
body_parts = (message.html_part, message.text_part)
for part in body_parts:
if part is None:
continue
payload = part.get_payload()
if payload is None:
continue
charset = part.charset or default_charset
return payload.decode(charset, errors='replace')
return ''
def _parse_attachments(mailparts: Iterable[MailPart]) -> Iterable[dict]:
attachment_parts = (part for part in mailparts if not part.is_body)
for part in attachment_parts:
filename = part.sanitized_filename
payload = part.get_payload()
attachment_id = part.content_id
if filename and payload:
attachment = {'filename': filename, 'content': payload}
if attachment_id:
attachment['cid'] = attachment_id
yield attachment
def _parse_addresses(message: PyzMessage, address_type: str) -> List[str]:
return sorted(email for _, email in message.get_addresses(address_type) if email)
def _parse_address(message: PyzMessage, address_type: str) -> Optional[str]:
return next(iter(_parse_addresses(message, address_type)), None)
def _parse_sent_at(message: PyzMessage) -> Optional[str]:
rfc_822 = message.get_decoded_header('date')
if not rfc_822:
return None
date_tz = parsedate_tz(rfc_822)
if not date_tz:
return None
timestamp = mktime_tz(date_tz)
# noinspection PyUnresolvedReferences
date_utc = datetime.fromtimestamp(timestamp, timezone.utc)
return date_utc.strftime('%Y-%m-%d %H:%M')
def parse_mime_email(mime_email: str) -> dict:
message = PyzMessage.factory(mime_email)
return {
'sent_at': _parse_sent_at(message),
'to': _parse_addresses(message, 'to'),
'cc': _parse_addresses(message, 'cc'),
'bcc': _parse_addresses(message, 'bcc'),
'from': _parse_address(message, 'from'),
'subject': message.get_subject(),
'body': _parse_body(message),
'attachments': list(_parse_attachments(message.mailparts)),
}
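# Illustrative result shape (values are made-up examples, not real data):
#   {'sent_at': '2017-01-01 12:00', 'to': ['[email protected]'], 'cc': [], 'bcc': [],
#    'from': '[email protected]', 'subject': '...', 'body': '...',
#    'attachments': [{'filename': 'f.png', 'content': b'...', 'cid': '...'}]}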
def format_attachments(email: dict) -> dict:
attachments = email.get('attachments', [])
if not attachments:
return email
formatted_attachments = deepcopy(attachments)
is_any_attachment_changed = False
for i, attachment in enumerate(attachments):
filename = attachment.get('filename', '')
content = attachment.get('content', b'')
formatted_content = _format_attachment(filename, content)
if content != formatted_content:
formatted_attachments[i]['content'] = formatted_content
is_any_attachment_changed = True
if not is_any_attachment_changed:
return email
new_email = dict(email)
new_email['attachments'] = formatted_attachments
return new_email
def _format_attachment(filename: str, content: bytes) -> bytes:
attachment_type = guess_type(filename)[0]
if not attachment_type:
return content
if 'image' in attachment_type.lower():
content = _change_image_size(content)
return content
def get_recipients(email: dict) -> Iterable[str]:
return chain(email.get('to') or [], email.get('cc') or [], email.get('bcc') or [])
def get_domains(email: dict) -> Iterable[str]:
return frozenset(get_domain(address) for address in get_recipients(email))
def get_domain(address: str) -> str:
return address.split('@')[-1]
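# e.g. get_domain('[email protected]') -> 'example.com'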
def ensure_has_sent_at(email: dict):
if not email.get('sent_at'):
email['sent_at'] = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
def _get_image_type(response: Response, url: str) -> Optional[str]:
content_type = response.headers.get('Content-Type')
if not content_type:
content_type = guess_type(url)[0]
return content_type
def _is_already_small(size: Tuple[int, int]) -> bool:
width, height = size
return width <= MAX_WIDTH_IMAGES and height <= MAX_HEIGHT_IMAGES
def _change_image_size(image_content_bytes: bytes) -> bytes:
image_bytes = BytesIO(image_content_bytes)
image_bytes.seek(0)
image = Image.open(image_bytes)
if _is_already_small(image.size):
return image_content_bytes
new_size = (MAX_WIDTH_IMAGES, MAX_HEIGHT_IMAGES)
image.thumbnail(new_size, Image.ANTIALIAS)
new_image = BytesIO()
image.save(new_image, image.format)
new_image.seek(0)
new_image_bytes = new_image.read()
return new_image_bytes
def _fetch_image_to_base64(image_url: str) -> Optional[str]:
response = http_get(image_url)
if not response.ok:
return None
image_type = _get_image_type(response, image_url)
if not image_type:
return None
if not response.content:
return None
small_image_bytes = _change_image_size(response.content)
small_image_base64 = to_base64(small_image_bytes)
return f'data:{image_type};base64,{small_image_base64}'
def _is_valid_url(url: Optional[str]) -> bool:
if not url:
return False
has_http_prefix = url.startswith('http://')
has_https_prefix = url.startswith('https://')
return has_http_prefix or has_https_prefix
def format_inline_images(email: dict, on_error: Callable) -> dict:
email_body = email.get('body', '')
if not email_body:
return email
soup = BeautifulSoup(email_body, 'html.parser')
image_tags = soup.find_all('img')
if not image_tags:
return email
for image_tag in image_tags:
image_url = image_tag.get('src')
if not _is_valid_url(image_url):
continue
try:
encoded_image = _fetch_image_to_base64(image_url)
except Exception as ex:
on_error('Unable to inline image %s: %s', image_url, ex)
else:
if encoded_image:
image_tag['src'] = encoded_image
new_email = dict(email)
new_email['body'] = str(soup)
return new_email
class MimeEmailParser(LogMixin):<|fim▁hole|> def __call__(self, mime_email: str) -> dict:
email = parse_mime_email(mime_email)
email = format_attachments(email)
email = format_inline_images(email, self.log_warning)
return email<|fim▁end|> | |
<|file_name|>MySQLDatabaseMeta.java<|end_file_name|><|fim▁begin|>package com.xxl.job.database.meta;
import com.xxl.job.database.dboperate.DBManager;
/**
 * MySQL-specific special-handling definitions.
*
*/
public class MySQLDatabaseMeta extends DatabaseMeta {
public MySQLDatabaseMeta() {<|fim▁hole|><|fim▁end|> | this.dbType = DBManager.MYSQL_DB;
}
} |
<|file_name|>Message.js<|end_file_name|><|fim▁begin|>/**
* Vasya Hobot
*
* Copyright (c) 2013-2014 Vyacheslav Slinko
* Licensed under the MIT License
*/
function Message(chat, body) {
this._chat = chat;
this._body = body;
}
Message.prototype.getChat = function() {
return this._chat;<|fim▁hole|> return this._body;
};
module.exports = Message;<|fim▁end|> | };
Message.prototype.getBody = function() { |
<|file_name|>remote.py<|end_file_name|><|fim▁begin|>import time
import random
from random import randint
# from library import Trigger, Axis
# from library import PS4
from library import Joystick
import RPi.GPIO as GPIO # remove!!!
from emotions import angry, happy, confused
# from pysabertooth import Sabertooth
# from smc import SMC
from library import LEDDisplay
from library import factory
from library import reset_all_hw
# Leg Motor Speed Global
global_LegMotor = 70
# # Happy Emotion
# def happy(leds, servos, mc, audio):
# print("4")
# print("Happy")
#
# # Dome Motor Initialization
# # mc = SMC(dome_motor_port, 115200)
# # mc.init()
#
# # Spins Motor
# # mc.init()
# mc.speed(3200)
#
# # LED Matrix Green
# # breadboard has mono
# # R2 has bi-color leds
# # mono:0 bi:1
# # led_type = 0
# # leds = [0]*5
# # leds[1] = LEDDisplay(0x70, led_type)
# # leds[2] = LEDDisplay(0x71, led_type)
# # leds[3] = LEDDisplay(0x72, led_type)
# # leds[4] = LEDDisplay(0x73, led_type)
#
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# for i in range(1, 5):
# leds[i].set(x, y, 1)
#
# for i in range(1, 5):
# leds[i].write()
#
# # Servo Wave
# # s0.angle = 0
# # time.sleep(0.2)
# # s1.angle = 0
# # time.sleep(0.2)
# # s2.angle = 0
# # time.sleep(0.2)
# # s3.angle = 0
# # time.sleep(0.2)
# # s4.angle = 0
# # time.sleep(0.5)
# # s4.angle = 130
# # time.sleep(0.2)
# # s3.angle = 130
# # time.sleep(0.2)
# # s2.angle = 130
# # time.sleep(0.2)
# # s1.angle = 130
# # time.sleep(0.2)
# # s0.angle = 130
#
# for a in [0, 130]:
# for i in range(4):
# servos[i].angle = a
# time.sleep(0.2)
# time.sleep(0.5)
#
# time.sleep(1.5)
# mc.stop()
# time.sleep(1.5)
# for i in range(1, 5):
# leds[i].clear()
#
#
# # Confused Emotion
# def confused(leds, servos, mc, audio):
# print("5")
# print("Confused")
# # LED Matrix Yellow
# # leds = [0]*5
# # leds[1] = LEDDisplay(0x70, 1)
# # leds[2] = LEDDisplay(0x71, 1)
# # leds[3] = LEDDisplay(0x72, 1)
# # leds[4] = LEDDisplay(0x73, 1)
#
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# for i in range(1, 5):
# leds[i].set(x, y, 3)
# for i in range(1, 5):
# leds[i].write()
# time.sleep(3)
# for i in range(1, 5):
# leds[i].clear()
#
#
# # Angry Emotion
# def angry(leds, servos, mc, audio):
# print("6")
# print("Angry")
# # LED Matrix Red
# # leds = [0]*5
# # leds[1] = LEDDisplay(0x70, 1)
# # leds[2] = LEDDisplay(0x71, 1)
# # leds[3] = LEDDisplay(0x72, 1)
# # leds[4] = LEDDisplay(0x73, 1)
#
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# for i in range(1, 5):
# leds[i].set(x, y, 2)
#
# for i in range(1, 5):
# leds[i].write()
#
# # Plays Imperial Theme Sound
# audio.sound('imperial')
#
# # Servo Open and Close
# # s0.angle = 0
# # s1.angle = 0
# # s2.angle = 0
# # s3.angle = 0
# # s4.angle = 0
# # time.sleep(1)
# # s4.angle = 130
# # s3.angle = 130
# # s2.angle = 130
# # s1.angle = 130
# # s0.angle = 130
#
# for a in [0, 130]:
# for i in range(5):
# servos[i].angle = a
# time.sleep(1)
#
# time.sleep(3)
# for i in range(1, 5):
# leds[i].clear()
#######################################
# original remote
#######################################
# # Remote Mode
# def remote(remoteflag, namespace):
# print("Remote")
#
# # create objects
# (leds, dome, legs, servos, Flash) = factory(['leds', 'dome', 'legs', 'servos', 'flashlight'])
#
# # initalize everything
# dome.init()
# dome.speed(0)
#
# legs.drive(1, 0)
# legs.drive(2, 0)
#
# for s in servos:
# s.angle = 0
# time.sleep(0.25)
#
# # what is this???
# GPIO.setmode(GPIO.BCM)
# GPIO.setwarnings(False)
# GPIO.setup(26, GPIO.OUT)
#
# # Joystick Initialization
# js = Joystick()
#
# # get audio
# audio = namespace.audio
#
# # Flash = FlashlightPWM(15)
# # Flash = namespace.flashlight
#
# while(remoteflag.is_set()):
# try:
# # Button Initialization
# ps4 = js.get()
# btnSquare = ps4.buttons[0]
# btnTriangle = ps4.buttons[1]
# btnCircle = ps4.buttons[2]
# btnX = ps4.buttons[3]
# btnLeftStickLeftRight = ps4.leftStick.y
# btnLeftStickUpDown = ps4.leftStick.x
# btnRightStickLeftRight = ps4.rightStick.y
# btnRightStickUpDown = ps4.rightStick.x
# Left1 = ps4.shoulder[0]
# Right1 = ps4.shoulder[1]
# Left2 = ps4.triggers.x
# Right2 = ps4.triggers.y
# hat = ps4.hat
#
# # print("PRINT")
#
# # Button Controls
# if hat == 1:
# # Happy Emotion
# print("Arrow Up Pressed")
# happy(leds, servos, dome, audio) # namespace.emotions['happy'](leds, servos, mc, audio)
# if hat == 8:
# # Confused Emotion
# print("Arrow Left Pressed")
# confused(leds, servos, dome, audio)
# if hat == 2:
# # Angry Emotion
# print("Arrow Right Pressed")
# angry(leds, servos, dome, audio)
# if hat == 4:
# print("Arrow Down Pressed")
# if btnSquare == 1:
# # word = random_char(2)
# audio.speak_random(2)
# time.sleep(0.5)
# if btnTriangle == 1:
# # FlashLight ON
# GPIO.output(26, GPIO.HIGH)
# Flash.pwm.set_pwm(15, 0, 130)
# if btnCircle == 1:
# # FlashLight OFF
# GPIO.output(26, GPIO.LOW)
# Flash.pwm.set_pwm(15, 0, 0)
# if btnX == 1:
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# if x == randint(0, 8) or y == randint(0, 8):
# for i in range(1, 5):
# leds[i].set(x, y, randint(0, 4))
# else:
# for i in range(1, 5):
# leds[i].set(x, y, 4)
# for i in range(1, 5):
# leds[i].write()
# time.sleep(0.1)
# for i in range(1, 5):
# leds[i].clear()
# if Left1 == 1:
# # Dome Motor Forward
# dome.speed(3200)
# time.sleep(2)
# dome.speed(0)
# if Right1 == 1:
# # Dome Motor Backward
# dome.speed(-3200)
# time.sleep(2)
# dome.speed(0)
# # if Left1 == 0 or Right1 == 0:
# # # Dome Motor Stop
# # dome.speed(0)
# # if Left2 > 1:
# # # Servo Open
# # s0.angle = 0
# # s1.angle = 0
# # s2.angle = 0
# # s3.angle = 0
# # s4.angle = 0
# # Flash.pwm.set_pwm(15, 0, 3000)
# #
# # if Right2 > 1:
# # # Servo Close
# # s0.angle = 130
# # s1.angle = 130
# # s2.angle = 130
# # s3.angle = 130
# # s4.angle = 130
# # Flash.pwm.set_pwm(15, 0, 130)
# if Left2 > 1:
# for s in servos:
# s.angle = 0
# time.sleep(0.25)
# Flash.pwm.set_pwm(15, 0, 300)
# if Right2 > 1:
# for s in servos:
# s.angle = 130
# time.sleep(0.25)
# Flash.pwm.set_pwm(15, 0, 130)
# if btnLeftStickLeftRight < 0.3 and btnLeftStickLeftRight > -0.3:
# legs.drive(1, 0)
# if btnRightStickUpDown < 0.3 and btnRightStickUpDown > -0.3:
# legs.drive(2, 0)
# if btnRightStickUpDown >= 0.3:
# # Right and Left Motor Forward
# legs.drive(1, btnRightStickUpDown*global_LegMotor)
# legs.drive(2, btnRightStickUpDown*-global_LegMotor)
# if btnRightStickUpDown <= -0.3:
# # Right and Left Motor Backward
# legs.drive(1, btnRightStickUpDown*global_LegMotor)
# legs.drive(2, btnRightStickUpDown*-global_LegMotor)
# if btnLeftStickLeftRight <= 0.3:
# # Turn Left
# legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
# legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
# if btnLeftStickLeftRight >= -0.3:
# # Turn Right
# legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
# legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
#
# except KeyboardInterrupt:
# print('js exiting ...')
# return
# return
def remote_func(hw, ns):
    """Remote-control mode; runs while ns.current_state == 3."""
    print("Remote")
dome = hw['dome']
dome.speed(0)
legs = hw['legs']
legs.drive(1, 0)
legs.drive(2, 0)
flashlight = hw['flashlight']
audio = hw['audio']
audio.speak('start')
while ns.current_state == 3:
print('remote ...')
spd = random.randint(0, 40)
legs.drive(1, spd)
legs.drive(2, spd)
dome.speed(spd)
time.sleep(0.5)
legs.drive(1, 0)
legs.drive(2, 0)
dome.speed(0)
time.sleep(0.1)
    # NOTE: this early return makes the joystick code below unreachable;
    # the random-speed loop above serves as a motor bench test.
    return
###### real loop here #####
# Joystick Initialization
    js = Joystick()
    # the loop below also references these; the hw dict is assumed to
    # provide 'leds' and 'servos', as in the commented-out factory() above
    leds = hw['leds']
    servos = hw['servos']
    Flash = flashlight  # alias kept to match the original joystick code
while ns.current_state == 3:
try:
# Button Initialization<|fim▁hole|> btnCircle = ps4.buttons[2]
btnX = ps4.buttons[3]
btnLeftStickLeftRight = ps4.leftStick.y
btnLeftStickUpDown = ps4.leftStick.x
btnRightStickLeftRight = ps4.rightStick.y
btnRightStickUpDown = ps4.rightStick.x
Left1 = ps4.shoulder[0]
Right1 = ps4.shoulder[1]
Left2 = ps4.triggers.x
Right2 = ps4.triggers.y
hat = ps4.hat
# print("PRINT")
# Button Controls
if hat == 1:
# Happy Emotion
print("Arrow Up Pressed")
happy(leds, servos, dome, audio) # namespace.emotions['happy'](leds, servos, mc, audio)
if hat == 8:
# Confused Emotion
print("Arrow Left Pressed")
confused(leds, servos, dome, audio)
if hat == 2:
# Angry Emotion
print("Arrow Right Pressed")
angry(leds, servos, dome, audio)
if hat == 4:
print("Arrow Down Pressed")
if btnSquare == 1:
# word = random_char(2)
audio.speak_random(2)
time.sleep(0.5)
if btnTriangle == 1:
# FlashLight ON
GPIO.output(26, GPIO.HIGH)
Flash.pwm.set_pwm(15, 0, 130)
if btnCircle == 1:
# FlashLight OFF
GPIO.output(26, GPIO.LOW)
Flash.pwm.set_pwm(15, 0, 0)
if btnX == 1:
for x in [0, 1, 2, 3, 4, 5, 6, 7]:
for y in [0, 1, 2, 3, 4, 5, 6, 7]:
if x == randint(0, 8) or y == randint(0, 8):
for i in range(1, 5):
leds[i].set(x, y, randint(0, 4))
else:
for i in range(1, 5):
leds[i].set(x, y, 4)
for i in range(1, 5):
leds[i].write()
time.sleep(0.1)
for i in range(1, 5):
leds[i].clear()
if Left1 == 1:
# Dome Motor Forward
dome.speed(3200)
time.sleep(2)
dome.speed(0)
if Right1 == 1:
# Dome Motor Backward
dome.speed(-3200)
time.sleep(2)
dome.speed(0)
# if Left1 == 0 or Right1 == 0:
# # Dome Motor Stop
# dome.speed(0)
# if Left2 > 1:
# # Servo Open
# s0.angle = 0
# s1.angle = 0
# s2.angle = 0
# s3.angle = 0
# s4.angle = 0
# Flash.pwm.set_pwm(15, 0, 3000)
#
# if Right2 > 1:
# # Servo Close
# s0.angle = 130
# s1.angle = 130
# s2.angle = 130
# s3.angle = 130
# s4.angle = 130
# Flash.pwm.set_pwm(15, 0, 130)
if Left2 > 1:
for s in servos:
s.angle = 0
time.sleep(0.25)
Flash.pwm.set_pwm(15, 0, 300)
if Right2 > 1:
for s in servos:
s.angle = 130
time.sleep(0.25)
Flash.pwm.set_pwm(15, 0, 130)
if btnLeftStickLeftRight < 0.3 and btnLeftStickLeftRight > -0.3:
legs.drive(1, 0)
if btnRightStickUpDown < 0.3 and btnRightStickUpDown > -0.3:
legs.drive(2, 0)
if btnRightStickUpDown >= 0.3:
# Right and Left Motor Forward
legs.drive(1, btnRightStickUpDown*global_LegMotor)
legs.drive(2, btnRightStickUpDown*-global_LegMotor)
if btnRightStickUpDown <= -0.3:
# Right and Left Motor Backward
legs.drive(1, btnRightStickUpDown*global_LegMotor)
legs.drive(2, btnRightStickUpDown*-global_LegMotor)
            if btnLeftStickLeftRight <= -0.3:
# Turn Left
legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
            if btnLeftStickLeftRight >= 0.3:
# Turn Right
legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
except KeyboardInterrupt:
print('js exiting ...')
return
# exiting, reset all hw
reset_all_hw(hw)
return<|fim▁end|> | ps4 = js.get()
btnSquare = ps4.buttons[0]
btnTriangle = ps4.buttons[1] |
<|file_name|>Content.js<|end_file_name|><|fim▁begin|>import React from 'react';
<<<<<<< HEAD
import { Switch, Route } from 'react-router-dom';
import Home from './Home';
import About from './About';
import Blog from './Blog';
import Resume from './Resume';
import Error404 from './Error404';
=======
// import GithubForm from './forms/github/GithubForm';
import GithubRecent from './forms/github/RecentList';
import './Content.css';<|fim▁hole|>
class Content extends React.Component {
render() {
return (
<div className="app-content">
<<<<<<< HEAD
<Switch>
<Route exact path="/" component={Home} />
{/* <Route exact path="/about" component={About} /> */}
<Route component={Error404} />
</Switch>
=======
<div className="content-description">
<br />College Student. Love Coding. Interested in Web and Machine Learning.
</div>
<hr />
<div className="content-list">
<div className="list-projects">
<h3>Recent Projects</h3>
{/* <GithubRecent userName="slkjse9" standardDate={5259492000} /> */}
<GithubRecent userName="hejuby" standardDate={3.154e+10} />
{/* <h2>Activity</h2> */}
<h3>Experience</h3>
<h3>Education</h3>
<ul>
<li><h4>Computer Science</h4>Colorado State University, Fort Collins (2017 -)</li>
</ul>
<br />
</div>
</div>
{/* <div className="content-home-github-recent-title">
<h2>Recent Works</h2>
<h3>updated in 2 months</h3>
</div> */}
{/* <h3>Recent</h3>
<GithubForm contentType="recent"/> */}
{/* <h3>Lifetime</h3>
{/* <GithubForm contentType="life"/> */}
{/* <div className="content-home-blog-recent-title">
<h2>Recent Posts</h2>
<h3>written in 2 months</h3>
</div>
<BlogRecent /> */}
>>>>>>> 23d814bedfd5c07e05ea49d9a90053074a4c829a
</div>
);
}
}
export default Content;<|fim▁end|> | >>>>>>> 23d814bedfd5c07e05ea49d9a90053074a4c829a |
<|file_name|>px-validation-custom-tests.js<|end_file_name|><|fim▁begin|>// This is the wrapper for custom tests, called upon web components ready state
function runCustomTests() {
// Place any setup steps like variable declaration and initialization here
// This is the placeholder suite to place custom tests in
// Use testCase(options) for a more convenient setup of the test cases
suite('Custom Automation Tests for px-validation', function() {
var px_validation = document.getElementById('px_validation_1');
suiteSetup(function(done){
flush(function(){<|fim▁hole|>
test('Check there is a single px-validator child defined on test fixture', function(){
assert.lengthOf(Polymer.dom(px_validation).children, 1);
});
test('Integer isNumber validation returns true', function() {
assert.isTrue(px_validation.validate(2).passedValidation);
});
test('String representation of number via isNumber validation returns true', function() {
assert.isTrue(px_validation.validate('2').passedValidation);
});
});
}<|fim▁end|> | done();
});
}); |
<|file_name|>dsf_with_sic.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd
class DSF_SIC_Map(object):
"""docstring for SIC_Map"""
def __init__(self, dsffile = 'crsp/dsf.csv', sicfile = 'sic_codes.txt'):
self.dsf = pd.read_csv("dsf.csv", dtype = {'CUSIP': np.str, 'PRC': np.float}, na_values = {'PRC': '-'})
self.sic = pd.read_table(sicfile, header = 1)
self.sic.columns = ['HSICCD', 'SICNAME']
def process(self, day = 20100101, columns = ['PERMNO', 'DATE', 'PRC', 'VOL', 'SHROUT', 'RET', 'HSICCD']):
self.dsf_startdate(date = day)
self.dsf_subset(to_keep = columns)
self.sic_merge()
def dsf_startdate(self, date = 20100101):
self.dsf = self.dsf[self.dsf.DATE >= date]
def dsf_subset(self, to_keep = ['PERMNO', 'DATE', 'PRC', 'VOL', 'SHROUT', 'RET', 'HSICCD']):<|fim▁hole|><|fim▁end|> | self.dsf = self.dsf[to_keep]
def sic_merge(self):
self.clean_dsf = self.dsf.merge(self.sic, how = "left") |
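# A minimal usage sketch (assumes the two input files exist with the columns
# referenced above; clean_dsf then holds daily records with SICNAME attached):
#   mapper = DSF_SIC_Map(dsffile='crsp/dsf.csv', sicfile='sic_codes.txt')
#   mapper.process(day=20120101)
#   print(mapper.clean_dsf.head())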
<|file_name|>basicscroll.js<|end_file_name|><|fim▁begin|>var url = document.URL;
var array = url.split("/");
var base = array[3];
if (array[2] == 'localhost') {
var staticurl = '/' + base + '/client/dashboard/reporting';
//var url_action = array[6].split("?")[0];
} else {
var staticurl = '/client/dashboard/reporting';
// var url_action = array[5].split("?")[0];
}
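// worked example (a sketch): for "http://localhost/myapp/client/x",
// array[2] is "localhost" and base (array[3]) is "myapp", so staticurl
// becomes "/myapp/client/dashboard/reporting"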
$(document).ready(function(){
$('.basic_info_menu').click(function(){
        var $url = $(this).find('a').attr("href");
var res = $url.split("#");
var hash = '#'+res[1];
window.location.hash = hash;
leftNavigation();
// now scroll to element with that id<|fim▁hole|>
});
$('#selectall').click(function(){
var select = $("#selectall").is(":checked");
if(select)
{
$('.permission_check').prop('checked', true);
$('.permission_check').css("pointer-events", "none");
}
else
{
$('.permission_check').prop('checked', false);
$('.permission_check').css("pointer-events", "auto");
}
});
$('#select_all_0').click(function(){
var select = $("#select_all_0").is(":checked");
if(select)
{
$('#select_all_0').removeClass('permission_check');
$('.permission_check').prop('checked', true);
$('.permission_check').css("pointer-events", "none");
}
else
{
$('.permission_check').prop('checked', false);
$('.permission_check').css("pointer-events", "auto");
}
});
$('.benefit_plan_info').click(function(){
        var $url = $(this).find('a').attr("href");
var res = $url.split("#");
var hash = '#'+res[1];
window.location.hash = hash;
benefitNavigation();
// now scroll to element with that id
});
});<|fim▁end|> | |
<|file_name|>test_copyonwrite_mkdir.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement
from nose.tools import (
eq_ as eq,
)
from filesystem.test.util import (
maketemp,
assert_raises,
)
import errno
import os
import filesystem
import filesystem.copyonwrite
def test_mkdir():
tmp = maketemp()
filesystem.copyonwrite.path(filesystem.path(tmp)).child('foo').mkdir()
foo = os.path.join(tmp, 'foo')<|fim▁hole|> p = filesystem.copyonwrite.path(filesystem.path(tmp)).child('foo')
with p.open('w') as f:
f.write('bar')
e = assert_raises(
OSError,
p.mkdir,
)
eq(e.errno, errno.EEXIST)<|fim▁end|> | assert not os.path.isdir(foo)
def test_mkdir_bad_exists():
tmp = maketemp() |
<|file_name|>_showexponent.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class ShowexponentValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="showexponent", parent_name="parcats.line.colorbar", **kwargs
):
super(ShowexponentValidator, self).__init__(
plotly_name=plotly_name,<|fim▁hole|> role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["all", "first", "last", "none"]),
**kwargs
)<|fim▁end|> | parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"), |
<|file_name|>mesh2vtk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import numpy as np
mdir = "mesh3d/"
fname = "out_p6-p4-p8"
####################
print "input mesh data file"
f1 = open(mdir+fname+".mesh", 'r')<|fim▁hole|>
for line in f1:
if line.startswith("Vertices"): break
pcount = int(f1.next())
xyz = np.empty((pcount, 3), dtype=np.float)
for t in range(pcount):
xyz[t] = map(float,f1.next().split()[0:3])
for line in f1:
if line.startswith("Triangles"): break
trisc = int(f1.next())
tris = np.empty((trisc,4), dtype=int)
for t in range(trisc):
tris[t] = map(int,f1.next().split())
for line in f1:
if line.startswith("Tetrahedra"): break
tetsc = int(f1.next())
tets = np.empty((tetsc,5), dtype=int)
for t in range(tetsc):
tets[t] = map(int,f1.next().split())
f1.close()
####################
print "identify geometry"
ftype = [('v0', np.int),('v1', np.int),('v2', np.int),('label', 'S2')]
# each surface triangle is listed twice (once per adjacent region), so pair
# the two region labels into a single two-character face label
faces = np.empty(trisc/2, dtype=ftype)
for i in range(len(faces)):
faces[i] = (tris[2*i][0],tris[2*i][1],tris[2*i][2],str(tris[2*i][3])+str(tris[2*i+1][3]))
face_list,face_count = np.unique(faces['label'], return_counts=True)
vtype = [('v0', np.int),('v1', np.int),('v2', np.int),('v3', np.int),('label', 'S1')]
vols = np.empty(tetsc, dtype=vtype)
for i in range(tetsc):
vols[i] = (tets[i][0],tets[i][1],tets[i][2],tets[i][3],str(tets[i][4]))
vol_list,vol_count = np.unique(vols['label'], return_counts=True)
####################
print "output vtk data files for faces"
for i, f in enumerate(face_list):
f2 = open(mdir+fname+"_"+face_list[i]+".vtk", 'w')
f2.write("# vtk DataFile Version 2.0\n")
f2.write("mesh data\n")
f2.write("ASCII\n")
f2.write("DATASET UNSTRUCTURED_GRID\n")
f2.write("POINTS "+str(pcount)+" float\n") # overkill, all points!
for v in xyz:
f2.write(str(v[0]-35.33)+' '+str(35.33-v[1])+' '+str(12.36-v[2])+'\n')
f2.write("CELLS "+str(face_count[i])+" "+str(face_count[i]*4)+"\n")
for v in faces:
if v[3] == f:
f2.write("3 "+str(v[0]-1)+' '+str(v[1]-1)+' '+str(v[2]-1)+'\n')
f2.write("CELL_TYPES "+str(face_count[i])+"\n")
    for t in range(face_count[i]): f2.write("5 ") # VTK cell type 5 = triangle
f2.write("\n")
f2.close()
####################
print "output vtk data files for volumes"
for i, f in enumerate(vol_list):
f2 = open(mdir+fname+"_"+vol_list[i]+".vtk", 'w')
f2.write("# vtk DataFile Version 2.0\n")
f2.write("mesh data\n")
f2.write("ASCII\n")
f2.write("DATASET UNSTRUCTURED_GRID\n")
f2.write("POINTS "+str(pcount)+" float\n") # overkill, all points!
for v in xyz:
f2.write(str(v[0]-35.33)+' '+str(35.33-v[1])+' '+str(12.36-v[2])+'\n')
f2.write("CELLS "+str(vol_count[i])+" "+str(vol_count[i]*5)+"\n")
for v in vols:
if v[4] == f:
f2.write("4 "+str(v[0]-1)+' '+str(v[1]-1)+' '+str(v[2]-1)+' '+str(v[3]-1)+'\n')
f2.write("CELL_TYPES "+str(vol_count[i])+"\n")
    for t in range(vol_count[i]): f2.write("10 ") # VTK cell type 10 = tetrahedron
f2.write("\n")
f2.close()
####################<|fim▁end|> | |
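# Typical run (a sketch; expects mesh3d/out_p6-p4-p8.mesh as input):
#   $ python mesh2vtk.py
# which writes one .vtk file per surface label and one per volume label.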
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import urllib.parse
from upol_search_engine.upol_crawler.tools import blacklist, robots
def validate_regex(url, regex):
"""Check if url is validate with regex"""
return regex.match(url)
def validate_anchor(url):
"""Check if url include anchor"""
cheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
if anchor:
return False
else:
return True
def validate_phpbb(url):
"""Validate if url from phpBB system is valid or blacklisted"""
scheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
path = path + qs + anchor
url_keywords = ['posting.php',
'ucp.php',
'view=print',
'memberlist.php',
'mark']<|fim▁hole|> if url_keyword in path:
return False
return True
def validate_wiki(url):
"""Validate if url from wiki system is valid or blacklisted"""
scheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
path = path + qs + anchor
url_keywords = ['&']
for url_keyword in url_keywords:
if url_keyword in path:
return False
return True
def validate(url, regex, blacklist_list):
"""Complete validator"""
if not validate_anchor(url):
return False, 'UrlHasAnchor'
if not validate_regex(url, regex):
return False, 'UrlInvalidRegex'
if blacklist.is_url_blocked(url, blacklist_list):
return False, 'UrlIsBlacklisted'
if not robots.is_crawler_allowed(url):
return False, 'UrlRobotsBlocked'
return True, None<|fim▁end|> |
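# Example of the composite check (a sketch; the regex and blacklist normally
# come from the crawler settings):
#   import re
#   domain_regex = re.compile(r'^https?://([\w\-]+\.)*upol\.cz')
#   validate('http://www.upol.cz/page#section', domain_regex, [])
#   # -> (False, 'UrlHasAnchor') because the URL carries a fragment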
for url_keyword in url_keywords: |
<|file_name|>ppaux.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::def;
use middle::subst::{VecPerParamSpace,Subst};
use middle::subst;
use middle::ty::{BoundRegion, BrAnon, BrNamed};
use middle::ty::{ReEarlyBound, BrFresh, ctxt};
use middle::ty::{ReFree, ReScope, ReInfer, ReStatic, Region, ReEmpty};
use middle::ty::{ReSkolemized, ReVar, BrEnv};
use middle::ty::{mt, Ty, ParamTy};
use middle::ty::{ty_bool, ty_char, ty_struct, ty_enum};
use middle::ty::{ty_err, ty_str, ty_vec, ty_float, ty_bare_fn};
use middle::ty::{ty_param, ty_ptr, ty_rptr, ty_tup, ty_open};
use middle::ty::{ty_unboxed_closure};
use middle::ty::{ty_uniq, ty_trait, ty_int, ty_uint, ty_infer};
use middle::ty;
use middle::ty_fold::TypeFoldable;
use std::collections::HashMap;
use std::collections::hash_state::HashState;
use std::hash::{Hash, Hasher};
use std::rc::Rc;
use syntax::abi;
use syntax::ast_map;
use syntax::codemap::{Span, Pos};
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::{ast, ast_util};
use syntax::owned_slice::OwnedSlice;
/// Produces a string suitable for debugging output.
pub trait Repr<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String;
}
/// Produces a string suitable for showing to the user.
pub trait UserString<'tcx> : Repr<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String;
}
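// An illustrative contrast (a sketch, not from the original source): for
// `ty::ReStatic`, `repr()` yields the debugging string "ReStatic" while
// `user_string()` yields "'static" -- see the two `impl`s for `ty::Region`
// further down.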
pub fn note_and_explain_region(cx: &ctxt,
prefix: &str,
region: ty::Region,
suffix: &str) -> Option<Span> {
match explain_region_and_span(cx, region) {
(ref str, Some(span)) => {
cx.sess.span_note(
span,
&format!("{}{}{}", prefix, *str, suffix)[]);
Some(span)
}
(ref str, None) => {
cx.sess.note(
&format!("{}{}{}", prefix, *str, suffix)[]);
None
}
}
}
/// When a free region is associated with `item`, how should we describe the item in the error
/// message.
fn item_scope_tag(item: &ast::Item) -> &'static str {
match item.node {
ast::ItemImpl(..) => "impl",
ast::ItemStruct(..) => "struct",
ast::ItemEnum(..) => "enum",
ast::ItemTrait(..) => "trait",
ast::ItemFn(..) => "function body",
_ => "item"
}
}
pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
-> (String, Option<Span>) {
return match region {
ReScope(scope) => {
match cx.map.find(scope.node_id()) {
Some(ast_map::NodeBlock(ref blk)) => {
explain_span(cx, "block", blk.span)
}
Some(ast_map::NodeExpr(expr)) => {
match expr.node {
ast::ExprCall(..) => explain_span(cx, "call", expr.span),
ast::ExprMethodCall(..) => {
explain_span(cx, "method call", expr.span)
},
ast::ExprMatch(_, _, ast::MatchSource::IfLetDesugar { .. }) =>
explain_span(cx, "if let", expr.span),
ast::ExprMatch(_, _, ast::MatchSource::WhileLetDesugar) => {
explain_span(cx, "while let", expr.span)
},
ast::ExprMatch(..) => explain_span(cx, "match", expr.span),
_ => explain_span(cx, "expression", expr.span)
}
}
Some(ast_map::NodeStmt(stmt)) => {
explain_span(cx, "statement", stmt.span)
}
Some(ast_map::NodeItem(it)) => {
let tag = item_scope_tag(&*it);
explain_span(cx, tag, it.span)
}
Some(_) | None => {
// this really should not happen
(format!("unknown scope: {:?}. Please report a bug.", scope), None)
}
}
}
ReFree(ref fr) => {
let prefix = match fr.bound_region {
BrAnon(idx) => {
format!("the anonymous lifetime #{} defined on", idx + 1)
}
BrFresh(_) => "an anonymous lifetime defined on".to_string(),
_ => {
format!("the lifetime {} as defined on",
bound_region_ptr_to_string(cx, fr.bound_region))
}
};
match cx.map.find(fr.scope.node_id()) {
Some(ast_map::NodeBlock(ref blk)) => {
let (msg, opt_span) = explain_span(cx, "block", blk.span);
(format!("{} {}", prefix, msg), opt_span)
}
Some(ast_map::NodeItem(it)) => {
let tag = item_scope_tag(&*it);
let (msg, opt_span) = explain_span(cx, tag, it.span);
(format!("{} {}", prefix, msg), opt_span)
}
Some(_) | None => {
// this really should not happen
(format!("{} unknown free region bounded by scope {:?}", prefix, fr.scope), None)
}
}
}
ReStatic => { ("the static lifetime".to_string(), None) }
ReEmpty => { ("the empty lifetime".to_string(), None) }
ReEarlyBound(_, _, _, name) => {
(format!("{}", token::get_name(name)), None)
}
// I believe these cases should not occur (except when debugging,
// perhaps)
ty::ReInfer(_) | ty::ReLateBound(..) => {
(format!("lifetime {:?}", region), None)
}
};
fn explain_span(cx: &ctxt, heading: &str, span: Span)
-> (String, Option<Span>) {
let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo);
(format!("the {} at {}:{}", heading, lo.line, lo.col.to_uint()),
Some(span))
}
}
pub fn bound_region_ptr_to_string(cx: &ctxt, br: BoundRegion) -> String {
bound_region_to_string(cx, "", false, br)
}
pub fn bound_region_to_string(cx: &ctxt,
prefix: &str, space: bool,
br: BoundRegion) -> String {
let space_str = if space { " " } else { "" };
if cx.sess.verbose() {
return format!("{}{}{}", prefix, br.repr(cx), space_str)
}
match br {
BrNamed(_, name) => {
format!("{}{}{}", prefix, token::get_name(name), space_str)
}
BrAnon(_) | BrFresh(_) | BrEnv => prefix.to_string()
}
}
// In general, if you are giving a region error message,
// you should use `explain_region()` or, better yet,
// `note_and_explain_region()`
pub fn region_ptr_to_string(cx: &ctxt, region: Region) -> String {
region_to_string(cx, "&", true, region)
}
pub fn region_to_string(cx: &ctxt, prefix: &str, space: bool, region: Region) -> String {
let space_str = if space { " " } else { "" };
if cx.sess.verbose() {
return format!("{}{}{}", prefix, region.repr(cx), space_str)
}
// These printouts are concise. They do not contain all the information
// the user might want to diagnose an error, but there is basically no way
// to fit that into a short string. Hence the recommendation to use
// `explain_region()` or `note_and_explain_region()`.
match region {
ty::ReScope(_) => prefix.to_string(),
ty::ReEarlyBound(_, _, _, name) => {
token::get_name(name).get().to_string()
}
ty::ReLateBound(_, br) => bound_region_to_string(cx, prefix, space, br),
ty::ReFree(ref fr) => bound_region_to_string(cx, prefix, space, fr.bound_region),
ty::ReInfer(ReSkolemized(_, br)) => {
bound_region_to_string(cx, prefix, space, br)
}
ty::ReInfer(ReVar(_)) => prefix.to_string(),
ty::ReStatic => format!("{}'static{}", prefix, space_str),
ty::ReEmpty => format!("{}'<empty>{}", prefix, space_str),
}
}
pub fn mutability_to_string(m: ast::Mutability) -> String {
match m {
ast::MutMutable => "mut ".to_string(),
ast::MutImmutable => "".to_string(),
}
}
pub fn mt_to_string<'tcx>(cx: &ctxt<'tcx>, m: &mt<'tcx>) -> String {
format!("{}{}",
mutability_to_string(m.mutbl),
ty_to_string(cx, m.ty))
}
pub fn trait_store_to_string(cx: &ctxt, s: ty::TraitStore) -> String {
match s {
ty::UniqTraitStore => "Box ".to_string(),
ty::RegionTraitStore(r, m) => {
format!("{}{}", region_ptr_to_string(cx, r), mutability_to_string(m))
}
}
}
pub fn vec_map_to_string<T, F>(ts: &[T], f: F) -> String where
F: FnMut(&T) -> String,
{
let tstrs = ts.iter().map(f).collect::<Vec<String>>();
format!("[{}]", tstrs.connect(", "))
}
pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>,
opt_def_id: Option<ast::DefId>,
unsafety: ast::Unsafety,
abi: abi::Abi,
ident: Option<ast::Ident>,
sig: &ty::PolyFnSig<'tcx>)
-> String {
let mut s = String::new();
match unsafety {
ast::Unsafety::Normal => {}
ast::Unsafety::Unsafe => {
s.push_str(unsafety.to_string().as_slice());
s.push(' ');
}
};
if abi != abi::Rust {
s.push_str(&format!("extern {} ", abi.to_string())[]);
};
s.push_str("fn");
match ident {
Some(i) => {
s.push(' ');
s.push_str(token::get_ident(i).get());
}
_ => { }
}
push_sig_to_string(cx, &mut s, '(', ')', sig, "");
match opt_def_id {
Some(def_id) => {
s.push_str(" {");
let path_str = ty::item_path_str(cx, def_id);
s.push_str(&path_str[]);
s.push_str("}");
}
None => { }
}
s
}
fn closure_to_string<'tcx>(cx: &ctxt<'tcx>, cty: &ty::ClosureTy<'tcx>) -> String {
let mut s = String::new();
match cty.store {
ty::UniqTraitStore => {}
ty::RegionTraitStore(region, _) => {
s.push_str(®ion_to_string(cx, "", true, region)[]);
}
}
match cty.unsafety {
ast::Unsafety::Normal => {}
ast::Unsafety::Unsafe => {
s.push_str(cty.unsafety.to_string().as_slice());
s.push(' ');
}
};
let bounds_str = cty.bounds.user_string(cx);
match cty.store {
ty::UniqTraitStore => {
assert_eq!(cty.onceness, ast::Once);
s.push_str("proc");
push_sig_to_string(cx, &mut s, '(', ')', &cty.sig,
&bounds_str[]);
}
ty::RegionTraitStore(..) => {
match cty.onceness {
ast::Many => {}
ast::Once => s.push_str("once ")
}
push_sig_to_string(cx, &mut s, '|', '|', &cty.sig,
&bounds_str[]);
}
}
s
}
fn push_sig_to_string<'tcx>(cx: &ctxt<'tcx>,
s: &mut String,
bra: char,
ket: char,
sig: &ty::PolyFnSig<'tcx>,
bounds: &str) {
s.push(bra);
let strs = sig.0.inputs
.iter()
.map(|a| ty_to_string(cx, *a))
.collect::<Vec<_>>();
s.push_str(strs.connect(", ").as_slice());
if sig.0.variadic {
s.push_str(", ...");
}
s.push(ket);
if !bounds.is_empty() {
s.push_str(":");
s.push_str(bounds);
}
match sig.0.output {
ty::FnConverging(t) => {
if !ty::type_is_nil(t) {
s.push_str(" -> ");
s.push_str(&ty_to_string(cx, t)[]);
}
}
ty::FnDiverging => {
s.push_str(" -> !");
}
}
}
fn infer_ty_to_string(cx: &ctxt, ty: ty::InferTy) -> String {
let print_var_ids = cx.sess.verbose();
match ty {
ty::TyVar(ref vid) if print_var_ids => vid.repr(cx),
ty::IntVar(ref vid) if print_var_ids => vid.repr(cx),
ty::FloatVar(ref vid) if print_var_ids => vid.repr(cx),
ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => format!("_"),
ty::FreshTy(v) => format!("FreshTy({})", v),
ty::FreshIntTy(v) => format!("FreshIntTy({})", v)
}
}
// pretty print the structural type representation:
match typ.sty {
ty_bool => "bool".to_string(),
ty_char => "char".to_string(),
ty_int(t) => ast_util::int_ty_to_string(t, None).to_string(),
ty_uint(t) => ast_util::uint_ty_to_string(t, None).to_string(),
ty_float(t) => ast_util::float_ty_to_string(t).to_string(),
ty_uniq(typ) => format!("Box<{}>", ty_to_string(cx, typ)),
ty_ptr(ref tm) => {
format!("*{} {}", match tm.mutbl {
ast::MutMutable => "mut",
ast::MutImmutable => "const",
}, ty_to_string(cx, tm.ty))
}
ty_rptr(r, ref tm) => {
let mut buf = region_ptr_to_string(cx, *r);
buf.push_str(&mt_to_string(cx, tm)[]);
buf
}
ty_open(typ) =>
format!("opened<{}>", ty_to_string(cx, typ)),
ty_tup(ref elems) => {
let strs = elems
.iter()
.map(|elem| ty_to_string(cx, *elem))
.collect::<Vec<_>>();
match &strs[] {
[ref string] => format!("({},)", string),
strs => format!("({})", strs.connect(", "))
}
}
ty_bare_fn(opt_def_id, ref f) => {
bare_fn_to_string(cx, opt_def_id, f.unsafety, f.abi, None, &f.sig)
}
ty_infer(infer_ty) => infer_ty_to_string(cx, infer_ty),
ty_err => "[type error]".to_string(),
ty_param(ref param_ty) => {
if cx.sess.verbose() {
param_ty.repr(cx)
} else {
param_ty.user_string(cx)
}
}
ty_enum(did, substs) | ty_struct(did, substs) => {
let base = ty::item_path_str(cx, did);
let generics = ty::lookup_item_type(cx, did).generics;
parameterized(cx, base.as_slice(), substs, &generics, did)
}
ty_trait(box ty::TyTrait {
ref principal, ref bounds
}) => {
let principal = principal.user_string(cx);
let bound_str = bounds.user_string(cx);
let bound_sep = if bound_str.is_empty() { "" } else { " + " };
format!("{}{}{}",
principal,
bound_sep,
bound_str)
}
ty::ty_projection(ref data) => {
format!("<{} as {}>::{}",
data.trait_ref.self_ty().user_string(cx),
data.trait_ref.user_string(cx),
data.item_name.user_string(cx))
}
ty_str => "str".to_string(),
ty_unboxed_closure(ref did, _, substs) => {
let unboxed_closures = cx.unboxed_closures.borrow();
unboxed_closures.get(did).map(|cl| {
closure_to_string(cx, &cl.closure_type.subst(cx, substs))
}).unwrap_or_else(|| {
if did.krate == ast::LOCAL_CRATE {
let span = cx.map.span(did.node);
format!("closure[{}]", span.repr(cx))
} else {
format!("closure")
}
})
}
ty_vec(t, sz) => {
let inner_str = ty_to_string(cx, t);
match sz {
Some(n) => format!("[{}; {}]", inner_str, n),
None => format!("[{}]", inner_str),
}
}
}
}
pub fn explicit_self_category_to_str(category: &ty::ExplicitSelfCategory)
-> &'static str {
match *category {
ty::StaticExplicitSelfCategory => "static",
ty::ByValueExplicitSelfCategory => "self",
ty::ByReferenceExplicitSelfCategory(_, ast::MutMutable) => {
"&mut self"
}
ty::ByReferenceExplicitSelfCategory(_, ast::MutImmutable) => "&self",
ty::ByBoxExplicitSelfCategory => "Box<self>",
}
}
pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
base: &str,
substs: &subst::Substs<'tcx>,
generics: &ty::Generics<'tcx>,
did: ast::DefId)
-> String
{
if cx.sess.verbose() {
if substs.is_noop() {
return format!("{}", base);
} else {
return format!("{}<{},{}>",
base,
substs.regions.repr(cx),
substs.types.repr(cx));
}
}
let mut strs = Vec::new();
match substs.regions {
subst::ErasedRegions => { }
subst::NonerasedRegions(ref regions) => {
for &r in regions.iter() {
let s = region_to_string(cx, "", false, r);
if s.is_empty() {
// This happens when the value of the region
// parameter is not easily serialized. This may be
// because the user omitted it in the first place,
// or because it refers to some block in the code,
// etc. I'm not sure how best to serialize this.
strs.push(format!("'_"));
} else {
strs.push(s)
}
}
}
}
let tps = substs.types.get_slice(subst::TypeSpace);
let ty_params = generics.types.get_slice(subst::TypeSpace);
let has_defaults = ty_params.last().map_or(false, |def| def.default.is_some());
let num_defaults = if has_defaults {
ty_params.iter().zip(tps.iter()).rev().take_while(|&(def, &actual)| {
match def.default {
Some(default) => default.subst(cx, substs) == actual,
None => false
}
}).count()
} else {
0
};
for t in tps[0..(tps.len() - num_defaults)].iter() {
strs.push(ty_to_string(cx, *t))
}
if cx.lang_items.fn_trait_kind(did).is_some() {
format!("{}({}){}",
base,
if strs[0].starts_with("(") && strs[0].ends_with(",)") {
&strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)'
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
&strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')'
} else {
&strs[0][]
},
if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) })
} else if strs.len() > 0 {
format!("{}<{}>", base, strs.connect(", "))
} else {
format!("{}", base)
}
}
pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
let mut s = typ.repr(cx).to_string();
if s.len() >= 32u {
s = (&s[0u..32u]).to_string();
}
return s;
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Option<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&None => "None".to_string(),
&Some(ref t) => t.repr(tcx),
}
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for P<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(*self).repr(tcx)
}
}
impl<'tcx,T:Repr<'tcx>,U:Repr<'tcx>> Repr<'tcx> for Result<T,U> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&Ok(ref t) => t.repr(tcx),
&Err(ref u) => format!("Err({})", u.repr(tcx))
}
}
}
impl<'tcx> Repr<'tcx> for () {
fn repr(&self, _tcx: &ctxt) -> String {
"()".to_string()
}
}
impl<'a, 'tcx, T: ?Sized +Repr<'tcx>> Repr<'tcx> for &'a T {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
Repr::repr(*self, tcx)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Rc<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(&**self).repr(tcx)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Box<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(&**self).repr(tcx)
}
}
fn repr_vec<'tcx, T:Repr<'tcx>>(tcx: &ctxt<'tcx>, v: &[T]) -> String {
vec_map_to_string(v, |t| t.repr(tcx))
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, self)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, &self[])
}
}
// This is necessary to handle types like Option<~[T]>, for which
// autoderef cannot convert the &[T] handler
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, &self[])
}
}
impl<'tcx, T:UserString<'tcx>> UserString<'tcx> for Vec<T> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let strs: Vec<String> =
self.iter().map(|t| t.user_string(tcx)).collect();
strs.connect(", ")
}
}
impl<'tcx> Repr<'tcx> for def::Def {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::TypeParameterDef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TypeParameterDef({:?}, {}, {:?}/{})",
self.def_id,
self.bounds.repr(tcx),
self.space,
self.index)
}
}
impl<'tcx> Repr<'tcx> for ty::RegionParameterDef {
fn repr(&self, tcx: &ctxt) -> String {
format!("RegionParameterDef(name={}, def_id={}, bounds={})",
token::get_name(self.name),
self.def_id.repr(tcx),
self.bounds.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::TyS<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
ty_to_string(tcx, self)
}
}
impl<'tcx> Repr<'tcx> for ty::mt<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
mt_to_string(tcx, self)
}
}
impl<'tcx> Repr<'tcx> for subst::Substs<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Substs[types={}, regions={}]",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for subst::VecPerParamSpace<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("[{};{};{}]",
self.get_slice(subst::TypeSpace).repr(tcx),
self.get_slice(subst::SelfSpace).repr(tcx),
self.get_slice(subst::FnSpace).repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ItemSubsts<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ItemSubsts({})", self.substs.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for subst::RegionSubsts {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
subst::ErasedRegions => "erased".to_string(),
subst::NonerasedRegions(ref regions) => regions.repr(tcx)
}
}
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBounds {
fn repr(&self, _tcx: &ctxt) -> String {
let mut res = Vec::new();
for b in self.iter() {
res.push(match b {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundSync => "Sync".to_string(),
});
}
res.connect("+")
}
}
impl<'tcx> Repr<'tcx> for ty::ExistentialBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
self.user_string(tcx)
}
}
impl<'tcx> Repr<'tcx> for ty::ParamBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let mut res = Vec::new();
res.push(self.builtin_bounds.repr(tcx));
for t in self.trait_bounds.iter() {
res.push(t.repr(tcx));
}
res.connect("+")
}
}
impl<'tcx> Repr<'tcx> for ty::TraitRef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
// when printing out the debug representation, we don't need
// to enumerate the `for<...>` etc because the debruijn index
// tells you everything you need to know.
let base = ty::item_path_str(tcx, self.def_id);
let trait_def = ty::lookup_trait_def(tcx, self.def_id);
format!("TraitRef({}, {})",
self.substs.self_ty().repr(tcx),
parameterized(tcx, base.as_slice(), self.substs, &trait_def.generics, self.def_id))
}
}
impl<'tcx> Repr<'tcx> for ty::TraitDef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TraitDef(generics={}, bounds={}, trait_ref={})",
self.generics.repr(tcx),
self.bounds.repr(tcx),
self.trait_ref.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::TraitItem {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
ast::RequiredMethod(ref data) => format!("RequiredMethod({}, id={})",
data.ident, data.id),
ast::ProvidedMethod(ref data) => format!("ProvidedMethod(id={})",
data.id),
ast::TypeTraitItem(ref data) => format!("TypeTraitItem({}, id={})",
data.ty_param.ident, data.ty_param.id),
}
}
}
impl<'tcx> Repr<'tcx> for ast::Expr {
fn repr(&self, _tcx: &ctxt) -> String {
format!("expr({}: {})", self.id, pprust::expr_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Path {
fn repr(&self, _tcx: &ctxt) -> String {
format!("path({})", pprust::path_to_string(self))
}
}
impl<'tcx> UserString<'tcx> for ast::Path {
fn user_string(&self, _tcx: &ctxt) -> String {
pprust::path_to_string(self)
}
}
impl<'tcx> Repr<'tcx> for ast::Ty {
fn repr(&self, _tcx: &ctxt) -> String {
format!("type({})", pprust::ty_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Item {
fn repr(&self, tcx: &ctxt) -> String {
format!("item({})", tcx.map.node_to_string(self.id))
}
}
impl<'tcx> Repr<'tcx> for ast::Lifetime {
fn repr(&self, _tcx: &ctxt) -> String {
format!("lifetime({}: {})", self.id, pprust::lifetime_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Stmt {
fn repr(&self, _tcx: &ctxt) -> String {
format!("stmt({}: {})",
ast_util::stmt_id(self),
pprust::stmt_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Pat {
fn repr(&self, _tcx: &ctxt) -> String {
format!("pat({}: {})", self.id, pprust::pat_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ty::BoundRegion {<|fim▁hole|> format!("BrNamed({}, {})", id.repr(tcx), token::get_name(name))
}
ty::BrFresh(id) => format!("BrFresh({})", id),
ty::BrEnv => "BrEnv".to_string()
}
}
}
impl<'tcx> Repr<'tcx> for ty::Region {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::ReEarlyBound(id, space, index, name) => {
format!("ReEarlyBound({}, {:?}, {}, {})",
id,
space,
index,
token::get_name(name))
}
ty::ReLateBound(binder_id, ref bound_region) => {
format!("ReLateBound({:?}, {})",
binder_id,
bound_region.repr(tcx))
}
ty::ReFree(ref fr) => fr.repr(tcx),
ty::ReScope(id) => {
format!("ReScope({:?})", id)
}
ty::ReStatic => {
"ReStatic".to_string()
}
ty::ReInfer(ReVar(ref vid)) => {
format!("{:?}", vid)
}
ty::ReInfer(ReSkolemized(id, ref bound_region)) => {
format!("re_skolemized({}, {})", id, bound_region.repr(tcx))
}
ty::ReEmpty => {
"ReEmpty".to_string()
}
}
}
}
impl<'tcx> UserString<'tcx> for ty::Region {
fn user_string(&self, tcx: &ctxt) -> String {
region_to_string(tcx, "", false, *self)
}
}
impl<'tcx> Repr<'tcx> for ty::FreeRegion {
fn repr(&self, tcx: &ctxt) -> String {
format!("ReFree({}, {})",
self.scope.node_id(),
self.bound_region.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::DefId {
fn repr(&self, tcx: &ctxt) -> String {
// Unfortunately, there seems to be no way to attempt to print
// a path for a def-id, so I'll just make a best effort for now
// and otherwise fallback to just printing the crate/node pair
if self.krate == ast::LOCAL_CRATE {
match tcx.map.find(self.node) {
Some(ast_map::NodeItem(..)) |
Some(ast_map::NodeForeignItem(..)) |
Some(ast_map::NodeImplItem(..)) |
Some(ast_map::NodeTraitItem(..)) |
Some(ast_map::NodeVariant(..)) |
Some(ast_map::NodeStructCtor(..)) => {
return format!(
"{:?}:{}",
*self,
ty::item_path_str(tcx, *self))
}
_ => {}
}
}
return format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::TypeScheme<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TypeScheme {{generics: {}, ty: {}}}",
self.generics.repr(tcx),
self.ty.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Generics<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Generics(types: {}, regions: {}, predicates: {})",
self.types.repr(tcx),
self.regions.repr(tcx),
self.predicates.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::GenericBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("GenericBounds({})",
self.predicates.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ItemVariances {
fn repr(&self, tcx: &ctxt) -> String {
format!("ItemVariances(types={}, \
regions={})",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Variance {
fn repr(&self, _: &ctxt) -> String {
// The first `.to_string()` returns a &'static str (it is not an implementation
// of the ToString trait). Because of that, we need to call `.to_string()` again
// if we want to have a `String`.
let result: &'static str = (*self).to_string();
result.to_string()
}
}
impl<'tcx> Repr<'tcx> for ty::Method<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("method(name: {}, generics: {}, fty: {}, \
explicit_self: {}, vis: {}, def_id: {})",
self.name.repr(tcx),
self.generics.repr(tcx),
self.fty.repr(tcx),
self.explicit_self.repr(tcx),
self.vis.repr(tcx),
self.def_id.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::Name {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_name(*self).get().to_string()
}
}
impl<'tcx> UserString<'tcx> for ast::Name {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(*self).get().to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::Ident {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_ident(*self).get().to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::ExplicitSelf_ {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::Visibility {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BareFnTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("BareFnTy {{unsafety: {}, abi: {}, sig: {}}}",
self.unsafety,
self.abi.to_string(),
self.sig.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::FnSig<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("fn{} -> {}", self.inputs.repr(tcx), self.output.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::FnOutput<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ty::FnConverging(ty) =>
format!("FnConverging({0})", ty.repr(tcx)),
ty::FnDiverging =>
"FnDiverging".to_string()
}
}
}
impl<'tcx> Repr<'tcx> for ty::MethodCallee<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodCallee {{origin: {}, ty: {}, {}}}",
self.origin.repr(tcx),
self.ty.repr(tcx),
self.substs.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::MethodOrigin<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&ty::MethodStatic(def_id) => {
format!("MethodStatic({})", def_id.repr(tcx))
}
&ty::MethodStaticUnboxedClosure(def_id) => {
format!("MethodStaticUnboxedClosure({})", def_id.repr(tcx))
}
&ty::MethodTypeParam(ref p) => {
p.repr(tcx)
}
&ty::MethodTraitObject(ref p) => {
p.repr(tcx)
}
}
}
}
impl<'tcx> Repr<'tcx> for ty::MethodParam<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodParam({},{})",
self.trait_ref.repr(tcx),
self.method_num)
}
}
impl<'tcx> Repr<'tcx> for ty::MethodObject<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodObject({},{},{})",
self.trait_ref.repr(tcx),
self.method_num,
self.real_index)
}
}
impl<'tcx> Repr<'tcx> for ty::TraitStore {
fn repr(&self, tcx: &ctxt) -> String {
trait_store_to_string(tcx, *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBound {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> UserString<'tcx> for ty::BuiltinBound {
fn user_string(&self, _tcx: &ctxt) -> String {
match *self {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundSync => "Sync".to_string(),
}
}
}
impl<'tcx> Repr<'tcx> for Span {
fn repr(&self, tcx: &ctxt) -> String {
tcx.sess.codemap().span_to_string(*self).to_string()
}
}
impl<'tcx, A:UserString<'tcx>> UserString<'tcx> for Rc<A> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let this: &A = &**self;
this.user_string(tcx)
}
}
impl<'tcx> UserString<'tcx> for ty::ParamBounds<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let mut result = Vec::new();
let s = self.builtin_bounds.user_string(tcx);
if !s.is_empty() {
result.push(s);
}
for n in self.trait_bounds.iter() {
result.push(n.user_string(tcx));
}
result.connect(" + ")
}
}
impl<'tcx> UserString<'tcx> for ty::ExistentialBounds<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
if self.builtin_bounds.contains(&ty::BoundSend) &&
self.region_bound == ty::ReStatic
{ // Region bound is implied by builtin bounds:
return self.builtin_bounds.repr(tcx);
}
let mut res = Vec::new();
let region_str = self.region_bound.user_string(tcx);
if !region_str.is_empty() {
res.push(region_str);
}
for bound in self.builtin_bounds.iter() {
res.push(bound.user_string(tcx));
}
res.connect("+")
}
}
impl<'tcx> UserString<'tcx> for ty::BuiltinBounds {
fn user_string(&self, tcx: &ctxt) -> String {
self.iter()
.map(|bb| bb.user_string(tcx))
.collect::<Vec<String>>()
.connect("+")
.to_string()
}
}
impl<'tcx, T> UserString<'tcx> for ty::Binder<T>
where T : UserString<'tcx> + TypeFoldable<'tcx>
{
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
// Replace any anonymous late-bound regions with named
// variants, using gensym'd identifiers, so that we can
// clearly differentiate between named and unnamed regions in
// the output. We'll probably want to tweak this over time to
// decide just how much information to give.
let mut names = Vec::new();
let (unbound_value, _) = ty::replace_late_bound_regions(tcx, self, |br| {
ty::ReLateBound(ty::DebruijnIndex::new(1), match br {
ty::BrNamed(_, name) => {
names.push(token::get_name(name));
br
}
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => {
let name = token::gensym("'r");
names.push(token::get_name(name));
ty::BrNamed(ast_util::local_def(ast::DUMMY_NODE_ID), name)
}
})
});
let names: Vec<_> = names.iter().map(|s| s.get()).collect();
let value_str = unbound_value.user_string(tcx);
if names.len() == 0 {
value_str
} else {
format!("for<{}> {}", names.connect(","), value_str)
}
}
}
impl<'tcx> UserString<'tcx> for ty::TraitRef<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let path_str = ty::item_path_str(tcx, self.def_id);
let trait_def = ty::lookup_trait_def(tcx, self.def_id);
parameterized(tcx, path_str.as_slice(), self.substs,
&trait_def.generics, self.def_id)
}
}
impl<'tcx> UserString<'tcx> for Ty<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
ty_to_string(tcx, *self)
}
}
impl<'tcx> UserString<'tcx> for ast::Ident {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(self.name).get().to_string()
}
}
impl<'tcx> Repr<'tcx> for abi::Abi {
fn repr(&self, _tcx: &ctxt) -> String {
self.to_string()
}
}
impl<'tcx> UserString<'tcx> for abi::Abi {
fn user_string(&self, _tcx: &ctxt) -> String {
self.to_string()
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarId {
fn repr(&self, tcx: &ctxt) -> String {
format!("UpvarId({};`{}`;{})",
self.var_id,
ty::local_var_name_str(tcx, self.var_id),
self.closure_expr_id)
}
}
impl<'tcx> Repr<'tcx> for ast::Mutability {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BorrowKind {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarBorrow {
fn repr(&self, tcx: &ctxt) -> String {
format!("UpvarBorrow({}, {})",
self.kind.repr(tcx),
self.region.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::IntVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::FloatVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::RegionVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::TyVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::IntVarValue {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::IntTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::UintTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::FloatTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::ExplicitSelfCategory {
fn repr(&self, _: &ctxt) -> String {
explicit_self_category_to_str(self).to_string()
}
}
impl<'tcx> UserString<'tcx> for ParamTy {
fn user_string(&self, _tcx: &ctxt) -> String {
format!("{}", token::get_name(self.name))
}
}
impl<'tcx> Repr<'tcx> for ParamTy {
fn repr(&self, tcx: &ctxt) -> String {
let ident = self.user_string(tcx);
format!("{}/{:?}.{}", ident, self.space, self.idx)
}
}
impl<'tcx, A:Repr<'tcx>, B:Repr<'tcx>> Repr<'tcx> for (A,B) {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let &(ref a, ref b) = self;
format!("({},{})", a.repr(tcx), b.repr(tcx))
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for ty::Binder<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Binder({})", self.0.repr(tcx))
}
}
impl<'tcx, S, K, V> Repr<'tcx> for HashMap<K, V, S>
where K: Hash<<S as HashState>::Hasher> + Eq + Repr<'tcx>,
V: Repr<'tcx>,
S: HashState,
<S as HashState>::Hasher: Hasher<Output=u64>,
{
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("HashMap({})",
self.iter()
.map(|(k,v)| format!("{} => {}", k.repr(tcx), v.repr(tcx)))
.collect::<Vec<String>>()
.connect(", "))
}
}
impl<'tcx, T, U> Repr<'tcx> for ty::OutlivesPredicate<T,U>
where T : Repr<'tcx> + TypeFoldable<'tcx>,
U : Repr<'tcx> + TypeFoldable<'tcx>,
{
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("OutlivesPredicate({}, {})",
self.0.repr(tcx),
self.1.repr(tcx))
}
}
impl<'tcx, T, U> UserString<'tcx> for ty::OutlivesPredicate<T,U>
where T : UserString<'tcx> + TypeFoldable<'tcx>,
U : UserString<'tcx> + TypeFoldable<'tcx>,
{
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} : {}",
self.0.user_string(tcx),
self.1.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::EquatePredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("EquatePredicate({}, {})",
self.0.repr(tcx),
self.1.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::EquatePredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} == {}",
self.0.user_string(tcx),
self.1.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::TraitPredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TraitPredicate({})",
self.trait_ref.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::TraitPredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} : {}",
self.trait_ref.self_ty().user_string(tcx),
self.trait_ref.user_string(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::ProjectionPredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} == {}",
self.projection_ty.user_string(tcx),
self.ty.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ProjectionTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("<{} as {}>::{}",
self.trait_ref.self_ty().repr(tcx),
self.trait_ref.repr(tcx),
self.item_name.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::ProjectionTy<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("<{} as {}>::{}",
self.trait_ref.self_ty().user_string(tcx),
self.trait_ref.user_string(tcx),
self.item_name.user_string(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::Predicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ty::Predicate::Trait(ref data) => data.user_string(tcx),
ty::Predicate::Equate(ref predicate) => predicate.user_string(tcx),
ty::Predicate::RegionOutlives(ref predicate) => predicate.user_string(tcx),
ty::Predicate::TypeOutlives(ref predicate) => predicate.user_string(tcx),
ty::Predicate::Projection(ref predicate) => predicate.user_string(tcx),
}
}
}<|fim▁end|> | fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::BrAnon(id) => format!("BrAnon({})", id),
ty::BrNamed(id, name) => { |
<|file_name|>iosxr.py<|end_file_name|><|fim▁begin|>#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$"),
re.compile(br']]>]]>[\r\n]?')
]
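    # e.g. the first pattern recognises prompts such as (an assumed sample):
    #   RP/0/RSP0/CPU0:router#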
terminal_stderr_re = [
re.compile(br"% ?Error"),
re.compile(br"% ?Bad secret"),
re.compile(br"invalid input", re.I),
re.compile(br"(?:incomplete|ambiguous) command", re.I),
re.compile(br"connection timed out", re.I),
re.compile(br"[^\r\n]+ not found", re.I),
re.compile(br"'[^']' +returned error code: ?\d+"),
re.compile(br"Failed to commit", re.I)
]
def on_open_shell(self):
try:
for cmd in (b'terminal length 0', b'terminal width 512', b'terminal exec prompt no-timestamp'):
self._exec_cli_command(cmd)
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to set terminal parameters')<|fim▁end|> | # the Free Software Foundation, either version 3 of the License, or |
<|file_name|>fastcgi_test.go<|end_file_name|><|fim▁begin|>package fastcgi
import (
"net"
"net/http"
"net/http/fcgi"
"net/http/httptest"
"net/url"
"strconv"
"testing"
)
func TestServeHTTP(t *testing.T) {
body := "This is some test body content"
bodyLenStr := strconv.Itoa(len(body))
listener, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
t.Fatalf("Unable to create listener for test: %v", err)
}
defer listener.Close()
go fcgi.Serve(listener, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Length", bodyLenStr)
w.Write([]byte(body))
}))
handler := Handler{
Next: nil,
Rules: []Rule{{Path: "/", Address: listener.Addr().String()}},
}
r, err := http.NewRequest("GET", "/", nil)
if err != nil {
t.Fatalf("Unable to create request: %v", err)
}
w := httptest.NewRecorder()
status, err := handler.ServeHTTP(w, r)
if got, want := status, 0; got != want {
t.Errorf("Expected returned status code to be %d, got %d", want, got)
}
if err != nil {
t.Errorf("Expected nil error, got: %v", err)
}
if got, want := w.Header().Get("Content-Length"), bodyLenStr; got != want {
t.Errorf("Expected Content-Length to be '%s', got: '%s'", want, got)
}
if got, want := w.Body.String(), body; got != want {
t.Errorf("Expected response body to be '%s', got: '%s'", want, got)
}
}
func TestRuleParseAddress(t *testing.T) {
getClientTestTable := []struct {
rule *Rule
expectednetwork string
expectedaddress string
}{
{&Rule{Address: "tcp://172.17.0.1:9000"}, "tcp", "172.17.0.1:9000"},
{&Rule{Address: "fastcgi://localhost:9000"}, "tcp", "localhost:9000"},
{&Rule{Address: "172.17.0.15"}, "tcp", "172.17.0.15"},
{&Rule{Address: "/my/unix/socket"}, "unix", "/my/unix/socket"},
{&Rule{Address: "unix:/second/unix/socket"}, "unix", "/second/unix/socket"},
}
for _, entry := range getClientTestTable {
if actualnetwork, _ := entry.rule.parseAddress(); actualnetwork != entry.expectednetwork {
t.Errorf("Unexpected network for address string %v. Got %v, expected %v", entry.rule.Address, actualnetwork, entry.expectednetwork)
}
if _, actualaddress := entry.rule.parseAddress(); actualaddress != entry.expectedaddress {
t.Errorf("Unexpected parsed address for address string %v. Got %v, expected %v", entry.rule.Address, actualaddress, entry.expectedaddress)
}
}
}
func TestRuleIgnoredPath(t *testing.T) {
rule := &Rule{
Path: "/fastcgi",
IgnoredSubPaths: []string{"/download", "/static"},
}
tests := []struct {
url string
expected bool
}{
{"/fastcgi", true},
{"/fastcgi/dl", true},
{"/fastcgi/download", false},
{"/fastcgi/download/static", false},
{"/fastcgi/static", false},
{"/fastcgi/static/download", false},
{"/fastcgi/something/download", true},
{"/fastcgi/something/static", true},
{"/fastcgi//static", false},
{"/fastcgi//static//download", false},
{"/fastcgi//download", false},
}
for i, test := range tests {
allowed := rule.AllowedPath(test.url)
if test.expected != allowed {
t.Errorf("Test %d: expected %v found %v", i, test.expected, allowed)
}
}
}
func TestBuildEnv(t *testing.T) {
testBuildEnv := func(r *http.Request, rule Rule, fpath string, envExpected map[string]string) {
var h Handler
env, err := h.buildEnv(r, rule, fpath)
if err != nil {
t.Error("Unexpected error:", err.Error())
}
for k, v := range envExpected {
if env[k] != v {
t.Errorf("Unexpected %v. Got %v, expected %v", k, env[k], v)
}
}
}
rule := Rule{}
url, err := url.Parse("http://localhost:2015/fgci_test.php?test=blabla")
if err != nil {
t.Error("Unexpected error:", err.Error())
}
r := http.Request{
Method: "GET",
URL: url,
Proto: "HTTP/1.1",
ProtoMajor: 1,
ProtoMinor: 1,
Host: "localhost:2015",
RemoteAddr: "[2b02:1810:4f2d:9400:70ab:f822:be8a:9093]:51688",
RequestURI: "/fgci_test.php",
}
fpath := "/fgci_test.php"
var envExpected = map[string]string{
"REMOTE_ADDR": "[2b02:1810:4f2d:9400:70ab:f822:be8a:9093]",
"REMOTE_PORT": "51688",
"SERVER_PROTOCOL": "HTTP/1.1",
"QUERY_STRING": "test=blabla",
"REQUEST_METHOD": "GET",
"HTTP_HOST": "localhost:2015",
}
// 1. Test for full canonical IPv6 address
testBuildEnv(&r, rule, fpath, envExpected)
// 2. Test for shorthand notation of IPv6 address
r.RemoteAddr = "[::1]:51688"
envExpected["REMOTE_ADDR"] = "[::1]"
testBuildEnv(&r, rule, fpath, envExpected)
// 3. Test for IPv4 address
	r.RemoteAddr = "192.168.0.10:51688"
	envExpected["REMOTE_ADDR"] = "192.168.0.10"
	testBuildEnv(&r, rule, fpath, envExpected)
}
<|file_name|>python-arithmetic-operators.py<|end_file_name|>
if __name__ == '__main__':
    a = int(raw_input())
    b = int(raw_input())
print a + b
print a - b
    print a * b
<|file_name|>pages.py<|end_file_name|>
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
import re
import json
from datetime import datetime
from weboob.browser.pages import LoggedPage, HTMLPage, JsonPage
from weboob.browser.elements import DictElement, ItemElement, method
from weboob.browser.filters.standard import Date, CleanDecimal, CleanText, Format, Field, Env, Regexp, Currency
from weboob.browser.filters.json import Dict
from weboob.capabilities import NotAvailable
from weboob.capabilities.bank import Account, Loan
from weboob.capabilities.contact import Advisor
from weboob.capabilities.profile import Profile
from weboob.capabilities.bill import DocumentTypes, Subscription, Document
from weboob.tools.capabilities.bank.transactions import FrenchTransaction
from weboob.exceptions import BrowserUnavailable
class Transaction(FrenchTransaction):
PATTERNS = [
(re.compile(r'^CB (?P<text>.*?) FACT (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'^RET(RAIT)? DAB (?P<dd>\d+)-(?P<mm>\d+)-.*', re.IGNORECASE), FrenchTransaction.TYPE_WITHDRAWAL),
(re.compile(r'^RET(RAIT)? DAB (?P<text>.*?) (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2}) (?P<HH>\d{2})H(?P<MM>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_WITHDRAWAL),
(re.compile(r'^VIR(EMENT)?(\.PERIODIQUE)? (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_TRANSFER),
(re.compile(r'^PRLV (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_ORDER),
(re.compile(r'^CHEQUE.*', re.IGNORECASE), FrenchTransaction.TYPE_CHECK),
(re.compile(r'^(CONVENTION \d+ )?COTIS(ATION)? (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_BANK),
(re.compile(r'^\* (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_BANK),
(re.compile(r'^REMISE (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_DEPOSIT),
(re.compile(r'^(?P<text>.*)( \d+)? QUITTANCE .*', re.IGNORECASE), FrenchTransaction.TYPE_ORDER),
(re.compile(r'^CB [\d\*]+ TOT DIF .*', re.IGNORECASE), FrenchTransaction.TYPE_CARD_SUMMARY),
(re.compile(r'^CB [\d\*]+ (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'^CB (?P<text>.*?) (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'\*CB (?P<text>.*?) (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'^FAC CB (?P<text>.*?) (?P<dd>\d{2})/(?P<mm>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
]
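    # Illustration (hypothetical label): 'VIR SALAIRE ACME' is caught by the
    # ^VIR(EMENT)? pattern above, so the transaction is typed TYPE_TRANSFER
    # with the captured 'text' group equal to 'SALAIRE ACME'.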
class LoginPage(JsonPage):
def get_response(self):
return self.doc
class CenetLoginPage(HTMLPage):
def login(self, username, password, nuser, codeCaisse, _id, vkpass):
form = self.get_form(id='aspnetForm')
form['__EVENTTARGET'] = "btn_authentifier_securise"
form['__EVENTARGUMENT'] = '{"CodeCaisse":"%s","NumeroBad":"%s","NumeroUsager":"%s",\
"MotDePasse":"%s","IdentifiantClavier":"%s","ChaineConnexion":"%s"}' \
% (codeCaisse, username, nuser, password, _id, vkpass)
form.submit()
class CenetHomePage(LoggedPage, HTMLPage):
@method
class get_advisor(ItemElement):
klass = Advisor
obj_name = CleanText('//section[contains(@id, "ChargeAffaires")]//strong')
obj_email = CleanText('//li[contains(@id, "MailContact")]')
obj_phone = CleanText('//li[contains(@id, "TelAgence")]', replace=[('.', '')])
obj_mobile = NotAvailable
obj_agency = CleanText('//section[contains(@id, "Agence")]//strong')
obj_address = CleanText('//li[contains(@id, "AdresseAgence")]')
def obj_fax(self):
return CleanText('//li[contains(@id, "FaxAgence")]', replace=[('.', '')])(self) or NotAvailable
@method
class get_profile(ItemElement):
klass = Profile
obj_name = CleanText('//li[@class="identite"]/a/span')
class CenetJsonPage(JsonPage):
def __init__(self, browser, response, *args, **kwargs):
super(CenetJsonPage, self).__init__(browser, response, *args, **kwargs)
        # The payload is double-encoded: the 'd' member itself holds a JSON string.
self.doc = json.loads(self.doc['d'])
if self.doc['Erreur'] and (self.doc['Erreur']['Titre'] or self.doc['Erreur']['Code']):
self.logger.warning('error on %r: %s', self.url, self.doc['Erreur']['Titre'] or self.doc['Erreur']['Code'])
raise BrowserUnavailable(self.doc['Erreur']['Titre'] or self.doc['Erreur']['Description'])
self.doc['DonneesSortie'] = json.loads(self.doc['DonneesSortie'])
class CenetAccountsPage(LoggedPage, CenetJsonPage):
ACCOUNT_TYPES = {'CCP': Account.TYPE_CHECKING}
@method
class get_accounts(DictElement):
item_xpath = "DonneesSortie"
class item(ItemElement):
klass = Account
obj_id = CleanText(Dict('Numero'))
obj_label = CleanText(Dict('Intitule'))
obj_iban = CleanText(Dict('IBAN'))
def obj_balance(self):
absolut_amount = CleanDecimal(Dict('Solde/Valeur'))(self)
if CleanText(Dict('Solde/CodeSens'))(self) == 'D':
return -absolut_amount
return absolut_amount
def obj_currency(self):
return CleanText(Dict('Devise'))(self).upper()
def obj_type(self):
return self.page.ACCOUNT_TYPES.get(Dict('TypeCompte')(self), Account.TYPE_UNKNOWN)
def obj__formated(self):
return self.el
class CenetLoanPage(LoggedPage, CenetJsonPage):
@method
class get_accounts(DictElement):
item_xpath = "DonneesSortie"
class item(ItemElement):
klass = Loan
obj_id = CleanText(Dict('IdentifiantUniqueContrat'), replace=[(' ', '-')])
obj_label = CleanText(Dict('Libelle'))
obj_total_amount = CleanDecimal(Dict('MontantInitial/Valeur'))
obj_currency = Currency(Dict('MontantInitial/Devise'))
obj_type = Account.TYPE_LOAN
obj_duration = CleanDecimal(Dict('Duree'))
obj_rate = CleanDecimal.French(Dict('Taux'))
obj_next_payment_amount = CleanDecimal(Dict('MontantProchaineEcheance/Valeur'))
def obj_balance(self):
balance = CleanDecimal(Dict('CapitalRestantDu/Valeur'))(self)
if balance > 0:
balance *= -1
return balance
def obj_subscription_date(self):
sub_date = Dict('DateDebutEffet')(self)
if sub_date:
date = CleanDecimal().filter(sub_date) / 1000
return datetime.fromtimestamp(date).date()
return NotAvailable
def obj_maturity_date(self):
mat_date = Dict('DateDerniereEcheance')(self)
if mat_date:
date = CleanDecimal().filter(mat_date) / 1000
return datetime.fromtimestamp(date).date()
return NotAvailable
def obj_next_payment_date(self):
next_date = Dict('DateProchaineEcheance')(self)
if next_date:
date = CleanDecimal().filter(next_date) / 1000
return datetime.fromtimestamp(date).date()
return NotAvailable
class CenetCardsPage(LoggedPage, CenetJsonPage):
def get_cards(self):
cards = Dict('DonneesSortie')(self.doc)
# Remove dates to prevent bad parsing
        def reword_dates(card):
            for k, v in card.items():
                if isinstance(v, dict):
                    reword_dates(v)
                elif k == "Date" and v is not None and "Date" in v:
                    card[k] = None
for card in cards:
reword_dates(card)
return cards
class CenetAccountHistoryPage(LoggedPage, CenetJsonPage):
TR_TYPES_LABEL = {
'VIR': Transaction.TYPE_TRANSFER,
'CHEQUE': Transaction.TYPE_CHECK,
'REMISE CHEQUE': Transaction.TYPE_CASH_DEPOSIT,
'PRLV': Transaction.TYPE_ORDER,
}
TR_TYPES_API = {
'VIR': Transaction.TYPE_TRANSFER,
'PE': Transaction.TYPE_ORDER, # PRLV
'CE': Transaction.TYPE_CHECK, # CHEQUE
'DE': Transaction.TYPE_CASH_DEPOSIT, # APPRO
'PI': Transaction.TYPE_CASH_DEPOSIT, # REMISE CHEQUE
}
@method
class get_history(DictElement):
item_xpath = "DonneesSortie"
class item(ItemElement):
klass = Transaction
obj_raw = Format('%s %s', Dict('Libelle'), Dict('Libelle2'))
obj_label = CleanText(Dict('Libelle'))
obj_date = Date(Dict('DateGroupImputation'), dayfirst=True)
obj_rdate = Date(Dict('DateGroupReglement'), dayfirst=True)
def obj_type(self):
ret = Transaction.TYPE_UNKNOWN
# The API may send the same key for 'PRLV' and 'VIR' transactions
# So the label is checked first, then the API key
for k, v in self.page.TR_TYPES_LABEL.items():
if Field('label')(self).startswith(k):
ret = v
break
if ret == Transaction.TYPE_UNKNOWN:
ret = self.page.TR_TYPES_API.get(Dict('TypeOperationDisplay')(self), Transaction.TYPE_UNKNOWN)
if ret != Transaction.TYPE_UNKNOWN:
return ret
for pattern, type in Transaction.PATTERNS:
if pattern.match(Field('raw')(self)):
return type
return Transaction.TYPE_UNKNOWN
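            # Illustration (hypothetical row): an entry whose API key
            # TypeOperationDisplay is 'VIR' but whose label starts with 'PRLV'
            # is typed TYPE_ORDER, because the label table is consulted before
            # the API-key table above.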
def obj_amount(self):
amount = CleanDecimal(Dict('Montant/Valeur'))(self)
return -amount if Dict('Montant/CodeSens')(self) == "D" else amount
def next_offset(self):
offset = Dict('OffsetSortie')(self.doc)
if offset:
assert Dict('EstComplete')(self.doc) == 'false'
return offset
class CenetCardSummaryPage(LoggedPage, CenetJsonPage):
@method
class get_history(DictElement):
item_xpath = "DonneesSortie/OperationsCB"
class item(ItemElement):
klass = Transaction
obj_label = CleanText(Dict('Libelle'))
obj_date = Date(Dict('DateGroupImputation'), dayfirst=True)
obj_type = Transaction.TYPE_DEFERRED_CARD
def obj_raw(self):
label = Dict('Libelle')(self)
label2 = Dict('Libelle2')(self)
if label2 and label2 != 'None':
return '%s %s' % (label, label2)
else:
return label
def obj_rdate(self):
                rdate = re.search(r'(FACT\s)(\d{6})', Field('label')(self))
                if rdate and rdate.group(2):
                    return Date(dayfirst=True).filter(rdate.group(2))
                return NotAvailable
def obj_amount(self):
amount = CleanDecimal(Dict('Montant/Valeur'))(self)
return -amount if Dict('Montant/CodeSens')(self) == "D" else amount
class _LogoutPage(HTMLPage):
def on_load(self):
raise BrowserUnavailable(CleanText('//*[@class="messErreur"]')(self.doc))
class ErrorPage(_LogoutPage):
pass
class UnavailablePage(HTMLPage):
def on_load(self):
raise BrowserUnavailable(CleanText('//div[@id="message_error_hs"]')(self.doc))
class SubscriptionPage(LoggedPage, CenetJsonPage):
@method
class iter_subscription(DictElement):
item_xpath = 'DonneesSortie'
class item(ItemElement):
klass = Subscription
obj_id = CleanText(Dict('Numero'))
obj_label = CleanText(Dict('Intitule'))
obj_subscriber = Env('subscriber')
@method
class iter_documents(DictElement):
item_xpath = 'DonneesSortie'
class item(ItemElement):
klass = Document
obj_id = Format('%s_%s_%s', Env('sub_id'), Dict('Numero'), CleanText(Env('french_date'), symbols='/'))
obj_format = 'pdf'
obj_type = DocumentTypes.OTHER
obj__numero = CleanText(Dict('Numero'))
obj__sub_id = Env('sub_id')
obj__sub_label = Env('sub_label')
obj__download_id = CleanText(Dict('IdDocument'))
def obj_date(self):
date = Regexp(Dict('DateArrete'), r'Date\((\d+)\)')(self)
date = int(date) // 1000
return datetime.fromtimestamp(date).date()
def obj_label(self):
return '%s %s' % (CleanText(Dict('Libelle'))(self), Env('french_date')(self))
def parse(self, el):
self.env['french_date'] = Field('date')(self).strftime('%d/%m/%Y')
class DownloadDocumentPage(LoggedPage, HTMLPage):
def download_form(self, document):
data = {
'Numero': document._numero,
'Libelle': document._sub_label.replace(' ', '+'),
'DateArrete': '',
'IdDocument': document._download_id
}
form = self.get_form(id='aspnetForm')
form['__EVENTTARGET'] = 'btn_telecharger'
form['__EVENTARGUMENT'] = json.dumps(data)
        return form.submit()
<|file_name|>JavaScriptOperatorTest.java<|end_file_name|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.lib.script;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import com.datatorrent.lib.testbench.CollectorTestSink;
import com.datatorrent.lib.util.TestUtils;
/**
* Functional tests for {@link com.datatorrent.lib.script.JavaScriptOperator}.
*/
public class JavaScriptOperatorTest
{
@Test
public void testJavaOperator()
{
    // Create JavaScript operator instance (calculates square).
JavaScriptOperator oper = new JavaScriptOperator();
oper.addSetupScript("function square() { return val*val;}");
oper.setInvoke("square");
oper.setPassThru(true);
oper.setup(null);
CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
TestUtils.setSink(oper.result, sink);
// Add input sample data.
HashMap<String, Object> tuple = new HashMap<String, Object>();
tuple.put("val", 2);
// Process operator.
oper.beginWindow(0);
oper.inBindings.process(tuple);
oper.endWindow();
// Validate value.
Assert.assertEquals("number emitted tuples", 1, sink.collectedTuples.size());
    for (Object o : sink.collectedTuples) { // exactly one tuple expected
Assert.assertEquals("4.0 is expected", (Double) o, 4.0, 0);
}
}
}
<|file_name|>paretto-edit.component.ts<|end_file_name|>
import { Component, OnInit } from '@angular/core';

@Component({
styleUrls: ['./paretto-edit.component.scss']
})
export class ParettoEditComponent implements OnInit {
constructor() { }
ngOnInit() {
}
}<|fim▁end|> |
@Component({
selector: 'app-paretto-edit', |
<|file_name|>module_unittest.py<|end_file_name|>
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for the module module, which contains Module and related classes."""
import os
import unittest
from tvcm import fake_fs
from tvcm import module
from tvcm import resource_loader
from tvcm import project as project_module
class ModuleIntegrationTests(unittest.TestCase):
def test_module(self):
fs = fake_fs.FakeFS()
fs.AddFile('/src/x.html', """
<!DOCTYPE html>
<link rel="import" href="/y.html">
<link rel="import" href="/z.html">
<script>
'use strict';
</script>
""")
fs.AddFile('/src/y.html', """
<!DOCTYPE html>
<link rel="import" href="/z.html">
""")
fs.AddFile('/src/z.html', """
<!DOCTYPE html>
""")
fs.AddFile('/src/tvcm.html', '<!DOCTYPE html>')
with fs:
project = project_module.Project([os.path.normpath('/src/')])
loader = resource_loader.ResourceLoader(project)
x_module = loader.LoadModule('x')
self.assertEquals([loader.loaded_modules['y'],
loader.loaded_modules['z']],
x_module.dependent_modules)
already_loaded_set = set()
load_sequence = []
x_module.ComputeLoadSequenceRecursive(load_sequence, already_loaded_set)
self.assertEquals([loader.loaded_modules['z'],
loader.loaded_modules['y'],
x_module],
load_sequence)
def testBasic(self):
fs = fake_fs.FakeFS()
fs.AddFile('/x/src/my_module.html', """
<!DOCTYPE html>
<link rel="import" href="/tvcm/foo.html">
});
""")
fs.AddFile('/x/tvcm/foo.html', """
<!DOCTYPE html>
});
""")
project = project_module.Project([os.path.normpath('/x')])
loader = resource_loader.ResourceLoader(project)
with fs:
my_module = loader.LoadModule(module_name='src.my_module')
dep_names = [x.name for x in my_module.dependent_modules]
self.assertEquals(['tvcm.foo'], dep_names)
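      # Note the name mapping being exercised: 'src.my_module' resolves to
      # /x/src/my_module.html under the search path /x, and its
      # <link rel="import" href="/tvcm/foo.html"> shows up as module 'tvcm.foo'.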
def testDepsExceptionContext(self):
fs = fake_fs.FakeFS()
fs.AddFile('/x/src/my_module.html', """
<!DOCTYPE html>
<link rel="import" href="/tvcm/foo.html">
""")
fs.AddFile('/x/tvcm/foo.html', """
<!DOCTYPE html>
<link rel="import" href="missing.html">
""")
project = project_module.Project([os.path.normpath('/x')])
loader = resource_loader.ResourceLoader(project)
with fs:
exc = None
try:
loader.LoadModule(module_name='src.my_module')
assert False, 'Expected an exception'
except module.DepsException, e:
exc = e
self.assertEquals(
['src.my_module', 'tvcm.foo'],
exc.context)
def testGetAllDependentFilenamesRecursive(self):
fs = fake_fs.FakeFS()
fs.AddFile('/x/y/z/foo.html', """
<!DOCTYPE html>
<link rel="import" href="/z/foo2.html">
<link rel="stylesheet" href="/z/foo.css">
<script src="/bar.js"></script>
""")
fs.AddFile('/x/y/z/foo.css', """
.x .y {
background-image: url(foo.jpeg);
}
""")
fs.AddFile('/x/y/z/foo.jpeg', '')
fs.AddFile('/x/y/z/foo2.html', """
<!DOCTYPE html>
""")
fs.AddFile('/x/raw/bar.js', 'hello')
project = project_module.Project([
os.path.normpath('/x/y'), os.path.normpath('/x/raw/')])
    loader = resource_loader.ResourceLoader(project)
    with fs:
      my_module = loader.LoadModule(module_name='z.foo')
      self.assertEquals(1, len(my_module.dependent_raw_scripts))
dependent_filenames = my_module.GetAllDependentFilenamesRecursive()
self.assertEquals(
[
os.path.normpath('/x/y/z/foo.html'),
os.path.normpath('/x/raw/bar.js'),
os.path.normpath('/x/y/z/foo.css'),
os.path.normpath('/x/y/z/foo.jpeg'),
os.path.normpath('/x/y/z/foo2.html'),
],
        dependent_filenames)
<|file_name|>gulpfile.js<|end_file_name|>
// this is a little Node program, running on a Node server
// require() will look inside of a folder and get information, similar to import
// NOTE: gulpfile.js must be in the main directory
var gulp = require('gulp');
var sass = require('gulp-sass');
var concat = require('gulp-concat');
var autoprefixer = require('gulp-autoprefixer');
var minifyCss = require('gulp-minify-css');
var browserSync = require('browser-sync').create();
var reload = browserSync.reload;
var jade = require('gulp-jade');
var plumber = require('gulp-plumber');
////////////////////////////////////////////
// JADE COMPILE
////////////////////////////////////////////
gulp.task('jade', function() {
var jade_locals = {};
// using src = ./*.jade causes index.layout.jade to also compile which we don't want... unless we have multiple main directory files... in which case we do use ./*.jade
// otherwise use src = ./index.jade if there aren't other jade files in ./ (i.e. contact.jade, about.jade, etc.)
return gulp.src('./index.jade')
.pipe(plumber())
.pipe(jade({
locals: jade_locals,
pretty: true
}))
.pipe(gulp.dest('./'))
});
////////////////////////////////////////////
// END JADE COMPILE
////////////////////////////////////////////
////////////////////////////////////////////
// SASS COMPILE
////////////////////////////////////////////
gulp.task('sass', function () {
return gulp.src('css/*.scss')
.pipe(plumber())
.pipe(sass({
'sourcemap=none':true,
'errLogToConsole':true
}))
.pipe(concat('style.css'))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
// .pipe(minifyCss({compatibility: 'ie8'}))
.pipe(gulp.dest('css/'))
.pipe(browserSync.stream());
});
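// Note on pipe order above: Sass compiles first, then concat merges the
// output into style.css, autoprefixer rewrites it, and browserSync.stream()
// comes last so the browser is injected with the fully processed CSS.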
////////////////////////////////////////////
// END SASS COMPILE
////////////////////////////////////////////
////////////////////////////////////////////
// BROWSER SYNC
////////////////////////////////////////////
gulp.task('server', ['sass','jade'], function() {
browserSync.init({
server: "./",
});
gulp.watch("css/*.scss", ['sass']);<|fim▁hole|> // the SCSS partials need to be in their own folder because css/*.scss causes all of them to trigger in the same directory, in the order they currently are which messes up everything
gulp.watch("css/partials/*.scss", ['sass']);
gulp.watch('./*.jade',['jade']);
// to get jade partials to trigger changes
gulp.watch('includes/*.jade',['jade']);
// whenever the .js files change reload
gulp.watch("js/*.js").on('change', reload);
// whenever the .css file changes reload
gulp.watch("css/*.css").on('change', reload);
// whenever the .html file changes reload
gulp.watch("*.html").on('change', reload);
});
////////////////////////////////////////////
// END BROWSER SYNC
////////////////////////////////////////////
////////////////////////////////////////////
// DEFAULT
////////////////////////////////////////////
gulp.task('default', ['server'], function () {
// place everything in here in 'server'
});
////////////////////////////////////////////
// END DEFAULT
////////////////////////////////////////////
<|file_name|>FilerioCom.py<|end_file_name|>
# -*- coding: utf-8 -*-
from module.plugins.internal.XFSAccount import XFSAccount
class FilerioCom(XFSAccount):
__name__ = "FilerioCom"
__type__ = "account"
    __version__ = "0.07"
    __status__ = "testing"
__description__ = """FileRio.in account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")]
    PLUGIN_DOMAIN = "filerio.in"
<|file_name|>test_endpoint.py<|end_file_name|>
from asyncio import coroutine
import pytest
from aiohttp import HttpBadRequest, HttpMethodNotAllowed
from fluentmock import create_mock
from aiohttp_rest import RestEndpoint
class CustomEndpoint(RestEndpoint):
def get(self):
pass
def patch(self):
pass
@pytest.fixture
def endpoint():
return RestEndpoint()
@pytest.fixture
def custom_endpoint():
return CustomEndpoint()
def test_exiting_methods_are_registered_during_initialisation(custom_endpoint: CustomEndpoint):
assert len(custom_endpoint.methods) == 2
assert ('GET', custom_endpoint.get) in custom_endpoint.methods.items()
assert ('PATCH', custom_endpoint.patch) in custom_endpoint.methods.items()
def test_register_method(endpoint: RestEndpoint):
def sample_method():
pass
endpoint.register_method('verb', sample_method)
assert ('VERB', sample_method) in endpoint.methods.items()
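    # The lookup under 'VERB' documents the (assumed) upper-casing done by
    # register_method, so registering 'get' would dispatch on HTTP GET.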
@pytest.mark.asyncio
async def test_dispatch_uses_correct_handler_for_verb(endpoint: RestEndpoint):
endpoint.register_method('VERB1', coroutine(lambda: 5))
endpoint.register_method('VERB2', coroutine(lambda: 17))
assert await endpoint.dispatch(create_mock(method='VERB1', match_info={})) == 5
assert await endpoint.dispatch(create_mock(method='VERB2', match_info={})) == 17
@pytest.mark.asyncio
async def test_dispatch_passes_request_when_required(endpoint: RestEndpoint):
endpoint.register_method('REQUEST', coroutine(lambda request: request))
request = create_mock(method='REQUEST', match_info={})
assert await endpoint.dispatch(request) == request
@pytest.mark.asyncio
async def test_dispatch_passes_match_info_when_required(endpoint: RestEndpoint):
endpoint.register_method('MATCH_INFO', coroutine(lambda prop1, prop2: (prop2, prop1)))
request = create_mock(method='MATCH_INFO', match_info={'prop1': 1, 'prop2': 2})
    assert await endpoint.dispatch(request) == (2, 1)
@pytest.mark.asyncio
async def test_dispatch_raises_bad_request_when_match_info_does_not_exist(endpoint: RestEndpoint):
    endpoint.register_method('BAD_MATCH_INFO', coroutine(lambda no_match: no_match))
request = create_mock(method='BAD_MATCH_INFO', match_info={})
with pytest.raises(HttpBadRequest):
await endpoint.dispatch(request)
@pytest.mark.asyncio
async def test_dispatch_raises_method_not_allowed_when_verb_not_matched(endpoint: RestEndpoint):
request = create_mock(method='NO_METHOD')
with pytest.raises(HttpMethodNotAllowed):
        await endpoint.dispatch(request)
<|file_name|>jp2kakadu.py<|end_file_name|>
# Copyright 2014 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import argparse
import glob
import subprocess
import pdb
"""
This is a script to convert JP2 to TIFF for Mitra's Data. \
We use Kakadu software for this script. Kakadu only runs on Ubuntu \
and has to have the library added to shared path.
"""
def main():
parser = argparse.ArgumentParser(description='Convert JP2 to PNG')
parser.add_argument('path', action="store", help='Directory with JP2 Files')
parser.add_argument('location', action="store", help='Directory to write to')
result = parser.parse_args()
# Reading all the jp2 files in that directory
filelist = glob.glob(result.path+'*.jp2')
for name in filelist:
print "Opening: {}".format( name )
# Identifying the subdirectory to place the data under
if name.find('F') != -1:
subfile = 'F/'
elif name.find('IHC') != -1:
subfile = 'IHC/'
elif name.find('N') != -1:
subfile = 'N/'
# Determine the write location of the file. This was /mnt on datascopes
writelocation = result.location+subfile+name.split(result.path)[1].split('_')[3].split('.')[0]
        # Call kakadu expand from the command line, specify the input and the output filenames
        subprocess.call( [ './kdu_expand' ,'-i', '{}'.format(name), '-o', '{}.tiff'.format(writelocation) ] )
if __name__ == "__main__":
    main()
<|file_name|>__init__.py<|end_file_name|>
#
# Created by DraX on 2005.08.08
#
# Updated by ElgarL on 28.09.2005
#
print "importing village master data: Talking Island Village ...done"
import sys
from net.sf.l2j.gameserver.model.quest import State
from net.sf.l2j.gameserver.model.quest import QuestState
from net.sf.l2j.gameserver.model.quest.jython import QuestJython as JQuest
GRAND_MASTER_BITZ = 7026
class Quest (JQuest) :
def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
def onEvent (self,event,st):
htmltext = "No Quest"
if event == "7026-01.htm":
htmltext = event
if event == "7026-02.htm":
htmltext = event
if event == "7026-03.htm":
htmltext = event
if event == "7026-04.htm":
htmltext = event
if event == "7026-05.htm":
htmltext = event
if event == "7026-06.htm":
htmltext = event
if event == "7026-07.htm":
htmltext = event
return htmltext
def onTalk (Self,npc,st):
npcId = npc.getNpcId()
Race = st.getPlayer().getRace()
pcId = st.getPlayer().getClassId().getId()
# Humans got accepted
if npcId == GRAND_MASTER_BITZ and Race in [Race.human]:
#fighter
if pcId == 0x00:
htmltext = "7026-01.htm"
#warrior, knight, rogue
if pcId == 0x01 or pcId == 0x04 or pcId == 0x07:
htmltext = "7026-08.htm"
    #warlord, paladin, treasureHunter
    if pcId == 0x03 or pcId == 0x05 or pcId == 0x08:
htmltext = "7026-09.htm"
#gladiator, darkAvenger, hawkeye
if pcId == 0x02 or pcId == 0x06 or pcId == 0x09:
htmltext = "7026-09.htm"
#mage, wizard, cleric]:
if pcId == 0x0a or pcId == 0x0b or pcId == 0x0f:
htmltext = "7026-10.htm"
#sorceror, necromancer, warlock, bishop, prophet
if pcId == 0x0c or pcId == 0x0d or pcId == 0x0e or pcId == 0x10 or pcId == 0x11:
htmltext = "7026-10.htm"
st.setState(STARTED)
return htmltext
# All other Races must be out
if npcId == GRAND_MASTER_BITZ and Race in [Race.dwarf, Race.darkelf, Race.elf, Race.orc]:
st.setState(COMPLETED)
st.exitQuest(1)
return "7026-10.htm"
QUEST = Quest(7026,"7026_bitz_occupation_change","village_master")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(7026)
STARTED.addTalkId(7026)
<|file_name|>benchmark.scipy.py<|end_file_name|>
#!/usr/bin/env python
#
# @license Apache-2.0
#
# Copyright (c) 2018 The Stdlib Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark scipy.stats.arcsine."""
from __future__ import print_function
import timeit
REPEATS = 3
COUNT = [0] # use a list to allow modification within nested scopes
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(iterations, elapsed):
"""Print benchmark results.
# Arguments
* `iterations`: number of iterations
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(1000000, 0.131009101868)
```
"""
rate = iterations / elapsed
print(" ---")
print(" iterations: " + str(iterations))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark(name, setup, stmt, iterations):
"""Run the benchmark and print benchmark results.
# Arguments
* `name`: benchmark name
* `setup`: benchmark setup
* `stmt`: statement to benchmark
* `iterations`: number of iterations
# Examples
``` python
python> benchmark("random", "from random import random;", "y = random()", 1000000)
```
"""
t = timeit.Timer(stmt, setup=setup)
print_version()
i = 0
while i < REPEATS:
print("# python::" + name)
COUNT[0] += 1
elapsed = t.timeit(number=iterations)
print_results(iterations, elapsed)
print("ok " + str(COUNT[0]) + " benchmark finished")
i += 1
def main():
"""Run the benchmarks."""
name = "arcsine:entropy"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.entropy()"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:kurtosis"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.stats(moments='k')"
iterations = 1000
    benchmark(name, setup, stmt, iterations)
    name = "arcsine:mean"
    setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
    stmt = "y = rv.mean()"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:median"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.median()"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:skewness"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.stats(moments='s')"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:stdev"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.std()"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:variance"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.var()"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:cdf"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.cdf(random())"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:logpdf"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.logpdf(random())"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:pdf"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.pdf(random())"
iterations = 1000
benchmark(name, setup, stmt, iterations)
name = "arcsine:quantile"
setup = "from scipy.stats import arcsine; from random import random; rv = arcsine(10.0, 20.0);"
stmt = "y = rv.ppf(random())"
iterations = 1000
benchmark(name, setup, stmt, iterations)
print_summary(COUNT[0], COUNT[0])
if __name__ == "__main__":
    main()
<|file_name|>Airy_root_finder.py<|end_file_name|>
#!/usr/bin/env python3
from scipy.special import airy
from numpy import abs
def f(xinput):
x0=xinput
xoutput=x0
Ai=abs(airy(-xoutput)[0])
    while Ai>1e-12:
        ai=abs(airy(-xoutput))
        Ai=ai[0]
Aip=ai[1]
xoutput=xoutput+Ai/Aip
    return Ai, xoutput
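# Example usage (illustrative; not part of the original script): Ai has its
# first zero near -2.33810741, so starting from x0 = 2.0 the iteration above
# should converge to x ~ 2.33810741 for Ai(-x).
if __name__ == '__main__':
    residual, root = f(2.0)
    print(root)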
<|file_name|>ml_error.py<|end_file_name|>
"""
ML Estimation of Spatial Error Model
"""
__author__ = "Luc Anselin [email protected], Serge Rey [email protected]"
import numpy as np
import numpy.linalg as la
import pysal as ps
from pysal.spreg.utils import RegressionPropsY, RegressionPropsVM
import diagnostics as DIAG
import user_output as USER
import summary_output as SUMMARY
import regimes as REGI
from w_utils import symmetrize
try:
from scipy.optimize import minimize_scalar
minimize_scalar_available = True
except ImportError:
minimize_scalar_available = False
__all__ = ["ML_Error"]
class BaseML_Error(RegressionPropsY, RegressionPropsVM, REGI.Regimes_Frame):
"""
ML estimation of the spatial error model (note no consistency
checks, diagnostics or constants added); Anselin (1988) [1]_
Parameters
----------
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, excluding the constant
w : Sparse matrix
Spatial weights sparse matrix
method : string
if 'full', brute force calculation (full matrix expressions)
epsilon : float
tolerance criterion in mimimize_scalar function and inverse_product
regimes_att : dictionary
Dictionary containing elements to be used in case of a regimes model,
i.e. 'x' before regimes, 'regimes' list and 'cols2regi'
Attributes
----------
betas : array
kx1 array of estimated coefficients
lam : float
estimate of spatial autoregressive coefficient
u : array
nx1 array of residuals
e_filtered : array
spatially filtered residuals
predy : array
nx1 array of predicted y values
n : integer
Number of observations
k : integer
Number of variables for which coefficients are estimated
(including the constant, excluding the rho)
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, including the constant
method : string
log Jacobian method
if 'full': brute force (full matrix computations)
if 'ord' : Ord eigenvalue method
epsilon : float
tolerance criterion used in minimize_scalar function and inverse_product
mean_y : float
Mean of dependent variable
std_y : float
Standard deviation of dependent variable
vm : array
Variance covariance matrix (k+1 x k+1) - includes lambda
vm1 : array
2x2 array of variance covariance for lambda, sigma
sig2 : float
Sigma squared used in computations
logll : float
maximized log-likelihood (including constant terms)
Examples
--------
>>> import numpy as np
>>> import pysal as ps
>>> np.set_printoptions(suppress=True) #prevent scientific format
>>> db = ps.open(ps.examples.get_path("south.dbf"),'r')
>>> y_name = "HR90"
>>> y = np.array(db.by_col(y_name))
>>> y.shape = (len(y),1)
>>> x_names = ["RD90","PS90","UE90","DV90"]
>>> x = np.array([db.by_col(var) for var in x_names]).T
>>> x = np.hstack((np.ones((len(y),1)),x))
>>> ww = ps.open(ps.examples.get_path("south_q.gal"))
>>> w = ww.read()
>>> ww.close()
>>> w.transform = 'r'
>>> mlerr = BaseML_Error(y,x,w) #doctest: +SKIP
>>> "{0:.6f}".format(mlerr.lam) #doctest: +SKIP
'0.299078'
>>> np.around(mlerr.betas, decimals=4) #doctest: +SKIP
array([[ 6.1492],
[ 4.4024],
[ 1.7784],
[-0.3781],
[ 0.4858],
[ 0.2991]])
>>> "{0:.6f}".format(mlerr.mean_y) #doctest: +SKIP
'9.549293'
>>> "{0:.6f}".format(mlerr.std_y) #doctest: +SKIP
'7.038851'
>>> np.diag(mlerr.vm) #doctest: +SKIP
array([ 1.06476526, 0.05548248, 0.04544514, 0.00614425, 0.01481356,
0.00143001])
>>> "{0:.6f}".format(mlerr.sig2[0][0]) #doctest: +SKIP
'32.406854'
>>> "{0:.6f}".format(mlerr.logll) #doctest: +SKIP
'-4471.407067'
>>> mlerr1 = BaseML_Error(y,x,w,method='ord') #doctest: +SKIP
>>> "{0:.6f}".format(mlerr1.lam) #doctest: +SKIP
'0.299078'
>>> np.around(mlerr1.betas, decimals=4) #doctest: +SKIP
array([[ 6.1492],
[ 4.4024],
[ 1.7784],
[-0.3781],
[ 0.4858],
[ 0.2991]])
>>> "{0:.6f}".format(mlerr1.mean_y) #doctest: +SKIP
'9.549293'
>>> "{0:.6f}".format(mlerr1.std_y) #doctest: +SKIP
'7.038851'
>>> np.around(np.diag(mlerr1.vm), decimals=4) #doctest: +SKIP
array([ 1.0648, 0.0555, 0.0454, 0.0061, 0.0148, 0.0014])
>>> "{0:.4f}".format(mlerr1.sig2[0][0]) #doctest: +SKIP
'32.4069'
>>> "{0:.4f}".format(mlerr1.logll) #doctest: +SKIP
'-4471.4071'
References
----------
.. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
Kluwer Academic Publishers. Dordrecht.
"""
def __init__(self, y, x, w, method='full', epsilon=0.0000001, regimes_att=None):
# set up main regression variables and spatial filters
self.y = y
if regimes_att:
self.x = x.toarray()
else:
self.x = x
self.n, self.k = self.x.shape
self.method = method
self.epsilon = epsilon
W = w.full()[0]
ylag = ps.lag_spatial(w, self.y)
xlag = self.get_x_lag(w, regimes_att)
# call minimizer using concentrated log-likelihood to get lambda
methodML = method.upper()
if methodML in ['FULL', 'ORD']:
if methodML == 'FULL':
res = minimize_scalar(err_c_loglik, 0.0, bounds=(-1.0, 1.0),
args=(self.n, self.y, ylag, self.x,
xlag, W), method='bounded',
tol=epsilon)
elif methodML == 'ORD':
# check on symmetry structure
if w.asymmetry(intrinsic=False) == []:
                    ww = symmetrize(w)
                    WW = ww.todense()
                    evals = la.eigvalsh(WW)
else:
evals = la.eigvals(W)
res = minimize_scalar(
err_c_loglik_ord, 0.0, bounds=(-1.0, 1.0),
args=(self.n, self.y, ylag, self.x,
xlag, evals), method='bounded',
tol=epsilon)
else:
raise Exception, "{0} is an unsupported method".format(method)
self.lam = res.x
# compute full log-likelihood, including constants
ln2pi = np.log(2.0 * np.pi)
llik = -res.fun - self.n / 2.0 * ln2pi - self.n / 2.0
self.logll = llik
# b, residuals and predicted values
ys = self.y - self.lam * ylag
xs = self.x - self.lam * xlag
xsxs = np.dot(xs.T, xs)
xsxsi = np.linalg.inv(xsxs)
xsys = np.dot(xs.T, ys)
b = np.dot(xsxsi, xsys)
self.betas = np.vstack((b, self.lam))
self.u = y - np.dot(self.x, b)
self.predy = self.y - self.u
# residual variance
self.e_filtered = self.u - self.lam * ps.lag_spatial(w, self.u)
self.sig2 = np.dot(self.e_filtered.T, self.e_filtered) / self.n
# variance-covariance matrix betas
varb = self.sig2 * xsxsi
# variance-covariance matrix lambda, sigma
a = -self.lam * W
np.fill_diagonal(a, 1.0)
ai = la.inv(a)
wai = np.dot(W, ai)
tr1 = np.trace(wai)
wai2 = np.dot(wai, wai)
tr2 = np.trace(wai2)
waiTwai = np.dot(wai.T, wai)
tr3 = np.trace(waiTwai)
v1 = np.vstack((tr2 + tr3,
tr1 / self.sig2))
v2 = np.vstack((tr1 / self.sig2,
self.n / (2.0 * self.sig2 ** 2)))
v = np.hstack((v1, v2))
self.vm1 = np.linalg.inv(v)
# create variance matrix for beta, lambda
vv = np.hstack((varb, np.zeros((self.k, 1))))
vv1 = np.hstack(
(np.zeros((1, self.k)), self.vm1[0, 0] * np.ones((1, 1))))
self.vm = np.vstack((vv, vv1))
self._cache = {}
def get_x_lag(self, w, regimes_att):
if regimes_att:
xlag = ps.lag_spatial(w, regimes_att['x'])
xlag = REGI.Regimes_Frame.__init__(self, xlag,
regimes_att['regimes'], constant_regi=None, cols2regi=regimes_att['cols2regi'])[0]
xlag = xlag.toarray()
else:
xlag = ps.lag_spatial(w, self.x)
return xlag
class ML_Error(BaseML_Error):
"""
ML estimation of the spatial lag model with all results and diagnostics;
Anselin (1988) [1]_
Parameters
----------
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, excluding the constant
w : Sparse matrix
Spatial weights sparse matrix
method : string
if 'full', brute force calculation (full matrix expressions)
ir 'ord', Ord eigenvalue method
epsilon : float
tolerance criterion in mimimize_scalar function and inverse_product
spat_diag : boolean
if True, include spatial diagnostics
vm : boolean
if True, include variance-covariance matrix in summary
results
name_y : string
Name of dependent variable for use in output
name_x : list of strings
Names of independent variables for use in output
name_w : string
Name of weights matrix for use in output
name_ds : string
Name of dataset for use in output
Attributes
----------
betas : array
(k+1)x1 array of estimated coefficients (rho first)
lam : float
estimate of spatial autoregressive coefficient
u : array
nx1 array of residuals
e_filtered : array
nx1 array of spatially filtered residuals
predy : array
nx1 array of predicted y values
n : integer
Number of observations
k : integer
Number of variables for which coefficients are estimated
(including the constant, excluding lambda)
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, including the constant
method : string
log Jacobian method
if 'full': brute force (full matrix computations)
epsilon : float
tolerance criterion used in minimize_scalar function and inverse_product
mean_y : float
Mean of dependent variable
std_y : float
Standard deviation of dependent variable
varb : array
Variance covariance matrix (k+1 x k+1) - includes var(lambda)
vm1 : array
variance covariance matrix for lambda, sigma (2 x 2)
sig2 : float
Sigma squared used in computations
logll : float
maximized log-likelihood (including constant terms)
pr2 : float
Pseudo R squared (squared correlation between y and ypred)
utu : float
Sum of squared residuals
std_err : array
1xk array of standard errors of the betas
z_stat : list of tuples
z statistic; each tuple contains the pair (statistic,
p-value), where each is a float
name_y : string
Name of dependent variable for use in output
name_x : list of strings
Names of independent variables for use in output
name_w : string
Name of weights matrix for use in output
name_ds : string
Name of dataset for use in output
title : string
Name of the regression method used
Examples
--------
>>> import numpy as np
>>> import pysal as ps
>>> np.set_printoptions(suppress=True) #prevent scientific format
>>> db = ps.open(ps.examples.get_path("south.dbf"),'r')
>>> ds_name = "south.dbf"
>>> y_name = "HR90"
>>> y = np.array(db.by_col(y_name))
>>> y.shape = (len(y),1)
>>> x_names = ["RD90","PS90","UE90","DV90"]
>>> x = np.array([db.by_col(var) for var in x_names]).T
>>> ww = ps.open(ps.examples.get_path("south_q.gal"))
>>> w = ww.read()
>>> ww.close()
>>> w_name = "south_q.gal"
>>> w.transform = 'r'
>>> mlerr = ML_Error(y,x,w,name_y=y_name,name_x=x_names,\
name_w=w_name,name_ds=ds_name) #doctest: +SKIP
>>> np.around(mlerr.betas, decimals=4) #doctest: +SKIP
array([[ 6.1492],
[ 4.4024],
[ 1.7784],
[-0.3781],
[ 0.4858],
[ 0.2991]])
>>> "{0:.4f}".format(mlerr.lam) #doctest: +SKIP
'0.2991'
>>> "{0:.4f}".format(mlerr.mean_y) #doctest: +SKIP
'9.5493'
>>> "{0:.4f}".format(mlerr.std_y) #doctest: +SKIP
'7.0389'
>>> np.around(np.diag(mlerr.vm), decimals=4) #doctest: +SKIP
array([ 1.0648, 0.0555, 0.0454, 0.0061, 0.0148, 0.0014])
>>> np.around(mlerr.sig2, decimals=4) #doctest: +SKIP
array([[ 32.4069]])
>>> "{0:.4f}".format(mlerr.logll) #doctest: +SKIP
'-4471.4071'
>>> "{0:.4f}".format(mlerr.aic) #doctest: +SKIP
'8952.8141'
>>> "{0:.4f}".format(mlerr.schwarz) #doctest: +SKIP
'8979.0779'
>>> "{0:.4f}".format(mlerr.pr2) #doctest: +SKIP
'0.3058'
>>> "{0:.4f}".format(mlerr.utu) #doctest: +SKIP
'48534.9148'
>>> np.around(mlerr.std_err, decimals=4) #doctest: +SKIP
array([ 1.0319, 0.2355, 0.2132, 0.0784, 0.1217, 0.0378])
>>> np.around(mlerr.z_stat, decimals=4) #doctest: +SKIP
array([[ 5.9593, 0. ],
[ 18.6902, 0. ],
[ 8.3422, 0. ],
[ -4.8233, 0. ],
[ 3.9913, 0.0001],
[ 7.9089, 0. ]])
>>> mlerr.name_y #doctest: +SKIP
'HR90'
>>> mlerr.name_x #doctest: +SKIP
['CONSTANT', 'RD90', 'PS90', 'UE90', 'DV90', 'lambda']
>>> mlerr.name_w #doctest: +SKIP
'south_q.gal'
>>> mlerr.name_ds #doctest: +SKIP
'south.dbf'
>>> mlerr.title #doctest: +SKIP
'MAXIMUM LIKELIHOOD SPATIAL ERROR (METHOD = FULL)'
References
----------
.. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
Kluwer Academic Publishers. Dordrecht.
"""
def __init__(self, y, x, w, method='full', epsilon=0.0000001,
spat_diag=False, vm=False, name_y=None, name_x=None,
name_w=None, name_ds=None):
n = USER.check_arrays(y, x)
USER.check_y(y, n)
USER.check_weights(w, y, w_required=True)
x_constant = USER.check_constant(x)
method = method.upper()
if method in ['FULL', 'ORD']:
BaseML_Error.__init__(self, y=y, x=x_constant,
w=w, method=method, epsilon=epsilon)
self.title = "MAXIMUM LIKELIHOOD SPATIAL ERROR" + \
" (METHOD = " + method + ")"
self.name_ds = USER.set_name_ds(name_ds)
self.name_y = USER.set_name_y(name_y)
self.name_x = USER.set_name_x(name_x, x)
self.name_x.append('lambda')
self.name_w = USER.set_name_w(name_w, w)
self.aic = DIAG.akaike(reg=self)
self.schwarz = DIAG.schwarz(reg=self)
SUMMARY.ML_Error(reg=self, w=w, vm=vm, spat_diag=spat_diag)
else:
raise Exception, "{0} is an unsupported method".format(method)
def err_c_loglik(lam, n, y, ylag, x, xlag, W):
# concentrated log-lik for error model, no constants, brute force
ys = y - lam * ylag
xs = x - lam * xlag
ysys = np.dot(ys.T, ys)
xsxs = np.dot(xs.T, xs)
xsxsi = np.linalg.inv(xsxs)
xsys = np.dot(xs.T, ys)
x1 = np.dot(xsxsi, xsys)
x2 = np.dot(xsys.T, x1)
ee = ysys - x2
sig2 = ee[0][0] / n
nlsig2 = (n / 2.0) * np.log(sig2)
a = -lam * W
np.fill_diagonal(a, 1.0)
jacob = np.log(np.linalg.det(a))
# this is the negative of the concentrated log lik for minimization
clik = nlsig2 - jacob
return clik
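# For reference, restating the code above as a formula: with e the residual
# from the spatially filtered regression, err_c_loglik returns
#     (n/2)*ln(e'e/n) - ln|I - lam*W|
# i.e. the negative of the concentrated log-likelihood, so that
# minimize_scalar effectively maximizes the likelihood over lam.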
def err_c_loglik_ord(lam, n, y, ylag, x, xlag, evals):
# concentrated log-lik for error model, no constants, brute force
ys = y - lam * ylag
xs = x - lam * xlag
ysys = np.dot(ys.T, ys)
xsxs = np.dot(xs.T, xs)
xsxsi = np.linalg.inv(xsxs)
xsys = np.dot(xs.T, ys)
x1 = np.dot(xsxsi, xsys)
x2 = np.dot(xsys.T, x1)
ee = ysys - x2
sig2 = ee[0][0] / n
nlsig2 = (n / 2.0) * np.log(sig2)
revals = lam * evals
jacob = np.log(1 - revals).sum()
if isinstance(jacob, complex):
jacob = jacob.real
# this is the negative of the concentrated log lik for minimization
clik = nlsig2 - jacob
return clik
def _test():
import doctest
start_suppress = np.get_printoptions()['suppress']
np.set_printoptions(suppress=True)
doctest.testmod()
    np.set_printoptions(suppress=start_suppress)
<|file_name|>DescribeAlgorithmRequest.java<|end_file_name|>
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.personalize.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeAlgorithm" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeAlgorithmRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The Amazon Resource Name (ARN) of the algorithm to describe.
* </p>
*/
private String algorithmArn;
/**
* <p>
* The Amazon Resource Name (ARN) of the algorithm to describe.
* </p>
*
* @param algorithmArn
* The Amazon Resource Name (ARN) of the algorithm to describe.
*/
public void setAlgorithmArn(String algorithmArn) {
this.algorithmArn = algorithmArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the algorithm to describe.
* </p>
*
* @return The Amazon Resource Name (ARN) of the algorithm to describe.
*/
public String getAlgorithmArn() {
return this.algorithmArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the algorithm to describe.
* </p>
*
* @param algorithmArn
* The Amazon Resource Name (ARN) of the algorithm to describe.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeAlgorithmRequest withAlgorithmArn(String algorithmArn) {
setAlgorithmArn(algorithmArn);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAlgorithmArn() != null)
sb.append("AlgorithmArn: ").append(getAlgorithmArn());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeAlgorithmRequest == false)
return false;
        DescribeAlgorithmRequest other = (DescribeAlgorithmRequest) obj;
        if (other.getAlgorithmArn() == null ^ this.getAlgorithmArn() == null)
            return false;
        if (other.getAlgorithmArn() != null && other.getAlgorithmArn().equals(this.getAlgorithmArn()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAlgorithmArn() == null) ? 0 : getAlgorithmArn().hashCode());
return hashCode;
}
@Override
public DescribeAlgorithmRequest clone() {
return (DescribeAlgorithmRequest) super.clone();
}
}
<|file_name|>SamplingAccuracyEvaluation.py<|end_file_name|>
import operator
from SamplingAccuracyEvaluation import SamplingAlgorithm as SA
from SamplingAccuracyEvaluation import AccuracyEvaluation as AE
from SamplingAccuracyEvaluation import PrintGraph as PG
from SamplingAccuracyEvaluation import StatisticalCalculation as SC
def populationListGenerate(filePath, target):
print('Generate Population List')
populationList = []
populationFile = open(filePath, 'r')
while True:
line = populationFile.readline()
if not line: break
line_data = line.split(',')
populationList.append(line_data[target])
populationFile.close()
return populationList
def calculateScore(evalList):
score = 0
for i in range(len(evalList)):
if i == 0:
score = score + abs(evalList[i])/4
else:
score = score + abs(evalList[i])/3
return score
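# Worked example (hypothetical values): calculateScore([0.4, 0.3, 0.3])
# = 0.4/4 + 0.3/3 + 0.3/3 = 0.3; lower scores mean the sample tracked the
# population more closely, so run() returns the algorithm with the minimum.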
def run(windowSize, sampleSize, filePath, target=0):
print('############## Sampling Accuracy Evaluation ##############')
count = 1
numOfTrials = 1
jSDPieceCount = 20
pAAPieceCount = 20
print('Window Size: ' ,windowSize)
print('Sample Size: ' ,sampleSize)
print('JSD Piece Count: ' ,jSDPieceCount)
print('PAA Piece Count: ' ,pAAPieceCount)
populationList = populationListGenerate(filePath, target)
windowList = []
accuracyMeasureCount = 3
evalDic = {}
reservoirEvalList = [0.0 for _ in range(accuracyMeasureCount)]
hashEvalList = [0.0 for _ in range(accuracyMeasureCount)]
priorityEvalList = [0.0 for _ in range(accuracyMeasureCount)]
print()
for data in populationList:
windowList.append(data)
if count == windowSize:
print('################## ' + str(numOfTrials) + ' Evaluation Start ####################')
# if numOfTrials == 1: PG.printGraph(windowList, 'Population', numOfTrials)
print()
print(str(numOfTrials)+'_ReservoirSampling')
sampleList = SA.sortedReservoirSam(sampleSize, windowList)
tempEvalList = AE.run(windowList, sampleList, jSDPieceCount, pAAPieceCount)
SC.sumPerIndex(reservoirEvalList, tempEvalList)
# if numOfTrials == 1: PG.printGraph(sampleList, 'Reservoir', numOfTrials)
print()
print(str(numOfTrials)+'_HashSampling')
sampleList = SA.hashSam(sampleSize, windowList)
tempEvalList = AE.run(windowList, sampleList, jSDPieceCount, pAAPieceCount)
SC.sumPerIndex(hashEvalList, tempEvalList)
# if numOfTrials == 1: PG.printGraph(sampleList, 'Hash', numOfTrials)
print()
print(str(numOfTrials)+'_PrioritySampling')
sampleList = SA.sortedPrioritySam(sampleSize, windowList)
tempEvalList = AE.run(windowList, sampleList, jSDPieceCount, pAAPieceCount)
SC.sumPerIndex(priorityEvalList, tempEvalList)
# if numOfTrials == 1: PG.printGraph(sampleList, 'Priority', numOfTrials)
print()
numOfTrials = numOfTrials + 1
count = 0
windowList = []
count = count + 1
    # numOfTrials starts at 1 and is incremented after each completed window,
    # so it ends one past the window count; average over (numOfTrials - 1).
    for i in range(accuracyMeasureCount):
        reservoirEvalList[i] = reservoirEvalList[i] / (numOfTrials - 1)
        hashEvalList[i] = hashEvalList[i] / (numOfTrials - 1)
        priorityEvalList[i] = priorityEvalList[i] / (numOfTrials - 1)
evalDic['RESERVOIR_SAMPLING'] = calculateScore(reservoirEvalList)
evalDic['HASH_SAMPLING'] = calculateScore(hashEvalList)
evalDic['PRIORITY_SAMPLING'] = calculateScore(priorityEvalList)
sortedEvalList = sorted(evalDic.items(), key = operator.itemgetter(1))
    return sortedEvalList[0][0]
<|file_name|>content_type_collection.py<|end_file_name|>
from office365.runtime.client_object_collection import ClientObjectCollection
from office365.sharepoint.content_type import ContentType
class ContentTypeCollection(ClientObjectCollection):
"""Content Type resource collection"""
    def __init__(self, context, resource_path=None):
        super(ContentTypeCollection, self).__init__(context, ContentType, resource_path)
<|file_name|>gen_generic_lpu.py<|end_file_name|>
#!/usr/bin/env python
"""
Create generic LPU and simple pulse input signal.
"""
from itertools import product
import sys
import numpy as np
import h5py
import networkx as nx
def create_lpu_graph(lpu_name, N_sensory, N_local, N_proj):
"""
Create a generic LPU graph.
Creates a graph containing the neuron and synapse parameters for an LPU
containing the specified number of local and projection neurons. The graph
also contains the parameters for a set of sensory neurons that accept
external input. All neurons are either spiking or graded potential neurons;
the Leaky Integrate-and-Fire model is used for the former, while the
Morris-Lecar model is used for the latter (i.e., the neuron's membrane
potential is deemed to be its output rather than the time when it emits an
action potential). Synapses use either the alpha function model or a
conductance-based model.
Parameters
----------
lpu_name : str
Name of LPU. Used in port identifiers.
N_sensory : int
Number of sensory neurons.
N_local : int
Number of local neurons.
N_proj : int
        Number of projection neurons.
Returns
-------
g : networkx.MultiDiGraph
Generated graph.
"""
# Set numbers of neurons:
neu_type = ('sensory', 'local', 'proj')
neu_num = (N_sensory, N_local, N_proj)
# Neuron ids are between 0 and the total number of neurons:
G = nx.MultiDiGraph()
in_port_idx = 0
spk_out_id = 0
gpot_out_id = 0
for (t, n) in zip(neu_type, neu_num):
for i in range(n):
id = t+"_"+str(i)
name = t+"_"+str(i)
# Half of the sensory neurons and projection neurons are
# spiking neurons. The other half are graded potential neurons.
# All local neurons are graded potential only.
if t != 'local' and np.random.rand() < 0.5:
G.add_node(id,
{'class': 'LeakyIAF',
'name': name+'_s',
'initV': np.random.uniform(-60.0,-25.0),
'reset_potential': -67.5489770451,
'resting_potential': 0.0,
'threshold': -25.1355161007,
'resistance': 1002.445570216,
'capacitance': 0.0669810502993,
'circuit': 'proj' if t == 'proj' else 'local'
})
# Projection neurons are all assumed to be attached to output
# ports (which are represented as separate nodes):
if t == 'proj':
G.add_node(id+'_port',
{'class': 'Port',
'name': name+'port',
'port_type': 'spike',
'port_io': 'out',
'selector': '/%s/out/spk/%s' % (lpu_name, str(spk_out_id))
})
G.add_edge(id, id+'_port')
spk_out_id += 1
else:
# An input port node is created for and attached to each non-projection
# neuron with a synapse; this assumes that data propagates from one LPU to
# another as follows:
# LPU0[projection neuron] -> LPU0[output port] -> LPU1[input port] ->
# LPU1[synapse] -> LPU1[non-projection neuron]
G.add_node('in_port'+str(in_port_idx),
{'class': 'Port',
'name': 'in_port'+str(in_port_idx),
'port_type': 'spike',
'port_io': 'in',
'selector': '/%s/in/spk/%s' % (lpu_name, in_port_idx)
})
G.add_node('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
{'class': 'AlphaSynapse',
'name': 'in_port'+str(in_port_idx)+'-'+name,
'ad': 0.19*1000,
'ar': 1.1*100,
'gmax': 0.003*1e-3,
'reverse': 65.0,
'circuit': 'local'
})
G.add_edge('in_port'+str(in_port_idx),
'synapse_'+'in_port'+str(in_port_idx)+'_to_'+id)
G.add_edge('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
id)
in_port_idx += 1
else:
G.add_node(id,
{'class': "MorrisLecar",
'name': name+'_g',
'V1': 30.,
'V2': 15.,
'V3': 0.,
'V4': 30.,
'phi': 0.025,
'offset': 0.,
'V_L': -50.,
'V_Ca': 100.0,
'V_K': -70.0,
'g_Ca': 1.1,
'g_K': 2.0,
'g_L': 0.5,
'initV': -52.14,
'initn': 0.02,
'circuit': 'proj' if t == 'proj' else 'local'
})
# Projection neurons are all assumed to be attached to output
# ports (which are represented as separate nodes):
if t == 'proj':
G.add_node(id+'_port',
{'class': 'Port',
'name': name+'port',
'port_type': 'gpot',
'port_io': 'out',
'selector': '/%s/out/gpot/%s' % (lpu_name, str(gpot_out_id))
})
G.add_edge(id, id+'_port')
gpot_out_id += 1
else:
G.add_node('in_port'+str(in_port_idx),
{'class': 'Port',
'name': 'in_port'+str(in_port_idx),
'port_type': 'gpot',
'port_io': 'in',
'selector': '/%s/in/gpot/%s' % (lpu_name, in_port_idx)
})
G.add_node('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
{'class': 'PowerGPotGPot',
'name': 'in_port'+str(in_port_idx)+'-'+name,
'reverse': -80.0,
'saturation': 0.03*1e-3,
'slope': 0.8*1e-6,
'power': 1.0,
'threshold': -50.0,
'circuit': 'local'
})
G.add_edge('in_port'+str(in_port_idx),
'synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
delay = 0.001)
G.add_edge('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
id)
in_port_idx += 1
# Assume a probability of synapse existence for each group of synapses:
# sensory -> local, sensory -> projection, local -> projection,
# projection -> local:
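# (Read together with the zip below: sensory->local uses p=0.5,
# sensory->proj p=0.1, local->proj p=0.1, and proj->local p=0.3.)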
for r, (i, j) in zip((0.5, 0.1, 0.1, 0.3),
((0, 1), (0, 2), (1, 2), (2, 1))):
for src, tar in product(range(neu_num[i]), range(neu_num[j])):
# Don't connect all neurons:
if np.random.rand() > r: continue
# Connections from the sensory neurons use the alpha function model;
# all other connections use the power_gpot_gpot model:
pre_id = neu_type[i]+"_"+str(src)
post_id = neu_type[j]+"_"+str(tar)
name = G.node[pre_id]['name'] + '-' + G.node[post_id]['name']
synapse_id = 'synapse_' + name
if G.node[pre_id]['class'] == 'LeakyIAF':
G.add_node(synapse_id,
{'class' : 'AlphaSynapse',
'name' : name,
'ar' : 1.1*1e2,
'ad' : 1.9*1e3,
'reverse' : 65.0 if G.node[post_id]['class'] == 'LeakyIAF' else 10.0,
'gmax' : 3*1e-6 if G.node[post_id]['class'] == 'LeakyIAF' else 3.1e-7,
'circuit' : 'local'})
G.add_edge(pre_id, synapse_id)
G.add_edge(synapse_id, post_id)
else:
G.add_node(synapse_id,
{'class' : 'PowerGPotGPot',
'name' : name,
'slope' : 0.8*1e-6,
'threshold' : -50.0,
'power' : 1.0,
'saturation' : 0.03*1e-3,
'reverse' : -100.0,
'circuit' : 'local'})
G.add_edge(pre_id, synapse_id, delay = 0.001)
G.add_edge(synapse_id, post_id)
return G
def create_lpu(file_name, lpu_name, N_sensory, N_local, N_proj):
"""
Create a generic LPU graph.
Creates a GEXF file containing the neuron and synapse parameters for an LPU
containing the specified number of local and projection neurons. The GEXF
file also contains the parameters for a set of sensory neurons that accept
external input. All neurons are either spiking or graded potential neurons;
the Leaky Integrate-and-Fire model is used for the former, while the
Morris-Lecar model is used for the latter (i.e., the neuron's membrane
potential is deemed to be its output rather than the time when it emits an
action potential). Synapses use either the alpha function model or a
conductance-based model.
Parameters
----------
file_name : str
Output GEXF file name.
lpu_name : str
Name of LPU. Used in port identifiers.
N_sensory : int
Number of sensory neurons.
N_local : int
Number of local neurons.
N_proj : int
Number of projection neurons.
Returns
-------
g : networkx.MultiDiGraph
Generated graph.
"""
g = create_lpu_graph(lpu_name, N_sensory, N_local, N_proj)
nx.write_gexf(g, file_name)
def create_input(file_name, N_sensory, dt=1e-4, dur=1.0, start=0.3, stop=0.6, I_max=0.6):
"""
Create input stimulus for sensory neurons in artificial LPU.
Creates an HDF5 file containing input signals for the specified number of
neurons. The signals consist of a rectangular pulse of specified duration
and magnitude.
Parameters
----------
file_name : str
Name of output HDF5 file.
N_sensory : int
Number of sensory neurons receiving the stimulus.<|fim▁hole|> Time resolution of generated signal.
dur : float
Duration of generated signal.
start : float
Start time of signal pulse.
stop : float
Stop time of signal pulse.
I_max : float
Pulse magnitude.
"""
Nt = int(dur/dt)
t = np.arange(0, dt*Nt, dt)
uids = ["sensory_"+str(i) for i in range(N_sensory)]
uids = np.array(uids)
I = np.zeros((Nt, N_sensory), dtype=np.float64)
I[np.logical_and(t>start, t<stop)] = I_max
with h5py.File(file_name, 'w') as f:
f.create_dataset('I/uids', data=uids)
f.create_dataset('I/data', (Nt, N_sensory),
dtype=np.float64,
data=I)
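# Hedged usage sketch (file names and counts are illustrative, not part of
# the original script): build a 90-neuron LPU and a stimulus that drives its
# 30 sensory neurons with a 0.3-0.6 s pulse.
#
#   create_lpu('example_lpu.gexf.gz', 'gen', 30, 30, 30)
#   create_input('example_input.h5', 30, dt=1e-4, dur=1.0,
#                start=0.3, stop=0.6, I_max=0.6)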
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('lpu_file_name', nargs='?', default='generic_lpu.gexf.gz',
help='LPU file name')
parser.add_argument('in_file_name', nargs='?', default='generic_input.h5',
help='Input file name')
parser.add_argument('-s', type=int,
help='Seed random number generator')
parser.add_argument('-l', '--lpu', type=str, default='gen',
help='LPU name')
args = parser.parse_args()
if args.s is not None:
np.random.seed(args.s)
dt = 1e-4
dur = 1.0
start = 0.3
stop = 0.6
I_max = 0.6
neu_num = [np.random.randint(31, 40) for i in range(3)]
create_lpu(args.lpu_file_name, args.lpu, *neu_num)
g = nx.read_gexf(args.lpu_file_name)
create_input(args.in_file_name, neu_num[0], dt, dur, start, stop, I_max)
create_lpu(args.lpu_file_name, args.lpu, *neu_num)<|fim▁end|> | dt : float |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![feature(auto_traits)]
#![deny(warnings)]
use crate::sync::SyncResult;
/// Mononoke Cross Repo sync job
///
/// This is a special job used to tail "small" Mononoke repo into "large" Mononoke repo when
/// small repo is a source of truth (i.e. "hg push" goes directly to the small repo).
/// At the moment there are two main limitations:
/// 1) Syncing of some merge commits is not supported
/// 2) Root commits and their descendants that are not merged into a main line
/// aren't going to be synced. For example,
///
/// ```text
/// O <- main bookmark
/// |
/// O
/// | A <- new_bookmark, that added a new root commit
/// O |
/// ...
///
/// Commit A, its ancestors and new_bookmark aren't going to be synced to the large repo.
/// However if commit A gets merged into a mainline e.g.
/// O <- main bookmark
/// | \
/// O \
/// | A <- new_bookmark, that added a new root commit
/// O |
/// ...
///
/// Then commit A and all of its ancestors WILL be synced to the large repo, however
/// new_bookmark still WILL NOT be synced to the large repo.
///
/// This job does tailing by following bookmark update log of the small repo and replaying
/// each commit into the large repo. Note that some bookmarks called "common_pushrebase_bookmarks"
/// are treated specially, see comments in the code for more details
/// ```
use anyhow::{format_err, Error, Result};
use backsyncer::format_counter as format_backsyncer_counter;
use blobrepo::BlobRepo;
use bookmarks::{BookmarkName, Freshness};
use cached_config::ConfigStore;
use clap_old::ArgMatches;
use cmdlib::{
args::{self, MononokeClapApp, MononokeMatches},
helpers, monitoring,
};
use cmdlib_x_repo::create_commit_syncer_from_matches;
use context::CoreContext;
use cross_repo_sync::{
types::{Source, Target},
CommitSyncer,
};
use derived_data_utils::derive_data_for_csids;
use fbinit::FacebookInit;
use futures::{
compat::Future01CompatExt,
future::{self, try_join},
stream::{self, TryStreamExt},
StreamExt,
};
use futures_stats::TimedFutureExt;
use live_commit_sync_config::{CfgrLiveCommitSyncConfig, LiveCommitSyncConfig};
use mononoke_api_types::InnerRepo;
use mononoke_hg_sync_job_helper_lib::wait_for_latest_log_id_to_be_synced;
use mononoke_types::{ChangesetId, RepositoryId};
use mutable_counters::{MutableCounters, SqlMutableCounters};
use regex::Regex;
use scuba_ext::MononokeScubaSampleBuilder;
use skiplist::SkiplistIndex;
use slog::{debug, error, info, warn};
use std::{collections::HashSet, sync::Arc, time::Duration};
use synced_commit_mapping::SyncedCommitMapping;
mod cli;
mod reporting;
mod setup;
mod sync;
use crate::cli::{
create_app, ARG_BACKSYNC_BACKPRESSURE_REPOS_IDS, ARG_BOOKMARK_REGEX, ARG_CATCH_UP_ONCE,
ARG_DERIVED_DATA_TYPES, ARG_HG_SYNC_BACKPRESSURE, ARG_ONCE, ARG_TAIL, ARG_TARGET_BOOKMARK,
};
use crate::reporting::{add_common_fields, log_bookmark_update_result, log_noop_iteration};
use crate::setup::{get_scuba_sample, get_sleep_secs, get_starting_commit};
use crate::sync::{sync_commit_and_ancestors, sync_single_bookmark_update_log};
fn print_error(ctx: CoreContext, error: &Error) {
error!(ctx.logger(), "{}", error);
for cause in error.chain().skip(1) {
error!(ctx.logger(), "caused by: {}", cause);
}
}
async fn run_in_single_commit_mode<M: SyncedCommitMapping + Clone + 'static>(
ctx: &CoreContext,
bcs: ChangesetId,
commit_syncer: CommitSyncer<M>,
scuba_sample: MononokeScubaSampleBuilder,
source_skiplist_index: Source<Arc<SkiplistIndex>>,
target_skiplist_index: Target<Arc<SkiplistIndex>>,
maybe_bookmark: Option<BookmarkName>,
common_bookmarks: HashSet<BookmarkName>,
) -> Result<(), Error> {
info!(
ctx.logger(),
"Checking if {} is already synced {}->{}",
bcs,
commit_syncer.repos.get_source_repo().get_repoid(),
commit_syncer.repos.get_target_repo().get_repoid()
);
if commit_syncer
.commit_sync_outcome_exists(ctx, Source(bcs))
.await?
{
info!(ctx.logger(), "{} is already synced", bcs);
return Ok(());
}
let res = sync_commit_and_ancestors(
ctx,
&commit_syncer,
None, // from_cs_id,
bcs,
maybe_bookmark,
&source_skiplist_index,
&target_skiplist_index,
&common_bookmarks,
scuba_sample,
)
.await;
if res.is_ok() {
info!(ctx.logger(), "successful sync");
}
res.map(|_| ())
}
enum TailingArgs<M> {
CatchUpOnce(CommitSyncer<M>),
LoopForever(CommitSyncer<M>, ConfigStore),
}
async fn run_in_tailing_mode<
M: SyncedCommitMapping + Clone + 'static,
C: MutableCounters + Clone + Sync + 'static,
>(
ctx: &CoreContext,
mutable_counters: C,
source_skiplist_index: Source<Arc<SkiplistIndex>>,
target_skiplist_index: Target<Arc<SkiplistIndex>>,
common_pushrebase_bookmarks: HashSet<BookmarkName>,
base_scuba_sample: MononokeScubaSampleBuilder,
backpressure_params: BackpressureParams,
derived_data_types: Vec<String>,
tailing_args: TailingArgs<M>,
sleep_secs: u64,
maybe_bookmark_regex: Option<Regex>,
) -> Result<(), Error> {
match tailing_args {
TailingArgs::CatchUpOnce(commit_syncer) => {
let scuba_sample = MononokeScubaSampleBuilder::with_discard();
tail(
&ctx,
&commit_syncer,
&mutable_counters,
scuba_sample,
&common_pushrebase_bookmarks,
&source_skiplist_index,
&target_skiplist_index,
&backpressure_params,
&derived_data_types,
sleep_secs,
&maybe_bookmark_regex,
)
.await?;
}
TailingArgs::LoopForever(commit_syncer, config_store) => {
let live_commit_sync_config =
Arc::new(CfgrLiveCommitSyncConfig::new(ctx.logger(), &config_store)?);
let source_repo_id = commit_syncer.get_source_repo().get_repoid();
loop {
let scuba_sample = base_scuba_sample.clone();
// We only care about public pushes because draft pushes are not in the bookmark
// update log at all.
let enabled =
live_commit_sync_config.push_redirector_enabled_for_public(source_repo_id);
// Pushredirection is enabled - we need to disable forward sync in that case
if enabled {
log_noop_iteration(scuba_sample);
tokio::time::sleep(Duration::new(sleep_secs, 0)).await;
continue;
}
let synced_something = tail(
&ctx,
&commit_syncer,
&mutable_counters,
scuba_sample.clone(),
&common_pushrebase_bookmarks,
&source_skiplist_index,
&target_skiplist_index,
&backpressure_params,
&derived_data_types,
sleep_secs,
&maybe_bookmark_regex,
)
.await?;
if !synced_something {
log_noop_iteration(scuba_sample);
tokio::time::sleep(Duration::new(sleep_secs, 0)).await;
}
}
}
}
Ok(())
}
async fn tail<
M: SyncedCommitMapping + Clone + 'static,
C: MutableCounters + Clone + Sync + 'static,
>(
ctx: &CoreContext,
commit_syncer: &CommitSyncer<M>,
mutable_counters: &C,
mut scuba_sample: MononokeScubaSampleBuilder,
common_pushrebase_bookmarks: &HashSet<BookmarkName>,
source_skiplist_index: &Source<Arc<SkiplistIndex>>,
target_skiplist_index: &Target<Arc<SkiplistIndex>>,
backpressure_params: &BackpressureParams,
derived_data_types: &[String],
sleep_secs: u64,
maybe_bookmark_regex: &Option<Regex>,
) -> Result<bool, Error> {
let source_repo = commit_syncer.get_source_repo();
let target_repo_id = commit_syncer.get_target_repo_id();
let bookmark_update_log = source_repo.bookmark_update_log();
let counter = format_counter(&commit_syncer);
let maybe_start_id = mutable_counters
.get_counter(ctx.clone(), target_repo_id, &counter)
.compat()
.await?;
let start_id = maybe_start_id.ok_or(format_err!("counter not found"))?;
let limit = 10;
let log_entries = bookmark_update_log
.read_next_bookmark_log_entries(ctx.clone(), start_id as u64, limit, Freshness::MaybeStale)
.try_collect::<Vec<_>>()
.await?;
let remaining_entries = commit_syncer
.get_source_repo()
.count_further_bookmark_log_entries(ctx.clone(), start_id as u64, None)
.await?;
if log_entries.is_empty() {
log_noop_iteration(scuba_sample.clone());
Ok(false)
} else {
scuba_sample.add("queue_size", remaining_entries);
info!(ctx.logger(), "queue size is {}", remaining_entries);
for entry in log_entries {
let entry_id = entry.id;
scuba_sample.add("entry_id", entry.id);
let mut skip = false;
if let Some(regex) = maybe_bookmark_regex {
if !regex.is_match(entry.bookmark_name.as_str()) {
skip = true;
}
}
if !skip {
let (stats, res) = sync_single_bookmark_update_log(
&ctx,
&commit_syncer,
entry,
source_skiplist_index,
target_skiplist_index,
&common_pushrebase_bookmarks,
scuba_sample.clone(),
)
.timed()
.await;
log_bookmark_update_result(&ctx, entry_id, scuba_sample.clone(), &res, stats);
let maybe_synced_css = res?;
if let SyncResult::Synced(synced_css) = maybe_synced_css {
derive_data_for_csids(
&ctx,
&commit_syncer.get_target_repo(),
synced_css,
derived_data_types,
)?
.await?;
maybe_apply_backpressure(
ctx,
mutable_counters,
backpressure_params,
commit_syncer.get_target_repo(),
scuba_sample.clone(),
sleep_secs,
)
.await?;
}
} else {
info!(
ctx.logger(),
"skipping log entry #{} for {}", entry.id, entry.bookmark_name
);
let mut scuba_sample = scuba_sample.clone();
scuba_sample.add("source_bookmark_name", format!("{}", entry.bookmark_name));
scuba_sample.add("skipped", true);
scuba_sample.log();
}
// Note that updating the counter might fail after successful sync of the commits.
// This is expected - next run will try to update the counter again without
// re-syncing the commits.
mutable_counters
.set_counter(ctx.clone(), target_repo_id, &counter, entry_id, None)
.compat()
.await?;
}
Ok(true)
}
}
async fn maybe_apply_backpressure<C>(
ctx: &CoreContext,
mutable_counters: &C,
backpressure_params: &BackpressureParams,
target_repo: &BlobRepo,
scuba_sample: MononokeScubaSampleBuilder,
sleep_secs: u64,
) -> Result<(), Error>
where
C: MutableCounters + Clone + Sync + 'static,
{
let target_repo_id = target_repo.get_repoid();
let limit = 10;
loop {
let max_further_entries = stream::iter(&backpressure_params.backsync_repos)
.map(Ok)
.map_ok(|repo| {
async move {
let repo_id = repo.get_repoid();
let backsyncer_counter = format_backsyncer_counter(&target_repo_id);
let maybe_counter = mutable_counters
.get_counter(ctx.clone(), repo_id, &backsyncer_counter)
.compat()
.await?;
match maybe_counter {
Some(counter) => {
let bookmark_update_log = repo.bookmark_update_log();
debug!(ctx.logger(), "repo {}, counter {}", repo_id, counter);
bookmark_update_log
.count_further_bookmark_log_entries(
ctx.clone(),
counter as u64,
None, // exclude_reason
)
.await
}
None => {
warn!(
ctx.logger(),
"backsyncer counter not found for repo {}!", repo_id,
);
Ok(0)
}
}
}
})
.try_buffer_unordered(100)
.try_fold(0, |acc, x| future::ready(Ok(::std::cmp::max(acc, x))))
.await?;
if max_further_entries > limit {
reporting::log_backpressure(ctx, max_further_entries, scuba_sample.clone());
tokio::time::sleep(Duration::from_secs(sleep_secs)).await;
} else {
break;
}
}
if backpressure_params.wait_for_target_repo_hg_sync {
wait_for_latest_log_id_to_be_synced(ctx, target_repo, mutable_counters, sleep_secs).await?;
}
Ok(())
}
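/// Builds the mutable-counter name that tracks how far this tailer has read
/// the source repo's bookmark update log; e.g. a source repo with id 1
/// (illustrative value) yields the counter "xreposync_from_1".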
fn format_counter<M: SyncedCommitMapping + Clone + 'static>(
commit_syncer: &CommitSyncer<M>,
) -> String {
let source_repo_id = commit_syncer.get_source_repo_id();
format!("xreposync_from_{}", source_repo_id)
}
async fn run<'a>(
fb: FacebookInit,
ctx: CoreContext,
matches: &'a MononokeMatches<'a>,
) -> Result<(), Error> {
let config_store = matches.config_store();
let mut scuba_sample = get_scuba_sample(ctx.clone(), &matches);
let counters = args::open_source_sql::<SqlMutableCounters>(fb, config_store, &matches)?;
let source_repo_id = args::get_source_repo_id(config_store, &matches)?;
let target_repo_id = args::get_target_repo_id(config_store, &matches)?;
let logger = ctx.logger();
let source_repo = args::open_repo_with_repo_id(fb, &logger, source_repo_id, &matches);
let target_repo = args::open_repo_with_repo_id(fb, &logger, target_repo_id, &matches);
let (source_repo, target_repo): (InnerRepo, InnerRepo) =
try_join(source_repo, target_repo).await?;
let commit_syncer = create_commit_syncer_from_matches(&ctx, &matches).await?;
let live_commit_sync_config = Arc::new(CfgrLiveCommitSyncConfig::new(&logger, &config_store)?);
let common_commit_sync_config =
live_commit_sync_config.get_common_config(source_repo.blob_repo.get_repoid())?;
let common_bookmarks: HashSet<_> = common_commit_sync_config
.common_pushrebase_bookmarks
.clone()
.into_iter()
.collect();
let source_skiplist_index = Source(source_repo.skiplist_index.clone());
let target_skiplist_index = Target(target_repo.skiplist_index.clone());
match matches.subcommand() {
(ARG_ONCE, Some(sub_m)) => {
add_common_fields(&mut scuba_sample, &commit_syncer);
let maybe_target_bookmark = sub_m
.value_of(ARG_TARGET_BOOKMARK)
.map(BookmarkName::new)
.transpose()?;
let bcs = get_starting_commit(&ctx, &sub_m, source_repo.blob_repo.clone()).await?;
run_in_single_commit_mode(
&ctx,
bcs,
commit_syncer,
scuba_sample,
source_skiplist_index,
target_skiplist_index,
maybe_target_bookmark,
common_bookmarks,
)
.await
}
(ARG_TAIL, Some(sub_m)) => {
add_common_fields(&mut scuba_sample, &commit_syncer);
let sleep_secs = get_sleep_secs(sub_m)?;
let tailing_args = if sub_m.is_present(ARG_CATCH_UP_ONCE) {
TailingArgs::CatchUpOnce(commit_syncer)
} else {
let config_store = matches.config_store();
TailingArgs::LoopForever(commit_syncer, config_store.clone())
};
let backpressure_params = BackpressureParams::new(&ctx, matches, sub_m).await?;
let derived_data_types: Vec<String> = match sub_m.values_of(ARG_DERIVED_DATA_TYPES) {
Some(derived_data_types) => derived_data_types
.into_iter()
.map(String::from)
.collect::<Vec<_>>(),
None => vec![],
};
let maybe_bookmark_regex = match sub_m.value_of(ARG_BOOKMARK_REGEX) {
Some(regex) => Some(Regex::new(regex)?),
None => None,
};
run_in_tailing_mode(
&ctx,
counters,
source_skiplist_index,
target_skiplist_index,
common_bookmarks,
scuba_sample,
backpressure_params,
derived_data_types,
tailing_args,
sleep_secs,
maybe_bookmark_regex,
)
.await
}
(incorrect, _) => Err(format_err!(
"Incorrect mode of operation specified: {}",
incorrect
)),
}
}
fn context_and_matches<'a>(
fb: FacebookInit,
app: MononokeClapApp<'a, '_>,
) -> Result<(CoreContext, MononokeMatches<'a>), Error> {
let matches = app.get_matches(fb)?;
let logger = matches.logger();
let ctx = CoreContext::new_with_logger(fb, logger.clone());
Ok((ctx, matches))
}
struct BackpressureParams {
backsync_repos: Vec<BlobRepo>,
wait_for_target_repo_hg_sync: bool,
}
impl BackpressureParams {
async fn new<'a>(
ctx: &CoreContext,
matches: &'a MononokeMatches<'a>,
sub_m: &'a ArgMatches<'a>,
) -> Result<Self, Error> {
let backsync_repos_ids = sub_m.values_of(ARG_BACKSYNC_BACKPRESSURE_REPOS_IDS);
let backsync_repos = match backsync_repos_ids {
Some(backsync_repos_ids) => {
let backsync_repos = stream::iter(backsync_repos_ids.into_iter().map(|repo_id| {
let repo_id = repo_id.parse::<i32>()?;
Ok(repo_id)
}))
.map_ok(|repo_id| {
args::open_repo_with_repo_id(
ctx.fb,
ctx.logger(),
RepositoryId::new(repo_id),
&matches,
)
})
.try_buffer_unordered(100)
.try_collect::<Vec<_>>();
backsync_repos.await?
}
None => vec![],
};
<|fim▁hole|> })
}
}
#[fbinit::main]
fn main(fb: FacebookInit) -> Result<()> {
let (ctx, matches) = context_and_matches(fb, create_app())?;
let res = helpers::block_execute(
run(fb, ctx.clone(), &matches),
fb,
"x_repo_sync_job",
ctx.logger(),
&matches,
monitoring::AliveService,
);
if let Err(ref err) = res {
print_error(ctx, err);
}
res
}<|fim▁end|> | let wait_for_target_repo_hg_sync = sub_m.is_present(ARG_HG_SYNC_BACKPRESSURE);
Ok(Self {
backsync_repos,
wait_for_target_repo_hg_sync, |
<|file_name|>cookie.js<|end_file_name|><|fim▁begin|>/*\
|*|
|*| :: cookies.js ::
|*|
|*| A complete cookies reader/writer framework with full unicode support.
|*|
|*| Revision #1 - September 4, 2014
|*|
|*| https://developer.mozilla.org/en-US/docs/Web/API/document.cookie
|*| https://developer.mozilla.org/User:fusionchess
|*|
|*| This framework is released under the GNU Public License, version 3 or later.
|*| http://www.gnu.org/licenses/gpl-3.0-standalone.html
|*|
|*| Syntaxes:<|fim▁hole|>|*| * docCookies.getItem(name)
|*| * docCookies.removeItem(name[, path[, domain]])
|*| * docCookies.hasItem(name)
|*| * docCookies.keys()
|*|
\*/
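// Hedged usage sketch (cookie name/value are illustrative):
//   docCookies.setItem('theme', 'dark', Infinity, '/'); // never-expiring cookie
//   docCookies.getItem('theme');                        // -> 'dark'
//   docCookies.hasItem('theme');                        // -> true
//   docCookies.removeItem('theme', '/');                // -> true once removed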
export default {
getItem: function (sKey) {
if (!sKey) { return null; }
return decodeURIComponent(document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + encodeURIComponent(sKey).replace(/[\-\.\+\*]/g, "\\$&") + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1")) || null;
},
setItem: function (sKey, sValue, vEnd, sPath, sDomain, bSecure) {
if (!sKey || /^(?:expires|max\-age|path|domain|secure)$/i.test(sKey)) { return false; }
var sExpires = "";
if (vEnd) {
switch (vEnd.constructor) {
case Number:
sExpires = vEnd === Infinity ? "; expires=Fri, 31 Dec 9999 23:59:59 GMT" : "; max-age=" + vEnd;
break;
case String:
sExpires = "; expires=" + vEnd;
break;
case Date:
sExpires = "; expires=" + vEnd.toUTCString();
break;
}
}
document.cookie = encodeURIComponent(sKey) + "=" + encodeURIComponent(sValue) + sExpires + (sDomain ? "; domain=" + sDomain : "") + (sPath ? "; path=" + sPath : "") + (bSecure ? "; secure" : "");
return true;
},
removeItem: function (sKey, sPath, sDomain) {
if (!this.hasItem(sKey)) { return false; }
document.cookie = encodeURIComponent(sKey) + "=; expires=Thu, 01 Jan 1970 00:00:00 GMT" + (sDomain ? "; domain=" + sDomain : "") + (sPath ? "; path=" + sPath : "");
return true;
},
hasItem: function (sKey) {
if (!sKey) { return false; }
return (new RegExp("(?:^|;\\s*)" + encodeURIComponent(sKey).replace(/[\-\.\+\*]/g, "\\$&") + "\\s*\\=")).test(document.cookie);
},
keys: function () {
var aKeys = document.cookie.replace(/((?:^|\s*;)[^\=]+)(?=;|$)|^\s*|\s*(?:\=[^;]*)?(?:\1|$)/g, "").split(/\s*(?:\=[^;]*)?;\s*/);
for (var nLen = aKeys.length, nIdx = 0; nIdx < nLen; nIdx++) { aKeys[nIdx] = decodeURIComponent(aKeys[nIdx]); }
return aKeys;
}
};<|fim▁end|> | |*|
|*| * docCookies.setItem(name, value[, end[, path[, domain[, secure]]]]) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
import mozdns
from mozdns.domain.models import Domain
from mozdns.view.models import View
from mozdns.mixins import ObjectUrlMixin, DisplayMixin
from mozdns.validation import validate_first_label, validate_name
from mozdns.validation import validate_ttl
class LabelDomainMixin(models.Model):
"""
This class provides common functionality that many DNS record
classes share. This includes a foreign key to the ``domain`` table
and a ``label`` CharField.
If you plan on using the ``unique_together`` constraint on a Model
that inherits from ``LabelDomainMixin``, you must include ``domain`` and
``label`` explicitly if you need them to be part of the constraint.
All common records have a ``fqdn`` field. This field is updated
every time the object is saved::
fqdn = label + domain.name
or if label == ''
fqdn = domain.name
This field makes searching for records much easier. Instead of
looking at ``obj.label`` together with ``obj.domain.name``, you can
just search the ``obj.fqdn`` field.
"the total number of octets that represent a name (i.e., the sum of
all label octets and label lengths) is limited to 255" -- RFC 1035
"""
domain = models.ForeignKey(Domain, null=False, help_text="FQDN of the "
"domain after the short hostname. "
"(Ex: <i>Vlan</i>.<i>DC</i>.mozilla.com)")
# "The length of any one label is limited to between 1 and 63 octets."
# -- RFC 2181
label = models.CharField(max_length=63, blank=True, null=True,
validators=[validate_first_label],
help_text="Short name of the fqdn")
fqdn = models.CharField(max_length=255, blank=True, null=True,
validators=[validate_name], db_index=True)
class Meta:
abstract = True
class ViewMixin(models.Model):
def validate_views(instance, views):
instance.clean_views(views)
views = models.ManyToManyField(
View, blank=True, validators=[validate_views]
)
class Meta:
abstract = True
def clean_views(self, views):
"""cleaned_data is the data that is going to be called with for
updating an existing or creating a new object. Classes should implement
this function according to their specific needs.
"""
for view in views:
if hasattr(self, 'domain'):
self.check_no_ns_soa_condition(self.domain, view=view)
if hasattr(self, 'reverse_domain'):
self.check_no_ns_soa_condition(self.reverse_domain, view=view)
def check_no_ns_soa_condition(self, domain, view=None):
if domain.soa:
fail = False
root_domain = domain.soa.root_domain
if root_domain and not root_domain.nameserver_set.exists():
fail = True
elif (root_domain and view and
not root_domain.nameserver_set.filter(views=view).exists()):
fail = True<|fim▁hole|> "not have an NS record, thus cannot support other "
"records.")
class MozdnsRecord(ViewMixin, DisplayMixin, ObjectUrlMixin):
ttl = models.PositiveIntegerField(default=3600, blank=True, null=True,
validators=[validate_ttl],
help_text="Time to Live of this record")
description = models.CharField(max_length=1000, blank=True, null=True,
help_text="A description of this record.")
# fqdn = label + domain.name <--- see set_fqdn
def __str__(self):
self.set_fqdn()
return self.bind_render_record()
def __repr__(self):
return "<{0} '{1}'>".format(self.rdtype, str(self))
class Meta:
abstract = True
@classmethod
def get_api_fields(cls):
"""
The purpose of this is to help the API decide which fields to expose
to the user when they are creating and updating an Object. This
function should be implemented in inheriting models and overridden to
provide additional fields. Tastypie ignores any relational fields on
the model. See the ModelResource definitions for view and domain
fields.
"""
return ['fqdn', 'ttl', 'description', 'views']
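# Subclasses typically extend this list; a hypothetical address-record
# subclass might return:
#   super(AddressRecord, cls).get_api_fields() + ['ip_str', 'ip_type']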
def clean(self):
# The Nameserver and subclasses of BaseAddressRecord do not call this
# function
self.set_fqdn()
self.check_TLD_condition()
self.check_no_ns_soa_condition(self.domain)
self.check_for_delegation()
if self.rdtype != 'CNAME':
self.check_for_cname()
def delete(self, *args, **kwargs):
if self.domain.soa:
self.domain.soa.schedule_rebuild()
from mozdns.utils import prune_tree
call_prune_tree = kwargs.pop('call_prune_tree', True)
objs_domain = self.domain
super(MozdnsRecord, self).delete(*args, **kwargs)
if call_prune_tree:
prune_tree(objs_domain)
def save(self, *args, **kwargs):
self.full_clean()
if self.pk:
# We need to get the domain from the db. If it's not our current
# domain, call prune_tree on the domain in the db later.
db_domain = self.__class__.objects.get(pk=self.pk).domain
if self.domain == db_domain:
db_domain = None
else:
db_domain = None
no_build = kwargs.pop("no_build", False)
super(MozdnsRecord, self).save(*args, **kwargs)
if no_build:
pass
else:
# Mark the soa
if self.domain.soa:
self.domain.soa.schedule_rebuild()
if db_domain:
from mozdns.utils import prune_tree
prune_tree(db_domain)
def set_fqdn(self):
try:
if self.label == '':
self.fqdn = self.domain.name
else:
self.fqdn = "{0}.{1}".format(self.label,
self.domain.name)
except ObjectDoesNotExist:
return
def check_for_cname(self):
"""
"If a CNAME RR is preent at a node, no other data should be
present; this ensures that the data for a canonical name and its
aliases cannot be different."
-- `RFC 1034 <http://tools.ietf.org/html/rfc1034>`_
Call this function in models that can't overlap with an existing
CNAME.
"""
CNAME = mozdns.cname.models.CNAME
if hasattr(self, 'label'):
if CNAME.objects.filter(domain=self.domain,
label=self.label).exists():
raise ValidationError("A CNAME with this name already exists.")
else:
if CNAME.objects.filter(label='', domain=self.domain).exists():
raise ValidationError("A CNAME with this name already exists.")
def check_for_delegation(self):
"""
If an object's domain is delegated it should not be able to
be changed. Delegated domains cannot have objects created in
them.
"""
try:
if not self.domain.delegated:
return
except ObjectDoesNotExist:
return
if not self.pk: # We don't exist yet.
raise ValidationError("No objects can be created in the {0}"
"domain. It is delegated."
.format(self.domain.name))
def check_TLD_condition(self):
domain = Domain.objects.filter(name=self.fqdn)
if not domain:
return
if self.label == '' and domain[0] == self.domain:
return # This is allowed
else:
raise ValidationError("You cannot create an record that points "
"to the top level of another domain.")<|fim▁end|> | if fail:
raise ValidationError(
"The zone you are trying to assign this record into does " |
<|file_name|>UserServiceImpl.java<|end_file_name|><|fim▁begin|>package org.currconv.services.impl;
import java.util.List;
import org.currconv.dao.UserDao;
import org.currconv.entities.user.User;
import org.currconv.services.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service("userService")
@Transactional
public class UserServiceImpl implements UserService {
@Autowired
private UserDao dao;
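// Spring injects the DAO above; each method below is a thin transactional
// delegate to it (no additional business logic is assumed here).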
public void saveUser(User user) {
dao.saveUser(user);
}
public List<User> findAllUsers(){
return dao.findAllUsers();
}
<|fim▁hole|>}<|fim▁end|> | public User findByUserName(String name){
return dao.findByUserName(name);
}
|