text
stringlengths
2
99.9k
meta
dict
class B(a: A)
{ "pile_set_name": "Github" }
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build ignore /* Input to cgo -godefs. See README.md */ // +godefs map struct_in_addr [4]byte /* in_addr */ // +godefs map struct_in6_addr [16]byte /* in6_addr */ package unix /* #define _WANT_FREEBSD11_STAT 1 #define _WANT_FREEBSD11_STATFS 1 #define _WANT_FREEBSD11_DIRENT 1 #define _WANT_FREEBSD11_KEVENT 1 #include <dirent.h> #include <fcntl.h> #include <poll.h> #include <signal.h> #include <termios.h> #include <stdio.h> #include <unistd.h> #include <sys/capsicum.h> #include <sys/event.h> #include <sys/mman.h> #include <sys/mount.h> #include <sys/param.h> #include <sys/ptrace.h> #include <sys/resource.h> #include <sys/select.h> #include <sys/signal.h> #include <sys/socket.h> #include <sys/stat.h> #include <sys/time.h> #include <sys/types.h> #include <sys/un.h> #include <sys/utsname.h> #include <sys/wait.h> #include <net/bpf.h> #include <net/if.h> #include <net/if_dl.h> #include <net/route.h> #include <netinet/in.h> #include <netinet/icmp6.h> #include <netinet/tcp.h> enum { sizeofPtr = sizeof(void*), }; union sockaddr_all { struct sockaddr s1; // this one gets used for fields struct sockaddr_in s2; // these pad it out struct sockaddr_in6 s3; struct sockaddr_un s4; struct sockaddr_dl s5; }; struct sockaddr_any { struct sockaddr addr; char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; }; // This structure is a duplicate of if_data on FreeBSD 8-STABLE. // See /usr/include/net/if.h. 
struct if_data8 { u_char ifi_type; u_char ifi_physical; u_char ifi_addrlen; u_char ifi_hdrlen; u_char ifi_link_state; u_char ifi_spare_char1; u_char ifi_spare_char2; u_char ifi_datalen; u_long ifi_mtu; u_long ifi_metric; u_long ifi_baudrate; u_long ifi_ipackets; u_long ifi_ierrors; u_long ifi_opackets; u_long ifi_oerrors; u_long ifi_collisions; u_long ifi_ibytes; u_long ifi_obytes; u_long ifi_imcasts; u_long ifi_omcasts; u_long ifi_iqdrops; u_long ifi_noproto; u_long ifi_hwassist; // FIXME: these are now unions, so maybe need to change definitions? #undef ifi_epoch time_t ifi_epoch; #undef ifi_lastchange struct timeval ifi_lastchange; }; // This structure is a duplicate of if_msghdr on FreeBSD 8-STABLE. // See /usr/include/net/if.h. struct if_msghdr8 { u_short ifm_msglen; u_char ifm_version; u_char ifm_type; int ifm_addrs; int ifm_flags; u_short ifm_index; struct if_data8 ifm_data; }; */ import "C" // Machine characteristics const ( SizeofPtr = C.sizeofPtr SizeofShort = C.sizeof_short SizeofInt = C.sizeof_int SizeofLong = C.sizeof_long SizeofLongLong = C.sizeof_longlong ) // Basic types type ( _C_short C.short _C_int C.int _C_long C.long _C_long_long C.longlong ) // Time type Timespec C.struct_timespec type Timeval C.struct_timeval // Processes type Rusage C.struct_rusage type Rlimit C.struct_rlimit type _Gid_t C.gid_t // Files const ( _statfsVersion = C.STATFS_VERSION _dirblksiz = C.DIRBLKSIZ ) type Stat_t C.struct_stat type stat_freebsd11_t C.struct_freebsd11_stat type Statfs_t C.struct_statfs type statfs_freebsd11_t C.struct_freebsd11_statfs type Flock_t C.struct_flock type Dirent C.struct_dirent type dirent_freebsd11 C.struct_freebsd11_dirent type Fsid C.struct_fsid // File system limits const ( PathMax = C.PATH_MAX ) // Advice to Fadvise const ( FADV_NORMAL = C.POSIX_FADV_NORMAL FADV_RANDOM = C.POSIX_FADV_RANDOM FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL FADV_WILLNEED = C.POSIX_FADV_WILLNEED FADV_DONTNEED = C.POSIX_FADV_DONTNEED FADV_NOREUSE = 
C.POSIX_FADV_NOREUSE ) // Sockets type RawSockaddrInet4 C.struct_sockaddr_in type RawSockaddrInet6 C.struct_sockaddr_in6 type RawSockaddrUnix C.struct_sockaddr_un type RawSockaddrDatalink C.struct_sockaddr_dl type RawSockaddr C.struct_sockaddr type RawSockaddrAny C.struct_sockaddr_any type _Socklen C.socklen_t type Linger C.struct_linger type Iovec C.struct_iovec type IPMreq C.struct_ip_mreq type IPMreqn C.struct_ip_mreqn type IPv6Mreq C.struct_ipv6_mreq type Msghdr C.struct_msghdr type Cmsghdr C.struct_cmsghdr type Inet6Pktinfo C.struct_in6_pktinfo type IPv6MTUInfo C.struct_ip6_mtuinfo type ICMPv6Filter C.struct_icmp6_filter const ( SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 SizeofSockaddrAny = C.sizeof_struct_sockaddr_any SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl SizeofLinger = C.sizeof_struct_linger SizeofIPMreq = C.sizeof_struct_ip_mreq SizeofIPMreqn = C.sizeof_struct_ip_mreqn SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq SizeofMsghdr = C.sizeof_struct_msghdr SizeofCmsghdr = C.sizeof_struct_cmsghdr SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter ) // Ptrace requests const ( PTRACE_TRACEME = C.PT_TRACE_ME PTRACE_CONT = C.PT_CONTINUE PTRACE_KILL = C.PT_KILL ) // Events (kqueue, kevent) type Kevent_t C.struct_kevent_freebsd11 // Select type FdSet C.fd_set // Routing and interface messages const ( sizeofIfMsghdr = C.sizeof_struct_if_msghdr SizeofIfMsghdr = C.sizeof_struct_if_msghdr8 sizeofIfData = C.sizeof_struct_if_data SizeofIfData = C.sizeof_struct_if_data8 SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr SizeofRtMsghdr = C.sizeof_struct_rt_msghdr SizeofRtMetrics = C.sizeof_struct_rt_metrics ) type ifMsghdr C.struct_if_msghdr type IfMsghdr 
C.struct_if_msghdr8 type ifData C.struct_if_data type IfData C.struct_if_data8 type IfaMsghdr C.struct_ifa_msghdr type IfmaMsghdr C.struct_ifma_msghdr type IfAnnounceMsghdr C.struct_if_announcemsghdr type RtMsghdr C.struct_rt_msghdr type RtMetrics C.struct_rt_metrics // Berkeley packet filter const ( SizeofBpfVersion = C.sizeof_struct_bpf_version SizeofBpfStat = C.sizeof_struct_bpf_stat SizeofBpfZbuf = C.sizeof_struct_bpf_zbuf SizeofBpfProgram = C.sizeof_struct_bpf_program SizeofBpfInsn = C.sizeof_struct_bpf_insn SizeofBpfHdr = C.sizeof_struct_bpf_hdr SizeofBpfZbufHeader = C.sizeof_struct_bpf_zbuf_header ) type BpfVersion C.struct_bpf_version type BpfStat C.struct_bpf_stat type BpfZbuf C.struct_bpf_zbuf type BpfProgram C.struct_bpf_program type BpfInsn C.struct_bpf_insn type BpfHdr C.struct_bpf_hdr type BpfZbufHeader C.struct_bpf_zbuf_header // Terminal handling type Termios C.struct_termios type Winsize C.struct_winsize // fchmodat-like syscalls. const ( AT_FDCWD = C.AT_FDCWD AT_REMOVEDIR = C.AT_REMOVEDIR AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW ) // poll type PollFd C.struct_pollfd const ( POLLERR = C.POLLERR POLLHUP = C.POLLHUP POLLIN = C.POLLIN POLLINIGNEOF = C.POLLINIGNEOF POLLNVAL = C.POLLNVAL POLLOUT = C.POLLOUT POLLPRI = C.POLLPRI POLLRDBAND = C.POLLRDBAND POLLRDNORM = C.POLLRDNORM POLLWRBAND = C.POLLWRBAND POLLWRNORM = C.POLLWRNORM ) // Capabilities type CapRights C.struct_cap_rights // Uname type Utsname C.struct_utsname
{ "pile_set_name": "Github" }
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Runtime.InteropServices.WindowsRuntime; using Windows.ApplicationModel; using Windows.ApplicationModel.Activation; using Windows.Foundation; using Windows.Foundation.Collections; using Windows.UI.Xaml; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml.Controls.Primitives; using Windows.UI.Xaml.Data; using Windows.UI.Xaml.Input; using Windows.UI.Xaml.Media; using Windows.UI.Xaml.Navigation; namespace IoTConnector { /// <summary> /// Provides application-specific behavior to supplement the default Application class. /// </summary> sealed partial class App : Application { /// <summary> /// Initializes the singleton application object. This is the first line of authored code /// executed, and as such is the logical equivalent of main() or WinMain(). /// </summary> public App() { this.InitializeComponent(); this.Suspending += OnSuspending; } /// <summary> /// Invoked when the application is launched normally by the end user. Other entry points /// will be used such as when the application is launched to open a specific file. 
/// </summary> /// <param name="e">Details about the launch request and process.</param> protected override void OnLaunched(LaunchActivatedEventArgs e) { #if DEBUG if (System.Diagnostics.Debugger.IsAttached) { this.DebugSettings.EnableFrameRateCounter = true; } #endif Frame rootFrame = Window.Current.Content as Frame; // Do not repeat app initialization when the Window already has content, // just ensure that the window is active if (rootFrame == null) { // Create a Frame to act as the navigation context and navigate to the first page rootFrame = new Frame(); rootFrame.NavigationFailed += OnNavigationFailed; if (e.PreviousExecutionState == ApplicationExecutionState.Terminated) { //TODO: Load state from previously suspended application } // Place the frame in the current Window Window.Current.Content = rootFrame; } if (e.PrelaunchActivated == false) { if (rootFrame.Content == null) { // When the navigation stack isn't restored navigate to the first page, // configuring the new page by passing required information as a navigation // parameter rootFrame.Navigate(typeof(MainPage), e.Arguments); } // Ensure the current window is active Window.Current.Activate(); } } /// <summary> /// Invoked when Navigation to a certain page fails /// </summary> /// <param name="sender">The Frame which failed navigation</param> /// <param name="e">Details about the navigation failure</param> void OnNavigationFailed(object sender, NavigationFailedEventArgs e) { throw new Exception("Failed to load Page " + e.SourcePageType.FullName); } /// <summary> /// Invoked when application execution is being suspended. Application state is saved /// without knowing whether the application will be terminated or resumed with the contents /// of memory still intact. 
/// </summary> /// <param name="sender">The source of the suspend request.</param> /// <param name="e">Details about the suspend request.</param> private void OnSuspending(object sender, SuspendingEventArgs e) { var deferral = e.SuspendingOperation.GetDeferral(); //TODO: Save application state and stop any background activity deferral.Complete(); } } }
{ "pile_set_name": "Github" }
/* * General DV muxer/demuxer * Copyright (c) 2003 Roman Shaposhnik * * Many thanks to Dan Dennedy <[email protected]> for providing wealth * of DV technical info. * * Raw DV format * Copyright (c) 2002 Fabrice Bellard * * 50 Mbps (DVCPRO50) support * Copyright (c) 2006 Daniel Maas <[email protected]> * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include <time.h> #include <stdarg.h> #include "avformat.h" #include "internal.h" #include "libavcodec/dv_profile.h" #include "libavcodec/dv.h" #include "dv.h" #include "libavutil/avassert.h" #include "libavutil/fifo.h" #include "libavutil/mathematics.h" #include "libavutil/intreadwrite.h" #include "libavutil/opt.h" #include "libavutil/timecode.h" #define MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32-bit audio struct DVMuxContext { AVClass *av_class; const AVDVProfile* sys; /* current DV profile, e.g.: 525/60, 625/50 */ int n_ast; /* number of stereo audio streams (up to 2) */ AVStream *ast[2]; /* stereo audio streams */ AVFifoBuffer *audio_data[2]; /* FIFO for storing excessive amounts of PCM */ int frames; /* current frame number */ int64_t start_time; /* recording start time */ int has_audio; /* frame under construction has audio */ int has_video; /* frame under construction has video */ uint8_t frame_buf[DV_MAX_FRAME_SIZE]; /* 
frame under construction */ AVTimecode tc; /* timecode context */ }; static const int dv_aaux_packs_dist[12][9] = { { 0xff, 0xff, 0xff, 0x50, 0x51, 0x52, 0x53, 0xff, 0xff }, { 0x50, 0x51, 0x52, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff }, { 0xff, 0xff, 0xff, 0x50, 0x51, 0x52, 0x53, 0xff, 0xff }, { 0x50, 0x51, 0x52, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff }, { 0xff, 0xff, 0xff, 0x50, 0x51, 0x52, 0x53, 0xff, 0xff }, { 0x50, 0x51, 0x52, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff }, { 0xff, 0xff, 0xff, 0x50, 0x51, 0x52, 0x53, 0xff, 0xff }, { 0x50, 0x51, 0x52, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff }, { 0xff, 0xff, 0xff, 0x50, 0x51, 0x52, 0x53, 0xff, 0xff }, { 0x50, 0x51, 0x52, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff }, { 0xff, 0xff, 0xff, 0x50, 0x51, 0x52, 0x53, 0xff, 0xff }, { 0x50, 0x51, 0x52, 0x53, 0xff, 0xff, 0xff, 0xff, 0xff }, }; static int dv_audio_frame_size(const AVDVProfile* sys, int frame, int sample_rate) { if ((sys->time_base.den == 25 || sys->time_base.den == 50) && sys->time_base.num == 1) { if (sample_rate == 32000) return 1280; else if (sample_rate == 44100) return 1764; else return 1920; } av_assert0(sample_rate == 48000); return sys->audio_samples_dist[frame % (sizeof(sys->audio_samples_dist) / sizeof(sys->audio_samples_dist[0]))]; } static int dv_write_pack(enum dv_pack_type pack_id, DVMuxContext *c, uint8_t* buf, ...) 
{ struct tm tc; time_t ct; uint32_t timecode; va_list ap; int audio_type = 0; int channel; buf[0] = (uint8_t)pack_id; switch (pack_id) { case dv_timecode: timecode = av_timecode_get_smpte_from_framenum(&c->tc, c->frames); timecode |= 1<<23 | 1<<15 | 1<<7 | 1<<6; // biphase and binary group flags AV_WB32(buf + 1, timecode); break; case dv_audio_source: /* AAUX source pack */ va_start(ap, buf); channel = va_arg(ap, int); if (c->ast[channel]->codecpar->sample_rate == 44100) { audio_type = 1; } else if (c->ast[channel]->codecpar->sample_rate == 32000) audio_type = 2; buf[1] = (1 << 7) | /* locked mode -- SMPTE only supports locked mode */ (1 << 6) | /* reserved -- always 1 */ (dv_audio_frame_size(c->sys, c->frames, c->ast[channel]->codecpar->sample_rate) - c->sys->audio_min_samples[audio_type]); /* # of samples */ buf[2] = (0 << 7) | /* multi-stereo */ (0 << 5) | /* #of audio channels per block: 0 -- 1 channel */ (0 << 4) | /* pair bit: 0 -- one pair of channels */ !!va_arg(ap, int); /* audio mode */ buf[3] = (1 << 7) | /* res */ (1 << 6) | /* multi-language flag */ (c->sys->dsf << 5) | /* system: 60fields/50fields */ (c->sys->n_difchan & 2); /* definition: 0 -- 25Mbps, 2 -- 50Mbps */ buf[4] = (1 << 7) | /* emphasis: 1 -- off */ (0 << 6) | /* emphasis time constant: 0 -- reserved */ (audio_type << 3) | /* frequency: 0 -- 48kHz, 1 -- 44,1kHz, 2 -- 32kHz */ 0; /* quantization: 0 -- 16-bit linear, 1 -- 12-bit nonlinear */ va_end(ap); break; case dv_audio_control: buf[1] = (0 << 6) | /* copy protection: 0 -- unrestricted */ (1 << 4) | /* input source: 1 -- digital input */ (3 << 2) | /* compression: 3 -- no information */ 0; /* misc. info/SMPTE emphasis off */ buf[2] = (1 << 7) | /* recording start point: 1 -- no */ (1 << 6) | /* recording end point: 1 -- no */ (1 << 3) | /* recording mode: 1 -- original */ 7; buf[3] = (1 << 7) | /* direction: 1 -- forward */ (c->sys->pix_fmt == AV_PIX_FMT_YUV420P ? 
0x20 : /* speed */ c->sys->ltc_divisor * 4); buf[4] = (1 << 7) | /* reserved -- always 1 */ 0x7f; /* genre category */ break; case dv_audio_recdate: case dv_video_recdate: /* VAUX recording date */ ct = c->start_time + av_rescale_rnd(c->frames, c->sys->time_base.num, c->sys->time_base.den, AV_ROUND_DOWN); ff_brktimegm(ct, &tc); buf[1] = 0xff; /* ds, tm, tens of time zone, units of time zone */ /* 0xff is very likely to be "unknown" */ buf[2] = (3 << 6) | /* reserved -- always 1 */ ((tc.tm_mday / 10) << 4) | /* Tens of day */ (tc.tm_mday % 10); /* Units of day */ buf[3] = /* we set high 4 bits to 0, shouldn't we set them to week? */ ((tc.tm_mon / 10) << 4) | /* Tens of month */ (tc.tm_mon % 10); /* Units of month */ buf[4] = (((tc.tm_year % 100) / 10) << 4) | /* Tens of year */ (tc.tm_year % 10); /* Units of year */ break; case dv_audio_rectime: /* AAUX recording time */ case dv_video_rectime: /* VAUX recording time */ ct = c->start_time + av_rescale_rnd(c->frames, c->sys->time_base.num, c->sys->time_base.den, AV_ROUND_DOWN); ff_brktimegm(ct, &tc); buf[1] = (3 << 6) | /* reserved -- always 1 */ 0x3f; /* tens of frame, units of frame: 0x3f - "unknown" ? 
*/ buf[2] = (1 << 7) | /* reserved -- always 1 */ ((tc.tm_sec / 10) << 4) | /* Tens of seconds */ (tc.tm_sec % 10); /* Units of seconds */ buf[3] = (1 << 7) | /* reserved -- always 1 */ ((tc.tm_min / 10) << 4) | /* Tens of minutes */ (tc.tm_min % 10); /* Units of minutes */ buf[4] = (3 << 6) | /* reserved -- always 1 */ ((tc.tm_hour / 10) << 4) | /* Tens of hours */ (tc.tm_hour % 10); /* Units of hours */ break; default: buf[1] = buf[2] = buf[3] = buf[4] = 0xff; } return 5; } static void dv_inject_audio(DVMuxContext *c, int channel, uint8_t* frame_ptr) { int i, j, d, of, size; size = 4 * dv_audio_frame_size(c->sys, c->frames, c->ast[channel]->codecpar->sample_rate); frame_ptr += channel * c->sys->difseg_size * 150 * 80; for (i = 0; i < c->sys->difseg_size; i++) { frame_ptr += 6 * 80; /* skip DIF segment header */ for (j = 0; j < 9; j++) { dv_write_pack(dv_aaux_packs_dist[i][j], c, &frame_ptr[3], channel, i >= c->sys->difseg_size/2); for (d = 8; d < 80; d+=2) { of = c->sys->audio_shuffle[i][j] + (d - 8)/2 * c->sys->audio_stride; if (of*2 >= size) continue; frame_ptr[d] = *av_fifo_peek2(c->audio_data[channel], of*2+1); // FIXME: maybe we have to admit frame_ptr[d+1] = *av_fifo_peek2(c->audio_data[channel], of*2); // that DV is a big-endian PCM } frame_ptr += 16 * 80; /* 15 Video DIFs + 1 Audio DIF */ } } } static void dv_inject_metadata(DVMuxContext *c, uint8_t* frame) { int j, k; uint8_t* buf; for (buf = frame; buf < frame + c->sys->frame_size; buf += 150 * 80) { /* DV subcode: 2nd and 3d DIFs */ for (j = 80; j < 80 * 3; j += 80) { for (k = 6; k < 6 * 8; k += 8) dv_write_pack(dv_timecode, c, &buf[j+k]); if (((long)(buf-frame)/(c->sys->frame_size/(c->sys->difseg_size*c->sys->n_difchan))%c->sys->difseg_size) > 5) { /* FIXME: is this really needed ? 
*/ dv_write_pack(dv_video_recdate, c, &buf[j+14]); dv_write_pack(dv_video_rectime, c, &buf[j+22]); dv_write_pack(dv_video_recdate, c, &buf[j+38]); dv_write_pack(dv_video_rectime, c, &buf[j+46]); } } /* DV VAUX: 4th, 5th and 6th 3DIFs */ for (j = 80*3 + 3; j < 80*6; j += 80) { dv_write_pack(dv_video_recdate, c, &buf[j+5*2]); dv_write_pack(dv_video_rectime, c, &buf[j+5*3]); dv_write_pack(dv_video_recdate, c, &buf[j+5*11]); dv_write_pack(dv_video_rectime, c, &buf[j+5*12]); } } } /* * The following 3 functions constitute our interface to the world */ static int dv_assemble_frame(AVFormatContext *s, DVMuxContext *c, AVStream* st, uint8_t* data, int data_size, uint8_t** frame) { int i, reqasize; *frame = &c->frame_buf[0]; switch (st->codecpar->codec_type) { case AVMEDIA_TYPE_VIDEO: /* FIXME: we have to have more sensible approach than this one */ if (c->has_video) av_log(s, AV_LOG_ERROR, "Can't process DV frame #%d. Insufficient audio data or severe sync problem.\n", c->frames); if (data_size != c->sys->frame_size) { av_log(s, AV_LOG_ERROR, "Unexpected frame size, %d != %d\n", data_size, c->sys->frame_size); return AVERROR(ENOSYS); } memcpy(*frame, data, c->sys->frame_size); c->has_video = 1; break; case AVMEDIA_TYPE_AUDIO: for (i = 0; i < c->n_ast && st != c->ast[i]; i++); /* FIXME: we have to have more sensible approach than this one */ if (av_fifo_size(c->audio_data[i]) + data_size >= 100*MAX_AUDIO_FRAME_SIZE) av_log(s, AV_LOG_ERROR, "Can't process DV frame #%d. Insufficient video data or severe sync problem.\n", c->frames); av_fifo_generic_write(c->audio_data[i], data, data_size, NULL); reqasize = 4 * dv_audio_frame_size(c->sys, c->frames, st->codecpar->sample_rate); /* Let us see if we've got enough audio for one DV frame. */ c->has_audio |= ((reqasize <= av_fifo_size(c->audio_data[i])) << i); break; default: break; } /* Let us see if we have enough data to construct one DV frame. 
*/ if (c->has_video == 1 && c->has_audio + 1 == 1 << c->n_ast) { dv_inject_metadata(c, *frame); c->has_audio = 0; for (i=0; i < c->n_ast; i++) { dv_inject_audio(c, i, *frame); reqasize = 4 * dv_audio_frame_size(c->sys, c->frames, c->ast[i]->codecpar->sample_rate); av_fifo_drain(c->audio_data[i], reqasize); c->has_audio |= ((reqasize <= av_fifo_size(c->audio_data[i])) << i); } c->has_video = 0; c->frames++; return c->sys->frame_size; } return 0; } static DVMuxContext* dv_init_mux(AVFormatContext* s) { DVMuxContext *c = s->priv_data; AVStream *vst = NULL; int i; /* we support at most 1 video and 2 audio streams */ if (s->nb_streams > 3) return NULL; c->n_ast = 0; c->ast[0] = c->ast[1] = NULL; /* We have to sort out where audio and where video stream is */ for (i=0; i<s->nb_streams; i++) { switch (s->streams[i]->codecpar->codec_type) { case AVMEDIA_TYPE_VIDEO: if (vst) return NULL; vst = s->streams[i]; break; case AVMEDIA_TYPE_AUDIO: if (c->n_ast > 1) return NULL; c->ast[c->n_ast++] = s->streams[i]; break; default: goto bail_out; } } /* Some checks -- DV format is very picky about its incoming streams */ if (!vst || vst->codecpar->codec_id != AV_CODEC_ID_DVVIDEO) goto bail_out; for (i=0; i<c->n_ast; i++) { if (c->ast[i]) { if(c->ast[i]->codecpar->codec_id != AV_CODEC_ID_PCM_S16LE || c->ast[i]->codecpar->channels != 2) goto bail_out; if (c->ast[i]->codecpar->sample_rate != 48000 && c->ast[i]->codecpar->sample_rate != 44100 && c->ast[i]->codecpar->sample_rate != 32000 ) goto bail_out; } } c->sys = av_dv_codec_profile2(vst->codecpar->width, vst->codecpar->height, vst->codecpar->format, vst->time_base); if (!c->sys) goto bail_out; if ((c->sys->time_base.den != 25 && c->sys->time_base.den != 50) || c->sys->time_base.num != 1) { if (c->ast[0] && c->ast[0]->codecpar->sample_rate != 48000) goto bail_out; if (c->ast[1] && c->ast[1]->codecpar->sample_rate != 48000) goto bail_out; } if ((c->n_ast > 1) && (c->sys->n_difchan < 2)) { /* only 1 stereo pair is allowed in 25Mbps mode 
*/ goto bail_out; } /* Ok, everything seems to be in working order */ c->frames = 0; c->has_audio = 0; c->has_video = 0; ff_parse_creation_time_metadata(s, &c->start_time, 1); for (i=0; i < c->n_ast; i++) { if (c->ast[i] && !(c->audio_data[i]=av_fifo_alloc_array(100, MAX_AUDIO_FRAME_SIZE))) { while (i > 0) { i--; av_fifo_freep(&c->audio_data[i]); } goto bail_out; } } return c; bail_out: return NULL; } static void dv_delete_mux(DVMuxContext *c) { int i; for (i=0; i < c->n_ast; i++) av_fifo_freep(&c->audio_data[i]); } static int dv_write_header(AVFormatContext *s) { AVRational rate; DVMuxContext *dvc = s->priv_data; AVDictionaryEntry *tcr = av_dict_get(s->metadata, "timecode", NULL, 0); if (!dv_init_mux(s)) { av_log(s, AV_LOG_ERROR, "Can't initialize DV format!\n" "Make sure that you supply exactly two streams:\n" " video: 25fps or 29.97fps, audio: 2ch/48|44|32kHz/PCM\n" " (50Mbps allows an optional second audio stream)\n"); return -1; } rate.num = dvc->sys->ltc_divisor; rate.den = 1; if (!tcr) { // no global timecode, look into the streams int i; for (i = 0; i < s->nb_streams; i++) { tcr = av_dict_get(s->streams[i]->metadata, "timecode", NULL, 0); if (tcr) break; } } if (tcr && av_timecode_init_from_string(&dvc->tc, rate, tcr->value, s) >= 0) return 0; return av_timecode_init(&dvc->tc, rate, 0, 0, s); } static int dv_write_packet(struct AVFormatContext *s, AVPacket *pkt) { uint8_t* frame; int fsize; fsize = dv_assemble_frame(s, s->priv_data, s->streams[pkt->stream_index], pkt->data, pkt->size, &frame); if (fsize > 0) { avio_write(s->pb, frame, fsize); } return 0; } /* * We might end up with some extra A/V data without matching counterpart. * E.g. video data without enough audio to write the complete frame. * Currently we simply drop the last frame. 
I don't know whether this * is the best strategy of all */ static int dv_write_trailer(struct AVFormatContext *s) { dv_delete_mux(s->priv_data); return 0; } AVOutputFormat ff_dv_muxer = { .name = "dv", .long_name = NULL_IF_CONFIG_SMALL("DV (Digital Video)"), .extensions = "dv", .priv_data_size = sizeof(DVMuxContext), .audio_codec = AV_CODEC_ID_PCM_S16LE, .video_codec = AV_CODEC_ID_DVVIDEO, .write_header = dv_write_header, .write_packet = dv_write_packet, .write_trailer = dv_write_trailer, };
{ "pile_set_name": "Github" }
; ; BIND data file for local loopback interface ; $TTL 604800 @ IN SOA localhost. root.localhost. ( 1 ; Serial 604800 ; Refresh 86400 ; Retry 2419200 ; Expire 604800 ) ; Negative Cache TTL ; @ IN NS localhost. @ IN A 127.0.0.1 @ IN AAAA ::1
{ "pile_set_name": "Github" }
<div class='fossil-doc' data-title='mime - Mime'> <style> HTML { background: #FFFFFF; color: black; } BODY { background: #FFFFFF; color: black; } DIV.doctools { margin-left: 10%; margin-right: 10%; } DIV.doctools H1,DIV.doctools H2 { margin-left: -5%; } H1, H2, H3, H4 { margin-top: 1em; font-family: sans-serif; font-size: large; color: #005A9C; background: transparent; text-align: left; } H1.doctools_title { text-align: center; } UL,OL { margin-right: 0em; margin-top: 3pt; margin-bottom: 3pt; } UL LI { list-style: disc; } OL LI { list-style: decimal; } DT { padding-top: 1ex; } UL.doctools_toc,UL.doctools_toc UL, UL.doctools_toc UL UL { font: normal 12pt/14pt sans-serif; list-style: none; } LI.doctools_section, LI.doctools_subsection { list-style: none; margin-left: 0em; text-indent: 0em; padding: 0em; } PRE { display: block; font-family: monospace; white-space: pre; margin: 0%; padding-top: 0.5ex; padding-bottom: 0.5ex; padding-left: 1ex; padding-right: 1ex; width: 100%; } PRE.doctools_example { color: black; background: #f5dcb3; border: 1px solid black; } UL.doctools_requirements LI, UL.doctools_syntax LI { list-style: none; margin-left: 0em; text-indent: 0em; padding: 0em; } DIV.doctools_synopsis { color: black; background: #80ffff; border: 1px solid black; font-family: serif; margin-top: 1em; margin-bottom: 1em; } UL.doctools_syntax { margin-top: 1em; border-top: 1px solid black; } UL.doctools_requirements { margin-bottom: 1em; border-bottom: 1px solid black; } </style> <hr> [ <a href="../../../../toc.html">Main Table Of Contents</a> | <a href="../../../toc.html">Table Of Contents</a> | <a href="../../../../index.html">Keyword Index</a> | <a href="../../../../toc0.html">Categories</a> | <a href="../../../../toc1.html">Modules</a> | <a href="../../../../toc2.html">Applications</a> ] <hr> <div class="doctools"> <h1 class="doctools_title">mime(n) 1.6 tcllib &quot;Mime&quot;</h1> <div id="name" class="doctools_section"><h2><a name="name">Name</a></h2> <p>mime - 
Manipulation of MIME body parts</p> </div> <div id="toc" class="doctools_section"><h2><a name="toc">Table Of Contents</a></h2> <ul class="doctools_toc"> <li class="doctools_section"><a href="#toc">Table Of Contents</a></li> <li class="doctools_section"><a href="#synopsis">Synopsis</a></li> <li class="doctools_section"><a href="#section1">Description</a></li> <li class="doctools_section"><a href="#section2">KNOWN BUGS</a></li> <li class="doctools_section"><a href="#section3">Bugs, Ideas, Feedback</a></li> <li class="doctools_section"><a href="#see-also">See Also</a></li> <li class="doctools_section"><a href="#keywords">Keywords</a></li> <li class="doctools_section"><a href="#category">Category</a></li> <li class="doctools_section"><a href="#copyright">Copyright</a></li> </ul> </div> <div id="synopsis" class="doctools_section"><h2><a name="synopsis">Synopsis</a></h2> <div class="doctools_synopsis"> <ul class="doctools_requirements"> <li>package require <b class="pkgname">Tcl 8.5</b></li> <li>package require <b class="pkgname">mime <span class="opt">?1.6?</span></b></li> </ul> <ul class="doctools_syntax"> <li><a href="#1"><b class="cmd">::mime::initialize</b> <span class="opt">?<b class="option">-canonical</b> <i class="arg">type/subtype</i> <span class="opt">?<b class="option">-param</b> {<i class="arg">key value</i>}...?</span> <span class="opt">?<b class="option">-encoding</b> <i class="arg">value</i>?</span> <span class="opt">?<b class="option">-header</b> {<i class="arg">key value</i>}...?</span>?</span> (<b class="option">-file</b> <i class="arg">name</i> | <b class="option">-string</b> <i class="arg">value</i> | <b class="option">-parts</b> {<i class="arg">token1</i> ... 
<i class="arg">tokenN</i>})</a></li> <li><a href="#2"><b class="cmd">::mime::finalize</b> <i class="arg">token</i> <span class="opt">?<b class="option">-subordinates</b> <b class="const">all</b> | <b class="const">dynamic</b> | <b class="const">none</b>?</span></a></li> <li><a href="#3"><b class="cmd">::mime::getproperty</b> <i class="arg">token</i> <span class="opt">?<i class="arg">property</i> | <b class="option">-names</b>?</span></a></li> <li><a href="#4"><b class="cmd">::mime::getheader</b> <i class="arg">token</i> <span class="opt">?<i class="arg">key</i> | <b class="option">-names</b>?</span></a></li> <li><a href="#5"><b class="cmd">::mime::setheader</b> <i class="arg">token</i> <i class="arg">key value</i> <span class="opt">?<b class="option">-mode</b> <b class="const">write</b> | <b class="const">append</b> | <b class="const">delete</b>?</span></a></li> <li><a href="#6"><b class="cmd">::mime::getbody</b> <i class="arg">token</i> <span class="opt">?<b class="option">-decode</b>?</span> <span class="opt">?<b class="option">-command</b> <i class="arg">callback</i> <span class="opt">?<b class="option">-blocksize</b> <i class="arg">octets</i>?</span>?</span></a></li> <li><a href="#7"><b class="cmd">::mime::copymessage</b> <i class="arg">token</i> <i class="arg">channel</i></a></li> <li><a href="#8"><b class="cmd">::mime::buildmessage</b> <i class="arg">token</i></a></li> <li><a href="#9"><b class="cmd">::mime::parseaddress</b> <i class="arg">string</i></a></li> <li><a href="#10"><b class="cmd">::mime::parsedatetime</b> (<i class="arg">string</i> | <b class="option">-now</b>) <i class="arg">property</i></a></li> <li><a href="#11"><b class="cmd">::mime::mapencoding</b> <i class="arg">encoding_name</i></a></li> <li><a href="#12"><b class="cmd">::mime::reversemapencoding</b> <i class="arg">charset_type</i></a></li> </ul> </div> </div> <div id="section1" class="doctools_section"><h2><a name="section1">Description</a></h2> <p>The <b class="package">mime</b> library 
package provides the commands to create and manipulate MIME body parts.</p> <dl class="doctools_definitions"> <dt><a name="1"><b class="cmd">::mime::initialize</b> <span class="opt">?<b class="option">-canonical</b> <i class="arg">type/subtype</i> <span class="opt">?<b class="option">-param</b> {<i class="arg">key value</i>}...?</span> <span class="opt">?<b class="option">-encoding</b> <i class="arg">value</i>?</span> <span class="opt">?<b class="option">-header</b> {<i class="arg">key value</i>}...?</span>?</span> (<b class="option">-file</b> <i class="arg">name</i> | <b class="option">-string</b> <i class="arg">value</i> | <b class="option">-parts</b> {<i class="arg">token1</i> ... <i class="arg">tokenN</i>})</a></dt> <dd><p>This command creates a MIME part and returns a token representing it.</p> <ul class="doctools_itemized"> <li><p>If the <b class="option">-canonical</b> option is present, then the body is in canonical (raw) form and is found by consulting either the <b class="option">-file</b>, <b class="option">-string</b>, or <b class="option">-parts</b> option.</p> <p>In addition, both the <b class="option">-param</b> and <b class="option">-header</b> options may occur zero or more times to specify <b class="const">Content-Type</b> parameters (e.g., <b class="const">charset</b>) and header keyword/values (e.g., <b class="const">Content-Disposition</b>), respectively.</p> <p>Also, <b class="option">-encoding</b>, if present, specifies the <b class="const">Content-Transfer-Encoding</b> when copying the body.</p></li> <li><p>If the <b class="option">-canonical</b> option is not present, then the MIME part contained in either the <b class="option">-file</b> or the <b class="option">-string</b> option is parsed, dynamically generating subordinates as appropriate.</p></li> </ul></dd> <dt><a name="2"><b class="cmd">::mime::finalize</b> <i class="arg">token</i> <span class="opt">?<b class="option">-subordinates</b> <b class="const">all</b> | <b 
class="const">dynamic</b> | <b class="const">none</b>?</span></a></dt> <dd><p>This command destroys the MIME part represented by <i class="arg">token</i>. It returns an empty string.</p> <p>If the <b class="option">-subordinates</b> option is present, it specifies which subordinates should also be destroyed. The default value is <b class="const">dynamic</b>, destroying all subordinates which were created by <b class="cmd">::mime::initialize</b> together with the containing body part.</p></dd> <dt><a name="3"><b class="cmd">::mime::getproperty</b> <i class="arg">token</i> <span class="opt">?<i class="arg">property</i> | <b class="option">-names</b>?</span></a></dt> <dd><p>This command returns a string or a list of strings containing the properties of a MIME part. If the command is invoked with the name of a specific property, then the corresponding value is returned; instead, if <b class="option">-names</b> is specified, a list of all properties is returned; otherwise, a serialized array of properties and values is returned.</p> <p>The possible properties are:</p> <dl class="doctools_definitions"> <dt><b class="const">content</b></dt> <dd><p>The type/subtype describing the content</p></dd> <dt><b class="const">encoding</b></dt> <dd><p>The &quot;Content-Transfer-Encoding&quot;</p></dd> <dt><b class="const">params</b></dt> <dd><p>A list of &quot;Content-Type&quot; parameters</p></dd> <dt><b class="const">parts</b></dt> <dd><p>A list of tokens for the part's subordinates. This property is present only if the MIME part has subordinates.</p></dd> <dt><b class="const">size</b></dt> <dd><p>The approximate size of the content (unencoded)</p></dd> </dl></dd> <dt><a name="4"><b class="cmd">::mime::getheader</b> <i class="arg">token</i> <span class="opt">?<i class="arg">key</i> | <b class="option">-names</b>?</span></a></dt> <dd><p>This command returns the header of a MIME part, as a list of strings.</p> <p>A header consists of zero or more key/value pairs. 
Each value is a list containing one or more strings.</p> <p>If this command is invoked with the name of a specific <i class="arg">key</i>, then a list containing the corresponding value(s) is returned; instead, if -names is specified, a list of all keys is returned; otherwise, a serialized array of keys and values is returned. Note that when a key is specified (e.g., &quot;Subject&quot;), the list returned usually contains exactly one string; however, some keys (e.g., &quot;Received&quot;) often occur more than once in the header, accordingly the list returned usually contains more than one string.</p></dd> <dt><a name="5"><b class="cmd">::mime::setheader</b> <i class="arg">token</i> <i class="arg">key value</i> <span class="opt">?<b class="option">-mode</b> <b class="const">write</b> | <b class="const">append</b> | <b class="const">delete</b>?</span></a></dt> <dd><p>This command writes, appends to, or deletes the <i class="arg">value</i> associated with a <i class="arg">key</i> in the header. 
It returns a list of strings containing the previous value associated with the key.</p> <p>The value for <b class="option">-mode</b> is one of:</p> <dl class="doctools_definitions"> <dt><b class="const">write</b></dt> <dd><p>The <i class="arg">key</i>/<i class="arg">value</i> is either created or overwritten (the default).</p></dd> <dt><b class="const">append</b></dt> <dd><p>A new <i class="arg">value</i> is appended for the <i class="arg">key</i> (creating it as necessary).</p></dd> <dt><b class="const">delete</b></dt> <dd><p>All values associated with the key are removed (the <i class="arg">value</i> parameter is ignored).</p></dd> </dl></dd> <dt><a name="6"><b class="cmd">::mime::getbody</b> <i class="arg">token</i> <span class="opt">?<b class="option">-decode</b>?</span> <span class="opt">?<b class="option">-command</b> <i class="arg">callback</i> <span class="opt">?<b class="option">-blocksize</b> <i class="arg">octets</i>?</span>?</span></a></dt> <dd><p>This command returns a string containing the body of the leaf MIME part represented by <i class="arg">token</i> in canonical form.</p> <p>If the <b class="option">-command</b> option is present, then it is repeatedly invoked with a fragment of the body as this:</p> <pre class="doctools_example"> uplevel #0 $callback [list &quot;data&quot; $fragment] </pre> <p>(The <b class="option">-blocksize</b> option, if present, specifies the maximum size of each fragment passed to the callback.)</p> <p>When the end of the body is reached, the callback is invoked as:</p> <pre class="doctools_example"> uplevel #0 $callback &quot;end&quot; </pre> <p>Alternatively, if an error occurs, the callback is invoked as:</p> <pre class="doctools_example"> uplevel #0 $callback [list &quot;error&quot; reason] </pre> <p>Regardless, the return value of the final invocation of the callback is propagated upwards by <b class="cmd">::mime::getbody</b>.</p> <p>If the <b class="option">-command</b> option is absent, then the return value of <b 
class="cmd">::mime::getbody</b> is a string containing the MIME part's entire body.</p> <p>If the option <b class="option">-decode</b> is absent the return value computed above is returned as is. This means that it will be in the charset specified for the token and not the usual utf-8. If the option <b class="option">-decode</b> is present however the command will use the charset information associated with the token to convert the string from its encoding into utf-8 before returning it.</p></dd> <dt><a name="7"><b class="cmd">::mime::copymessage</b> <i class="arg">token</i> <i class="arg">channel</i></a></dt> <dd><p>This command copies the MIME represented by <i class="arg">token</i> part to the specified <i class="arg">channel</i>. The command operates synchronously, and uses fileevent to allow asynchronous operations to proceed independently. It returns an empty string.</p></dd> <dt><a name="8"><b class="cmd">::mime::buildmessage</b> <i class="arg">token</i></a></dt> <dd><p>This command returns the MIME part represented by <i class="arg">token</i> as a string. It is similar to <b class="cmd">::mime::copymessage</b>, only it returns the data as a return string instead of writing to a channel.</p></dd> <dt><a name="9"><b class="cmd">::mime::parseaddress</b> <i class="arg">string</i></a></dt> <dd><p>This command takes a string containing one or more 822-style address specifications and returns a list of serialized arrays, one element for each address specified in the argument. If the string contains more than one address they will be separated by commas.</p> <p>Each serialized array contains the properties below. 
Note that one or more of these properties may be empty.</p> <dl class="doctools_definitions"> <dt><b class="const">address</b></dt> <dd><p>local@domain</p></dd> <dt><b class="const">comment</b></dt> <dd><p>822-style comment</p></dd> <dt><b class="const">domain</b></dt> <dd><p>the domain part (rhs)</p></dd> <dt><b class="const">error</b></dt> <dd><p>non-empty on a parse error</p></dd> <dt><b class="const">group</b></dt> <dd><p>this address begins a group</p></dd> <dt><b class="const">friendly</b></dt> <dd><p>user-friendly rendering</p></dd> <dt><b class="const">local</b></dt> <dd><p>the local part (lhs)</p></dd> <dt><b class="const">memberP</b></dt> <dd><p>this address belongs to a group</p></dd> <dt><b class="const">phrase</b></dt> <dd><p>the phrase part</p></dd> <dt><b class="const">proper</b></dt> <dd><p>822-style address specification</p></dd> <dt><b class="const">route</b></dt> <dd><p>822-style route specification (obsolete)</p></dd> </dl></dd> <dt><a name="10"><b class="cmd">::mime::parsedatetime</b> (<i class="arg">string</i> | <b class="option">-now</b>) <i class="arg">property</i></a></dt> <dd><p>This command takes a string containing an 822-style date-time specification and returns the specified property as a serialized array.</p> <p>The list of properties and their ranges are:</p> <dl class="doctools_definitions"> <dt><b class="const">hour</b></dt> <dd><p>0 .. 23</p></dd> <dt><b class="const">lmonth</b></dt> <dd><p>January, February, ..., December</p></dd> <dt><b class="const">lweekday</b></dt> <dd><p>Sunday, Monday, ... Saturday</p></dd> <dt><b class="const">mday</b></dt> <dd><p>1 .. 31</p></dd> <dt><b class="const">min</b></dt> <dd><p>0 .. 59</p></dd> <dt><b class="const">mon</b></dt> <dd><p>1 .. 
12</p></dd> <dt><b class="const">month</b></dt> <dd><p>Jan, Feb, ..., Dec</p></dd> <dt><b class="const">proper</b></dt> <dd><p>822-style date-time specification</p></dd> <dt><b class="const">rclock</b></dt> <dd><p>elapsed seconds between then and now</p></dd> <dt><b class="const">sec</b></dt> <dd><p>0 .. 59</p></dd> <dt><b class="const">wday</b></dt> <dd><p>0 .. 6 (Sun .. Sat)</p></dd> <dt><b class="const">weekday</b></dt> <dd><p>Sun, Mon, ..., Sat</p></dd> <dt><b class="const">yday</b></dt> <dd><p>1 .. 366</p></dd> <dt><b class="const">year</b></dt> <dd><p>1900 ...</p></dd> <dt><b class="const">zone</b></dt> <dd><p>-720 .. 720 (minutes east of GMT)</p></dd> </dl></dd> <dt><a name="11"><b class="cmd">::mime::mapencoding</b> <i class="arg">encoding_name</i></a></dt> <dd><p>This command maps tcl encodings onto the proper names for their MIME charset type. This is only done for encodings whose charset types were known. The remaining encodings return &quot;&quot; for now.</p></dd> <dt><a name="12"><b class="cmd">::mime::reversemapencoding</b> <i class="arg">charset_type</i></a></dt> <dd><p>This command maps MIME charset types onto tcl encoding names. Those that are unknown return &quot;&quot;.</p></dd> </dl> </div> <div id="section2" class="doctools_section"><h2><a name="section2">KNOWN BUGS</a></h2> <dl class="doctools_definitions"> <dt>Tcllib Bug #447037</dt> <dd><p>This problem affects only people who are using Tcl and Mime on a 64-bit system. The currently recommended fix for this problem is to upgrade to Tcl version 8.4. This version has extended 64 bit support and the bug does not appear anymore.</p> <p>The problem could have been generally solved by requiring the use of Tcl 8.4 for this package. 
We decided against this solution as it would force a large number of unaffected users to upgrade their Tcl interpreter for no reason.</p> <p>See <a href="/tktview?name=447037">Ticket 447037</a> for additional information.</p></dd> </dl> </div> <div id="section3" class="doctools_section"><h2><a name="section3">Bugs, Ideas, Feedback</a></h2> <p>This document, and the package it describes, will undoubtedly contain bugs and other problems. Please report such in the category <em>mime</em> of the <a href="http://core.tcl.tk/tcllib/reportlist">Tcllib Trackers</a>. Please also report any ideas for enhancements you may have for either package and/or documentation.</p> </div> <div id="see-also" class="doctools_section"><h2><a name="see-also">See Also</a></h2> <p><a href="../ftp/ftp.html">ftp</a>, <a href="../../../../index.html#key445">http</a>, <a href="../pop3/pop3.html">pop3</a>, <a href="smtp.html">smtp</a></p> </div> <div id="keywords" class="doctools_section"><h2><a name="keywords">Keywords</a></h2> <p><a href="../../../../index.html#key335">email</a>, <a href="../../../../index.html#key131">internet</a>, <a href="../../../../index.html#key330">mail</a>, <a href="../../../../index.html#key230">mime</a>, <a href="../../../../index.html#key295">net</a>, <a href="../../../../index.html#key811">rfc 2045</a>, <a href="../../../../index.html#key812">rfc 2046</a>, <a href="../../../../index.html#key810">rfc 2049</a>, <a href="../../../../index.html#key332">rfc 821</a>, <a href="../../../../index.html#key333">rfc 822</a>, <a href="../../../../index.html#key337">smtp</a></p> </div> <div id="category" class="doctools_section"><h2><a name="category">Category</a></h2> <p>Text processing</p> </div> <div id="copyright" class="doctools_section"><h2><a name="copyright">Copyright</a></h2> <p>Copyright &copy; 1999-2000 Marshall T. Rose</p> </div> </div>
{ "pile_set_name": "Github" }
/**
 * Created on 13-09-09 18:16
 */
package com.alicp.jetcache;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;

/**
 * Result of a cache GET operation.
 * <p>
 * Extends {@link CacheResult} with the fetched value: once the underlying
 * asynchronous {@code ResultData} completes, {@link #getValue()} returns the
 * unwrapped cache value and {@link #getHolder()} returns the raw
 * {@link CacheValueHolder} fetched from the cache.
 *
 * @author <a href="mailto:[email protected]">huangli</a>
 */
public class CacheGetResult<V> extends CacheResult {

    // Populated by fetchResultSuccess() after the underlying future completes.
    private V value;
    private CacheValueHolder<V> holder;

    /** Shared, message-less result for a key that does not exist in the cache. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public static final CacheGetResult NOT_EXISTS_WITHOUT_MSG =
            new CacheGetResult(CacheResultCode.NOT_EXISTS, null, null);

    /** Shared, message-less result for an entry that has expired. */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public static final CacheGetResult EXPIRED_WITHOUT_MSG =
            new CacheGetResult(CacheResultCode.EXPIRED, null, null);

    /**
     * Creates an already-completed result with the given code, message and holder.
     *
     * @param resultCode the result code of the GET operation
     * @param message    an optional human-readable message, may be {@code null}
     * @param holder     the holder fetched from the cache, may be {@code null}
     */
    public CacheGetResult(CacheResultCode resultCode, String message, CacheValueHolder<V> holder) {
        super(CompletableFuture.completedFuture(new ResultData(resultCode, message, holder)));
    }

    /**
     * Creates a result backed by an asynchronous computation.
     *
     * @param future the future that will supply the {@code ResultData}
     */
    public CacheGetResult(CompletionStage<ResultData> future) {
        super(future);
    }

    /**
     * Creates an already-failed result.
     *
     * @param ex the exception describing the failure
     */
    public CacheGetResult(Throwable ex) {
        super(ex);
    }

    /**
     * Blocks until the underlying result is available and returns the cache value,
     * or {@code null} on failure or a miss.
     */
    public V getValue() {
        waitForResult();
        return value;
    }

    /**
     * Captures the holder from the completed {@code ResultData} and unwraps the
     * contained value.
     */
    @Override
    @SuppressWarnings("unchecked")
    protected void fetchResultSuccess(ResultData resultData) {
        super.fetchResultSuccess(resultData);
        holder = (CacheValueHolder<V>) resultData.getOriginData();
        value = (V) unwrapValue(holder);
    }

    /**
     * Recursively unwraps nested {@link CacheValueHolder}s and returns the
     * innermost value ({@code null} stays {@code null}).
     */
    static Object unwrapValue(Object holder) {
        // if @Cached or @CacheCache change type from REMOTE to BOTH (or from BOTH to REMOTE),
        // during the dev/publish process, the value type which different application server put into cache server will be different
        // (CacheValueHolder<V> and CacheValueHolder<CacheValueHolder<V>>, respectively).
        // So we need to correct the problem here and in MultiLevelCache.unwrapHolder
        Object v = holder;
        // instanceof evaluates to false for null, so no explicit null check is needed
        while (v instanceof CacheValueHolder) {
            v = ((CacheValueHolder) v).getValue();
        }
        return v;
    }

    /**
     * Clears the value on failure so callers observe {@code null}.
     */
    @Override
    protected void fetchResultFail(Throwable e) {
        super.fetchResultFail(e);
        value = null;
    }

    /**
     * Blocks until the underlying result is available and returns the raw holder,
     * or {@code null} on failure or a miss.
     */
    protected CacheValueHolder<V> getHolder() {
        waitForResult();
        return holder;
    }
}
{ "pile_set_name": "Github" }
/*--------------------------------*- C++ -*----------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     | Website:  https://openfoam.org
    \\  /    A nd           | Version:  dev
     \\/     M anipulation  |
\*---------------------------------------------------------------------------*/
FoamFile
{
    version     2.0;
    format      ascii;
    class       dictionary;
    object      createPatchDict;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

// Configuration for the createPatch utility: builds the
// 'floatingObjectBottom' wall patch from the faceSet of the same name.

// Do a synchronisation of coupled points after creation of any patches.
// Note: this does not work with points that are on multiple coupled patches
// with transformations (i.e. cyclics).
pointSync false;

// Optional: Write cyclic matches into .obj format; defaults to false.
writeCyclicMatch false;

// Patches to create.
patches
(
    {
        // Name of new patch
        name floatingObjectBottom;

        // Dictionary to construct new patch from
        patchInfo
        {
            type wall;
        }

        // How to construct: either from 'patches' or 'set'
        constructFrom set;

        // If constructFrom = set : name of faceSet
        set floatingObjectBottom;
    }
);

// ************************************************************************* //
{ "pile_set_name": "Github" }
e1c11bface333fb29b8c4de776084582748494e355dffdc8bebe6322be58371d2ae39042781a579bea5ecf84a30431c6f98b6c9fd6eca47d338d2fae0a37bac2
{ "pile_set_name": "Github" }
package testapp.model;

import java.util.List;

/**
 * Simple mutable data holder pairing an identifier with a numeric value
 * and a list of associated {@link Foo} instances.
 */
public class Bar {

    // Identifier of this instance; null until set.
    private String id;

    // Numeric payload.
    private int n;

    // Associated Foo elements; null until set.
    private List<Foo> fooList;

    /** Returns the identifier, or {@code null} if never set. */
    public String getId() {
        return id;
    }

    /** Sets the identifier. */
    public void setId(String id) {
        this.id = id;
    }

    /** Returns the numeric payload (0 if never set). */
    public int getN() {
        return n;
    }

    /** Sets the numeric payload. */
    public void setN(int n) {
        this.n = n;
    }

    /** Returns the associated {@link Foo} list, or {@code null} if never set. */
    public List<Foo> getFooList() {
        return fooList;
    }

    /** Sets the associated {@link Foo} list. */
    public void setFooList(List<Foo> fooList) {
        this.fooList = fooList;
    }
}
{ "pile_set_name": "Github" }
# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex) > Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) ## Install ```sh $ npm install --save ansi-regex ``` ## Usage ```js var ansiRegex = require('ansi-regex'); ansiRegex().test('\u001b[4mcake\u001b[0m'); //=> true ansiRegex().test('cake'); //=> false '\u001b[4mcake\u001b[0m'.match(ansiRegex()); //=> ['\u001b[4m', '\u001b[0m'] ``` *It's a function so you can create multiple instances. Regexes with the global flag will have the `.lastIndex` property changed for each call to methods on the instance. Therefore reusing the instance with multiple calls will not work as expected for `.test()`.* ## License MIT © [Sindre Sorhus](http://sindresorhus.com)
{ "pile_set_name": "Github" }
// Copyright (c) 2017, the gRPC project authors. Please see the AUTHORS file
// for details. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import 'dart:async';

import '../shared/status.dart';
import 'call.dart';

/// Definition of a gRPC service method.
class ServiceMethod<Q, R> {
  /// Method name as used for dispatch.
  final String name;

  /// Whether the client may send a stream of requests (client streaming).
  final bool streamingRequest;

  /// Whether the server may send a stream of responses (server streaming).
  final bool streamingResponse;

  /// Converts wire bytes into a request object of type [Q].
  final Q Function(List<int> request) requestDeserializer;

  /// Converts a response object of type [R] into wire bytes.
  final List<int> Function(R response) responseSerializer;

  /// User-supplied implementation; its exact signature depends on the
  /// streaming flags above (single value vs. stream on each side).
  final Function handler;

  ServiceMethod(
      this.name,
      this.handler,
      this.streamingRequest,
      this.streamingResponse,
      this.requestDeserializer,
      this.responseSerializer);

  /// Creates a request-stream controller whose flow control is tied to
  /// [incoming]: listening or resuming the controller resumes the
  /// subscription, pausing the controller pauses it.
  StreamController<Q> createRequestStream(StreamSubscription incoming) =>
      StreamController<Q>(
          onListen: incoming.resume,
          onPause: incoming.pause,
          onResume: incoming.resume);

  /// Deserializes one request frame.
  Q deserialize(List<int> data) => requestDeserializer(data);

  /// Serializes one response object (cast to [R] before serializing).
  List<int> serialize(dynamic response) => responseSerializer(response as R);

  /// Invokes [handler] with the argument shape it expects and normalizes the
  /// outcome to a response stream (a unary response is wrapped via
  /// [Future.asStream]).
  Stream<R> handle(ServiceCall call, Stream<Q> requests) {
    if (streamingResponse) {
      if (streamingRequest) {
        return handler(call, requests);
      } else {
        return handler(call, _toSingleFuture(requests));
      }
    } else {
      Future<R> response;
      if (streamingRequest) {
        response = handler(call, requests);
      } else {
        response = handler(call, _toSingleFuture(requests));
      }
      return response.asStream();
    }
  }

  /// Collapses [stream] into a future completing with its single element;
  /// the future fails with an UNIMPLEMENTED [GrpcError] if the stream
  /// delivers zero or more than one request.
  Future<Q> _toSingleFuture(Stream<Q> stream) {
    // Fold step: any second element means the client violated the unary
    // contract.
    Q _ensureOnlyOneRequest(Q previous, Q element) {
      if (previous != null) {
        throw GrpcError.unimplemented('More than one request received');
      }
      return element;
    }

    // Final check: a null accumulator means the stream ended with no element.
    Q _ensureOneRequest(Q value) {
      if (value == null) throw GrpcError.unimplemented('No requests received');
      return value;
    }

    final future =
        stream.fold(null, _ensureOnlyOneRequest).then(_ensureOneRequest);
    // Make sure errors on the future aren't unhandled, but return the original
    // future so the request handler can also get the error.
    future.catchError((_) {});
    return future;
  }
}

/// Definition of a gRPC service.
abstract class Service {
  // Registered methods, keyed by method name.
  final Map<String, ServiceMethod> _$methods = {};

  /// The service name used to route incoming calls.
  String get $name;

  /// Registers [method] so it can later be found by [$lookupMethod].
  void $addMethod(ServiceMethod method) {
    _$methods[method.name] = method;
  }

  /// Client metadata handler.
  ///
  /// Services can override this method to provide common handling of incoming
  /// metadata from the client.
  void $onMetadata(ServiceCall context) {}

  /// Returns the registered method named [name], or null if none exists.
  ServiceMethod $lookupMethod(String name) => _$methods[name];
}
{ "pile_set_name": "Github" }
### 4.2.3. Specifying Program Options 为MySQL指定选项有好几种方式 * 在命令行上紧跟着程序号列出选项,这种做法对于应用于一次特定调用的程序选项是很常见的。 * 在选项文件中列出选项,当程序启动时会读取这些选项。这种做法对于你想每次调用程序都使用这些选项时是很常见的。 * 在环境变量中列出选项,(参考[Section 4.2.4, “Setting Environment Variables”][04.02.04]).当你运行程序时都想应用这些选项时非常有用。在实际中选项文件更多的被用来达到这种目的。但是[Section 5.6.3, “Running Multiple MySQL Instances on Unix”][05.06.03],讨论了环境变量可以是非常有帮助的一种情况。它描述了一个小技巧,就是用这些变量为服务端和客户端程序指定TCP/IP端口和UNIX套接字文件。 选项按顺序被处理,所以如果一个选项多次指定,最后出现的优先级高。下面的命令使mysql连接运行在localhost上的服务器。 ```shell shell> mysql -h example.com -h localhost ``` 如果使用冲突或相关的选项,后来选项优先于先前的选项。下面的命令用"没有列名"模式运行mysql。 ```shell shell> mysql --column-names --skip-column-names ``` MySQL 程序那一个选项首先被应用,通过检查环境变量,然后通过读选项文件,最后通过检查命令行。这意味着环境变量具有最低的优先权,命令行选项具有最高的优先权。 你可以利用MySQL程序处理选项的方式,在一个选项文件中为一个程序指定默认值。这避免你每次运行程序时都要输入选项,并且你也可以在必要时在命令行覆盖他们。 指定选项时可以用他们的完全形式,也可以用一个没有歧义的前缀。例如mysqldump的选项--compress可以用--compr代替。但是不能用--comp因为后面的形式具有歧义。 ```shell shell> mysqldump --comp mysqldump: ambiguous option '--comp' (compatible, compress) ``` 注意选项前缀的用法可能引起问题,当一个程序引入新的选项时。一个现在没有歧义的选项可能在以后变得有歧义。 >**Note** > >在MySQL 5.6.13版本中,明确的前缀是过时的,如果给出一个明确的前缀,会反馈回一个警告,选项 前缀在MySQL 5.7不再支持;只接受完全的选项。 #### 4.2.3.1. Using Options on the Command Line 在命令行上指定程序选项遵循这些规则 * 选项在命令名的后面给出 * 所有选项参数前面以一个或两个破折号开始,这取决于选项是否是一个简短的格式或长的格式。许多选项有短和长的格式。例如,-? 
和 --help 分别是选项的短格式和长格式。来用显示一个MySQL程序的帮助信息。 * 选项的名称是区分大小写的,-v 和 -V都是合法的并且有不同含义。(他们分别是--verbose 和 --version 选项的短格式)。 * 一些选项带有值在选项名的后面,例如,-h localhost 或 --host=localhost表示客户端程序连接MySQL服务器的主机。选项值告诉程序MySQL服务器运行的主机名。 * 对于一个长格式的带有值的选项,需要用一个“=”符号把选项名和值分开。对于一个短格式的带有值的选项。选项值可以紧跟着选项,或者在选项和值之间有一个空格,-hlocalhost 和 -h localhost 是相等的。对于这个规则例外的情况是当你指定MySQL密码选项的时候。这个选项可以用长格式--password=pass_val或短格式--password给出。在后面的例子中(没有给出密码值),会提示你输入密码。密码选项也可以以-ppass_val 或 -p的短形式给出。但是对于短格式,如果给出密码值。密码值和选项字母之间不能有空格。原因是如果有空格在选项字母后面,程序没法识别随后的参数是密码值或一些其它形式的参数。因此,下面的两个命令具有完全不同的含义。 ```shell shell> mysql -ptest shell> mysql -p test ``` 第一个命令表示mysql使用值为“test”的密码,但是没有指出使用那一个数据库,第二个命令会提示你输入密码,并且使用“test”做为默认的数据库。 * 在选项名称内部,破折号(“-”)和下划线(“_”)可以交替地被使用。例如,--skip-grant-tables和 --skip_grant_tables [453] 是一样的。(但是, 开头的破折号不能换成下划线) * 对于带有数字值的选项,数字可以带有K,M或G后缀(大写或小写)表示乘数为1024,10242或10243。例如,下面的例子告诉mysqladmin去ping服务器1024次,每一次ping之间间隔10秒。 ```shell mysql> mysqladmin --count=1K --sleep=10 ping ``` 如果在命令行上选项值带有空格,必须把选项值用引号括起来。例如,选项--execute (或者 -e)可以被mysql用来传递SQL语句到服务器。当这个选项被使用时,mysql执行选项值里的语句然后退出。该语句必须用引号括起来,例如,你可以用下面的命令得到一个用户账号的列表, ```shell mysql> mysql -u root -p --execute="SELECT User, Host FROM mysql.user" Enter password: ****** +------+-----------+ | User | Host | +------+-----------+ | | gigan | | root | gigan | | | localhost | | jon | localhost | | root | localhost | +------+-----------+ shell> ``` 注意长格式(--execute [261])后面紧跟着“=”。 如果你想在表达式中使用用引号引起来的值。你要么转义内部的引号符号,要么用另一种与括起的表达式不同类型的引号。 你的命令处理器决定你是否可以使用单引号或双引号和转义引号的语法,例如,如果你的命令处理器支持单引号或双引号,你可以使用双引号包裹表达式,并且使用单引号包裹表达式里面的带引号的值。 在命令行上可以给选项值传递多个SQL语句,语句之间用分号隔开, ```shell shell> mysql -u root -p -e "SELECT VERSION();SELECT NOW()" Enter password: ****** +-----------------+ | VERSION() | +-----------------+ | 5.1.5-alpha-log | +-----------------+ +---------------------+ | NOW() | +---------------------+ | 2006-01-05 21:19:04 | +---------------------+ ``` #### 4.2.3.2. 
Program Option Modifiers 一些选项是“布尔值”并且控制行为可以打开和关闭。例如,客户端程序mysql有一个--column-names选项,这个选项决定是否在查询结果的头部显示列名。默认情况下这个选项是启用的,然而你可能在一些情况下想要关闭它,例如当你传送mysql的输出到另一个程序,并且只想让他看到数据而不是标题行。 为了不显示列名,你可以用下面的形式指定选项, ```shell --disable-column-names --skip-column-names --column-names=0 ``` 前缀--disable,--skip和后缀=0有相同的效果,他们将关闭选项。 选项的“启用”形式可以用下面的任何形式指定。 ```shell --column-names --enable-column-names --column-names=1 ``` 在MySQL 5.6.2中,ON, TRUE, OFF, 和 FALSE的值也被认为是布尔选项(不区分大小写). 如果一个选项以--loose开头,如果程序不能识别这个选项程序不会出错退出,只是会产生一个警告. ```shell shell> mysql --loose-no-such-option mysql: WARNING: unknown option '--no-such-option' ``` 当你在安装了多个MySQL的机器上运行程序并使用选项文件里的选项时--loose前缀将非常有用。可能不被所有版本的程序识别的选项可以加上--loose前缀(或者在选项文件中使用loose)。在能识别选项的版本中,程序正常处理选项,而不能识别选项的版本中,程序产生一个警告并忽略它。 mysqld可以根据系统变量设置一个限制关于客户端程序能设置多大,为了实现这个目的,在选项名前面加一个--maximum前缀,例如,--maximumquery_cache_size=4M防止任何程序端设置查询缓存值大于4M. #### 4.2.3.3. Using Option Files 大多数MySQL程序可以读取选项文件里的启动选项,(有时候也叫配置文件)。选项文件提供一个便利的方式来指定经常使用的选项,以便他们不用每次运行程序的时候都在命令行上输入选项。MySQL为MySQL服务器提供了一些(预配置选项文件)preconfigured option files 为了确定一个程序是否读取选项文件,用--help选项调用它,(对于 mysqld, 使用--verbose [461] 和 --help [427].),如果程序读取选项文件,帮助信息会显示它查找的文件和它识别的选项组。 包含登录路径的选项的.mylogin.cnf文件,被mysql_config_editor工具创建。参考[Section 4.6.6, “mysql_config_editor — MySQL Configuration Utility”][04.06.06].“登录路径”是一组只包含host,user, 和 password选项的选项组。客户端程序用--login-path [230] 选项指定从读取.mylogin.cnf中那个登录路径被读取。 想指定另一个文件名,设置 MYSQL_TEST_LOGIN_FILE 环境变量。这个变量被mysql-test-run.pl测试工具使用。但是也被mysql_config_editor和MySQL客户端mysql, mysqladmin等等识别。 >**Note** >MySQL集群使用的程序的选项文件在[Section 17.3,“Configuration of MySQL Cluster NDB 7.3”][17.03.00]部分。 在Windows平台中,MySQL程序从下面的文件中读取启动选项,按照列出的先后顺序,(第一条被首先使用)。 <table border="1"> <thead> <tr> <th> File Name </th> <th> Purpose </th> </tr> </thead> <tbody> <tr> <td> %PROGRAMDATA% \MySQL\MySQL Server 5.6\my.ini , %PROGRAMDATA% \MySQL\MySQL Server 5.6\my.cnf </td> <td> Global options </td> </tr> <tr> <td> %WINDIR% \my.ini , %WINDIR% \my.cnf </td> <td> Global options </td> </tr> <tr> <td> C:\my.ini , C:\my.cnf 
</td> <td> Global options </td> </tr> <tr> <td> <em> INSTALLDIR </em>\my.ini <em> INSTALLDIR </em>\my.cnf </td> <td> Global options </td> </tr> <tr> <td> defaults-extra-file </td> <td> The file specified with <a class="link" href="option-file- options.html#option_general_defaults-extra-file"><code class="option"> -- defaults-extra-file= <em> path </em> </a>, if any </td> </tr> <tr> <td> %APPDATA% \MySQL\.mylogin.cnf </td> <td> Login path options </td> </tr> </tbody> </table> %PROGRAMDATA%表示包含主机上的所有用户的应用程序数据的文件系统目录,在Microsoft Vista或更高版本中默认的路径是C:\ProgramData,在更早的Windows版本中是C:\Documents and Settings\All Users\Application Data %WINDIR%代表你的Windows目录的位置,通常来说是C:\ Windows,你可以使用下面的命令从WINDIR环境变量值确定其确切位置 ```shell C:\> echo %WINDIR% ``` INSTALLDIR代表MySQL的安装目录,当MySQL 5.6使用安装和配置向导被安装时,典型的目录是C:\PROGRAMDIR\MySQL\MySQL 5.6 Server,这里的PROGRAMDIR代表程序目录(通常在英文版本的Windows中是Program Files)。参考[Section 2.3.3, “Installing MySQL on Microsoft Windows Using MySQL Installer”][02.03.03] %APPDATA%代表Windows应用程序的数据目录的值,你可以从APPDATA环境变量使用下面的命令值确定其确切位置 ```shell C:\> echo %APPDATA% ``` 在Unix,linux和Mac OS X平台上,MySQL 程序从下面的文件按指定的顺序(上面的首先被使用)读取启动选项。 <table border="1"> <thead> <tr> <th> File Name </th> <th> Purpose </th> </tr> </thead> <tbody> <tr> <td> /etc/my.cnf </td> <td> Global options </td> </tr> <tr> <td> /etc/mysql/my.cnf </td> <td> Global options </td> </tr> <tr> <td> <em> SYSCONFDIR </em>/my.cnf </td> <td> Global options </td> </tr> <tr> <td> $MYSQL_HOME/my.cnf </td> <td> Server-specific options </td> </tr> <tr> <td> <code> defaults-extra-file </td> <td> The file specified with <a href="option- file-options.html#option_general_defaults-extra-file"> --defaults-extra-file= <em> path </em> </a>, if any </td> </tr> <tr> <td> ~/.my.cnf </td> <td> User- specific options </td> </tr> <tr> <td> ~/.mylogin.cnf </td> <td> Login path options </td> </tr> </tbody> </table> ~表示当前用户的主目录($HOME的值). 
SYSCONFDIR表示当MySQL创建时由CMake的SYSCONFDIR选项指定的目录。默认是安装的目录下的etc目录。 MYSQL_HOME是一个环境变量,指定了my.cnf文件所在的目录。如果MYSQL_HOME没有设置并且你用mysqld_safe程序启动服务器,mysqld_safe试图用按照下面的顺序设置MYSQL_HOME。 * 让basedir和datadir分别代表MySQL的根目录和数据目录的路径名称, * 如果有一个my.cnf文件在DATADIR目录下而没有在BASEDIR目录,mysqld_safe就把MYSQL_HOME设置成DATADIR * 否则,如果MYSQL_HOME没有设置,并且my.cnf文件没有在DATADIR目录,mysqld_safe就把MYSQL_HOME设置成BASEDIR, 在MySQL 5.6版本中,把DATADIR作为文件my.cnf的目录的用法是过时的。 通常,对于二进制安装DATADIR是/usr/local/mysql/data,或者对于源文件安装是/usr/local/var。 请注意,这是在配置时间指定的数据目录的位置,而不是在mysqld启动时用--datadir选项指定的。在运行时使用--datadir对服务器在那里搜索选项文件没有影响,因为搜索选项文件是在处理选项前面发生的。 MySQL按照刚才描述的顺序寻找选项文件,并读取存在的选项文件,如果你想使用的选项文件没有存在,使用纯文本编辑器创建它。 如果发现一个给定的选项的多个实例,最后的实例起作用,但有一个例外,对于mysqld为了安全起见第一个--user选项的实例被使用。为了防止在选项文件里指定的选项在命令行中被覆盖。 >**Note** > >在Unix平台,MySQL忽视全域可写的配置文件,这是有意设定的一种安全措施。 任何运行程序在命令行上给出的选项同样可以在选项文件里给出。想得到一个程序的可用选项,可以用--help选项运行它。 在选项文件里指定选项的语法类似于命令行的语法,(参考[Section 4.2.3.1, “Using Options on the Command Line”][04.02.03.01]),然后在选项文件里,在取消了选项名前面的两个破折号并且每行只能指定一个选项,例如,在命令行上的--quick和--host=localhost在选项文件里以quick和 host=localhost的形式指定,并且在不同的行。在选项文件里指定--loose-opt_name形式的只需写成loose-opt_name即可。 空行在在选项文件里被忽略。非空行可以用下面任何形式。 * #comment, ;comment 注释行以“#” 或“;”开始,“#”形式的注释可以从一行的中间开始。 * [group] group是程序的名称或你想要设置选项的组。在 [group]行之后,任何设置选项的行应于到这个组直到文件末,或者到另一个[group]行。 * opt_name 这相当于在命令行上的--opt_name。 * opt_name=value 这相当于在命令行上的--opt_name=value。在一个选择的文件,你可以有空格在“=”字符前后。有时候这种写法在命令行上是不对的。您可以将值在单引号或者双引号包起来,当值里面包含“#”字符时这很有用。 前导和尾随空格自动从选项的名称和值中删除。 你可以在选项值里使用转义序列“\b”, “\t”, “\n”, “\r”, “\\”, 和 “\s”来代表退格键,制表符,换行符,回车,反斜杠,与空格。在选项文件里的转义规则是这样的: * 如果一个反斜杠后跟一个有效的转义序列字符,序列被转换成序列打表的字符,例如,“\s”被转换为空格。 * 如果一个反斜杠后面不是一个有效的转义序列字符,它就保持不变,例如“\S” 还是“\S” 。 前面的规则意味着反斜杠可以用“\\”表示,或者一个后面没有有效转义序列字符的“\”。 在选项文件里的转义序列规则与在SQL表达式里的字符串的转义序列规则有点不同。在SQL表达式里,如果 “x”不是一个有效的转义序列字符,“\x” 变成 “x” 而不是 “\x”,参考[Section 9.1.1, “String Literals”][09.01.01]. 
在选项文件里转换规则与Windows的路径名息息相关。在windows中,用“\”用作路径名分隔符,在Windows的路径名的分隔符必须被写成“\\”如果分隔符后面跟着一个转义序列字符,如果后面没有转义序列字符,可以写作“\\” 或 “\”。作为另一种选择,“/”可以被用在Windows路径名中并且被当作“\”。假如你想在选项文中指定根目录为C:\Program Files\MySQL\MySQL Server 5.6,可以用下面几种方式实现。例如 ```shell basedir="C:\Program Files\MySQL\MySQL Server 5.6" basedir="C:\\Program Files\\MySQL\\MySQL Server 5.6" basedir="C:/Program Files/MySQL/MySQL Server 5.6" basedir=C:\\Program\sFiles\\MySQL\\MySQL\sServer\s5.6 ``` 如果一个选项组的名字与作为程序的名字相同,这个组里的选项只会应用于这个程序。例如,[mysqld] 和 [mysql] 组分别应用于mysqld服务端和mysql客户端程序。 选项组[client]被所有客户端读取(但不包括mysqld)。这使你能够指定应用于所有客户端程序的选项。例如,[client]是被用来指定来用连接服务器的密码的最好的组(要确保选项文件只有你自己是可读写的,以便让别人无法找到你的密码)。要保证不要在[client] 组放置不能被所有客户端识别的选项。如果你运行不能识别选项的程序时会退出并显示一个错误。 下面是一个典型的全局选项文件 ```shell [client] port=3306 socket=/tmp/mysql.sock [mysqld] port=3306 socket=/tmp/mysql.sock key_buffer_size=16M max_allowed_packet=8M [mysqldump] quick ``` 上面的选项文件使用var_name=value的语法设置key_buffer_size [506] 和 max_allowed_packet的值。 下面是一个典型的用户选项文件 ```shell [client] \# The following password will be sent to all standard MySQL clients password="my_password" [mysql] no-auto-rehash connect_timeout=2 [mysqlhotcopy] interactive-timeout ``` 如果你只想创建一个只想让特定的MySQL版本的mysqld服务读取的选项组。你可以使用组名为[mysqld-5.5],[mysqld-5.6]等等的形式来达到这样的目的。下面的组表明--new只能被版本号的5.6.x的MySQL服务器使用。 ```shell [mysqld-5.6] new ``` 可以在选项文件里使用!include指令来包括其它选项文件。使用!includedir指令来在指定的目录里搜索选项文件。例如,想要包括/home/mydir/myopt.cnf文件,使用下面的指令。 ```shell !include /home/mydir/myopt.cnf ``` 想要在/home/mydir搜索选项文件并使用它们,使用下面的指令。 ```shell !includedir /home/mydir ``` 在目录里选项文件的读取顺序没有先后之分。 >**note** > >现在在Unix操作系统上任何被找到的文件和被用!includedir包括的文件必须以.cnf结尾。在Window平台上,指令则会查找以.ini 或 .cnf结尾的文件。 写一个被包括的选项的内容与其它选项文件一样。也就是说,它应该包含选项组。[group]行表明后面的选项将要应用的程序。 当一个被包含的选项谁的被读取时,只有被当前程序识别的选项组被使用,其它选项组被忽略,假如my.cnf文件包含下面一行。 ```shell !include /home/mydir/myopt.cnf ``` 假如/home/mydir/myopt.cnf文件内容像下面这样 ```shell [mysqladmin] force [mysqld] key_buffer_size=16M ``` 如果my.cnf文件被mysqld使用,只有/home/mydir/myopt.cnf里的组[mysqld]被使用。如果my.cnf文件被mysqladmin使用,只有组 
[mysqladmin]被使用。如果文件被其它程序使用,/home/mydir/myopt.cnf里没有选项被使用。 !includedir指令以相似的方式处理,只是在目录里的所有选项文件被读取。 #### 4.2.3.4. Command-Line Options that Affect Option-File Handling 支持选项文件的大部分MySQL程序处理下面的的选项。它们影响选项文件的处理,所以它们必须在命令行中被给出,而不是在选项文件中。为了正确的工作,每一个下面的选项必须在其它选项前面被指定。有一些例外: * --print-defaults可以紧跟在--defaults-file和--defaults-extra-file后面。 * On Windows, 如果服务器用--defaults-file和--install选项启动。--install选项必须在前面,参考[Section 2.3.5.7, “Starting MySQL as a Windows Service”][02.03.05.07] 当指定文件名时,你应该避免使用shell元符号“~”因为它可能不能按你期望的被解释。 * --defaults-extra-file=file_name 在全局选项文件之后读取这个选项文件,但是(on Unix)在用户选项文件之前。如果这个选项文件不存在或是不可访问,程序报错并退出,file_name 被解释为相对于当前目录的路径,如果给出的是相对路径名而不是路径全名。 * --defaults-file=file_name [230] 使用指定的选项文件,如果文件不存在或无法访问,程序报错并退出。file_name 被解释为相对于当前目录的路径,如果给出的是相对路径名而不是路径全名。 * --defaults-group-suffix=str [230] 如果有这个选项,程序不仅读取通常的选项组,而且以选项名加str后缀的的组。例如mysql客户端通常读取[client] 和 [mysql] 组。如果指定 --defaults-group-suffix=_other [230] 选项。mysql 也读取[client_other] 和 [mysql_other] 组.。 * --login-path=name 从登录文件.mylogin.cnf 中的命名登录路径读取选项,“login path”是一个选项组,它只包含host, user, 和 password几个选项。可以想像为一组标示服务器主机和用于验证服务器主机的凭证的值。要创建登录文件,可以使用mysql_config_editor工具,See Section 4.6.6, “mysql_config_editor — MySQL Configuration Utility”,这个选项在MySQL 5.6.6版本中被加入。 * --no-defaults 不读取任何选项文件,如果一个程序因为正在从选项文件中读取未知选项而没有启动,--no-defaults可以用来防止程序读取未知选项。例外情况是.mylogin.cnf 文件,如果有这个文件,在任何情况都会读这个文件,这允许以一种比命令行更安全的方法指定密码选项,即使使用了--no-defaults选项。(.mylogin.cnf被mysql_config_editor工具创建,参考[Section 4.6.6,“mysql_config_editor — MySQL Configuration Utility”][04.06.06].) * --print-defaults [230] 打印程序的名字和所有从选项文件读取的选项 #### 4.2.3.5. Using Options to Set Program Variables 许多MySQL程序有内部变量,这些变量可以用SET表达式在运行设置。参考[Section 13.7.4, “SET Syntax”][13.07.04], and [Section 5.1.5, “Using System Variables”][05.01.05]. 
大部的程序变量也可以在启动的时候以指定程序选项的语法被设置。例如,mysql有一个max_allowed_packet变量用来控制通信缓冲器的最大值。为了设置mysql的max_allowed_packet变量的最大值为16M,可以使用下面的命令。 ```shell shell> mysql --max_allowed_packet=16777216 shell> mysql --max_allowed_packet=16M ``` 第一个命令以字节的形式指定值。第二个命令以兆字节的形式指定值,对于带有数字值的变量,数字可以带有K,M或G后缀(大写或小写)表示乘数为1024,10242或10243(例如,当用来设置max_allowed_packet时后缀表示单位为千字节,兆字节,千兆字节)。 在选项文件中,设置变量时前面不使用破折号。 ```shell [mysql] max_allowed_packet=16777216 ``` 或者 ```shell [mysql] max_allowed_packet=16M ``` 如果你喜欢,变量名字中的下划线可以用破折号代替。下列选项组是等价的,都把服务器的密钥缓冲区大小设为512MB ```shell [mysqld] key_buffer_size=512M [mysqld] key-buffer-size=512M ``` 一个变量可以用它的完全形式指定,也可以用一个明确的前缀形式指定,例如mysql的max_allowed_packet变量可以用--max_a设置。但是不能用--max因为后者的形式是不明确的。 ```shell shell> mysql --max=1000000 mysql: ambiguous option '--max=1000000' (max_allowed_packet, max_join_size) ``` 注意变量前缀的用法可能引起问题,当一个程序的新变量被引入的时候。一个现在明确的前缀在以后不一定还是明确的。 乘数因子后缀可以用来在服务器启动进设置变量值,但是不能在运行时候用SET设置值。另一方面,用SET语法你可以用表达式设置一个变量值。这种方式不适用于服务器启动时设置变量值。例如,下面的第一行在服务器启动时是合法的,但是第二行不是。 ```shell shell> mysql --max_allowed_packet=16M shell> mysql --max_allowed_packet=16*1024*1024 ``` 相反的,下面的第二行在服务器运行时是合法的,但是第一行不是 ```shell mysql> SET GLOBAL max_allowed_packet=16M; mysql> SET GLOBAL max_allowed_packet=16*1024*1024; ``` #### 4.2.3.6. Option Defaults, Options Expecting Values, and the = Sign 按照惯例,长形式的选项设置值时使用一个等号(=),像下面的这样, ```shell shell> mysql --host=tonfisk --user=jon ``` 对于一人需要值(也就是没有默认值)的选项,等号不是必需的,因此下面的写法也是合法的, ```shell shell> mysql --host tonfisk --user jon ``` 在两种例子中,mysql客户端试图用用户名为“jon”的账号连接运行在主机名为“tonfisk”的服务器 由于这种行为,当一个选项需要值的时候没有值时偶尔会出现问题。考虑下面的例子,一个用户以用户名“jon”的账号连接主机名为“tonfisk”的服务器, ```shell shell> mysql --host 85.224.35.45 --user jon Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 3 Server version: 5.6.14 Source distribution Type 'help;' or '\h' for help. Type '\c' to clear the buffer. 
mysql> SELECT CURRENT_USER(); +----------------+ | CURRENT_USER() | +----------------+ | jon@% | +----------------+ 1 row in set (0.00 sec) ``` 删除其中一个选项的值后会产生一个错误,像下面显示的那样, ```shell shell> mysql --host 85.224.35.45 --user mysql: option '--user' requires an argument ``` 在这个例子中,mysql不能在--user [222] 选项后面找到一个值,因为在命令行上它后面没有任何内容。如果你删除 不是最后一个选项的值,你会得到一个不同的不是期望的错误信息。 ```shell shell> mysql --host --user jon ERROR 2005 (HY000): Unknown MySQL server host '--user' (1) ``` 因为mysql把命令行上--host后面的内容当成了主机名。--host--user被解释成了--host=--user,并且客户端试图连接运行在主机名为”--user“的服务器。 具有默认值的选项当给它分配值的时候总是需要一个等号。如果不这样做将会产生错误,例如,MySQL 服务器--log-error选项有默认值host_name.err,host_name是MySQL服务器正在运行所在的主机名。假设你的MySQL服务器正在运行在主机名为“tonfisk”的电脑上,考虑下面mysqld_safe的调用 ```shell shell> mysqld_safe & [1] 11699 shell> 080112 12:53:40 mysqld_safe Logging to '/usr/local/mysql/var/tonfisk.err'. 080112 12:53:40 mysqld_safe Starting mysqld daemon with databases from /usr/local/mysql/var shell> ``` 关闭服务之后,用下面的方式启动它, ```shell shell> mysqld_safe --log-error & [1] 11699 shell> 080112 12:53:40 mysqld_safe Logging to '/usr/local/mysql/var/tonfisk.err'. 080112 12:53:40 mysqld_safe Starting mysqld daemon with databases from /usr/local/mysql/var shell> ``` 结果是一样的,因为紧跟--log-error后面没有任何内容,并且它使用了它的默认值。(&符号告诉操作系统在后台运行MySQL,它被MySQL本身忽略),现在假如你想把错误信息记录到my-errors.err文件中,你可能用--log-error my-errors方式启动服务器。但是这并没有想要的目的。正如下面显示的那样。 ```shell shell> mysqld_safe --log-error my-errors & [1] 31357 shell> 080111 22:53:31 mysqld_safe Logging to '/usr/local/mysql/var/tonfisk.err'. 
080111 22:53:32 mysqld_safe Starting mysqld daemon with databases from /usr/local/mysql/var 080111 22:53:34 mysqld_safe mysqld from pid file /usr/local/mysql/var/tonfisk.pid ended [1]+ Done ./mysqld_safe --log-error my-errors ``` 服务器启动时试图用/usr/local/mysql/var/tonfisk.err作为错误日志文件。但是然后关闭服务器。检查这个文件的最后几行将会找到原因。 ```shell shell> tail /usr/local/mysql/var/tonfisk.err 080111 22:53:32 InnoDB: Started; log sequence number 0 46409 /usr/local/mysql/libexec/mysqld: Too many arguments (first extra is 'my-errors'). Use --verbose --help to get a list of available options 080111 22:53:32 [ERROR] Aborting 080111 22:53:32 InnoDB: Starting shutdown... 080111 22:53:34 InnoDB: Shutdown completed; log sequence number 0 46409 080111 22:53:34 [Note] /usr/local/mysql/libexec/mysqld: Shutdown complete ``` 因为--log-error选项提供有默认值,想要分配一个不同的值你必须用一个等号。正如下面的的那样。 ```shell shell> mysqld_safe --log-error=my-errors & [1] 31437 shell> 080111 22:54:15 mysqld_safe Logging to '/usr/local/mysql/var/my-errors.err'. 080111 22:54:15 mysqld_safe Starting mysqld daemon with databases from /usr/local/mysql/var shell> ``` 现在服务器已成功启动,而且错误信息正记录到/usr/local/mysql/var/my-errors.err文件。 当在选项文件里指定选项里也可能出现类似的问题。例如,考虑包含如下内容的my.cnf文件。 ```shell [mysql] host user ``` 当mysql读取这个文件的时候,这两条选项被解析成--host --user或--host=--user,结果如下所示 ```shell shell> mysql ERROR 2005 (HY000): Unknown MySQL server host '--user' (1) ``` 然而,在选项文件里等号不是默认的,考虑下面的的my.cnf文件。 ```shell [mysql] user jon ``` 这个例子中启动mysql将引发一个不同的错误 , ```shell shell> mysql mysql: unknown option '--user jon' ``` 如果你在选项文件里写host tonfisk而不是host=tonfisk,将会产生一个类似的错误,相反地,你应该使用等号。 ```shell [mysql] user=jon ``` 现在可以成功登录, ```shell shell> mysql Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 5 Server version: 5.6.14 Source distribution Type 'help;' or '\h' for help. Type '\c' to clear the buffer. 
mysql> SELECT USER(); +---------------+ | USER() | +---------------+ | jon@localhost | +---------------+ 1 row in set (0.00 sec) ``` 在命令行上则不需要这样做,等号不是必需的。 ```shell shell> mysql --user jon --host tonfisk Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 6 Server version: 5.6.14 Source distribution Type 'help;' or '\h' for help. Type '\c' to clear the buffer. mysql> SELECT USER(); +---------------+ | USER() | +---------------+ | jon@tonfisk | +---------------+ 1 row in set (0.00 sec) ``` 在 MySQL 5.6中,在选项文件中指定选项需要赋一个值,没有值将会使服务出错并中断,假如my.cnf包含下面的内容。 ```shell [mysqld] log_error relay_log relay_log_index ``` 这导致服务器在启动时失败,正如下面所示。 ```shell shell> mysqld_safe & 090514 09:48:39 mysqld_safe Logging to '/home/jon/bin/mysql-5.5/var/tonfisk.err'. 090514 09:48:39 mysqld_safe Starting mysqld daemon with databases from /home/jon/bin/mysql-5.5/var 090514 09:48:39 mysqld_safe mysqld from pid file /home/jon/bin/mysql-5.5/var/tonfisk.pid ended ``` --log-error选项不需要一个参数值。然而,--relay-log选项需要。正如在错误日志里显示的那样(因为没有指定值,默认为datadir/hostname.err)。 ```shell shell> tail -n 3 ../var/tonfisk.err 090514 09:48:39 mysqld_safe Starting mysqld daemon with databases from /home/jon/bin/mysql-5.5/var 090514 9:48:39 [ERROR] /home/jon/bin/mysql-5.5/libexec/mysqld: option '--relay-log' requires an argument 090514 9:48:39 [ERROR] Aborting ``` 从前面的表现来看有一点差别,服务器本应该解释my.cnf文件的后两行为--relay-log=relay_log_index并且创建一个以“relay_log_index”为根目录的中断日志文件(relay log file)。(Bug #25192) [04.02.04]:./Chapter_04/04.02.04_Setting_Environment_Variables.md [05.06.03]:./Chapter_05/05.05.03_Running_Multiple_MySQL_Instances_on_Unix.md [04.06.06]:./Chapter_04/04.06.06_mysql_config_editor—MySQL_Configuration_Utility.md [17.03.00]:./Chapter_17/17.03.00_Configuration_of_MySQL_Cluster_NDB_7.3.md [02.03.03]:./Chapter_02/02.03.03_Installing_MySQL_on_Microsoft_Windows_Using_MySQL_Installer.md [04.02.03.01]:./Chapter_04/04.02.03.01_Using_Options_on_the_Command_Line.md 
[09.01.01]:./Chapter_09/09.01.01_String_Literals.md [02.03.05.07]:./Chapter_02/02.03.05.07_Starting_MySQL_as_a_Windows_Service.md [04.06.06]:./Chapter_04/04.06.06_mysql_config_editor—MySQL_Configuration_Utility.md [13.07.04]:./Chapter_13/13.07.04_SET_Syntax.md [05.01.05]:./Chapter_05/05.01.05_Using_System_Variables.md
{ "pile_set_name": "Github" }
{ "images" : [ { "idiom" : "universal", "scale" : "1x" }, { "idiom" : "universal", "filename" : "[email protected]", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } }
{ "pile_set_name": "Github" }
{ "translatorID": "3ddda662-ec86-448a-9979-9ee1e567c848", "label": "Japanese Diet Laws", "creator": "Frank Bennett", "target": "http://www.shugiin.go.jp/internet/itdb_housei.nsf/html/(houritsu|housei)/[0-9]+\\.htm", "minVersion": "3.0", "maxVersion": "", "priority": 100, "inRepository": true, "translatorType": 4, "browserSupport": "gcsv", "lastUpdated": "2018-06-17 14:39:36" } function detectWeb(doc, url) { return "statute"; } var kanjiNum = { "◯": 0, "一": 1, "二": 2, "三": 3, "四": 4, "五": 5, "六": 6, "七": 7, "八": 8, "九": 9 } var multipliers = [ 100, 10, 1 ] function convertNumerals(number) { return number.split("").map(function(chr){ if (kanjiNum[chr]) { return kanjiNum[chr]; } else { return chr; } }).join(""); } function fixNumber(number) { // Convert kanji numbers to arabic number = convertNumerals(number); // Break into parts nums = number.split(/[\u767e\u5341]/); // Give value to bare hundred and bare ten var defaultNum = "1"; for (var i=0,ilen=3; i<ilen; i++) { if (!nums[i]) { if (i === (ilen-1)) { nums[i] = "0"; } else { nums[i] = "1"; } } } // Pad out the array while (nums.length < 3) { nums = [0].concat(nums); } // Calculate the number var number = 0; for (var i=0,ilen=3; i<3; i++) { number = number + multipliers[i] * parseInt(nums[i], 10); } return number; } var imperialOffset = { "明": 1867, "大": 1911, "昭": 1925, "平": 1988 } function fixDate(date) { var m = date.match(/(明|大|昭|平)(.*)/); if (m) { var offset = imperialOffset[m[1]]; date = convertNumerals(m[2]); date = date.replace(/\u30fb/g, "-") date = date.split("-"); date[0] = parseInt(date[0], 10) + offset; for (var i=1,ilen=3; i<ilen; i++) { while (date[i].length < 2) { date[i] = "0" + date[i]; } } date = date.join("-"); } return date; } function doWeb(doc, url) { var item = new Zotero.Item("statute"); item.jurisdiction = "jp"; item.url = url; var details = ZU.xpath(doc, "//title")[0].textContent; var nameNode = ZU.xpath(doc, "//p[contains(text(),'\u25ce')]")[0]; item.nameOfAct = 
nameNode.textContent.replace(/^[\u3000\u25ce ]+/g, ""); var m = details.match(/法律第(.*)号\((.*)\)/); var number = ""; var date = ""; if (m) { item.publicLawNumber = fixNumber(m[1]); item.dateEnacted = fixDate(m[2]); } var layoutNode = doc.getElementById("mainlayout"); if (layoutNode) { var breadcrumbNode = doc.getElementById("breadcrumb"); if (breadcrumbNode) { breadcrumbNode.parentNode.removeChild(breadcrumbNode); } var anchorNodes = ZU.xpath(layoutNode, ".//a"); for (var anchorNode of anchorNodes) { if (anchorNode.children.length === 0) { anchorNode.parentNode.removeChild(anchorNode); } } item.notes.push({ note: layoutNode.innerHTML.replace(/[\s\S]*?\<p/, "<p") }); } item.complete(); }
{ "pile_set_name": "Github" }
// Verify that SandboxedModule.load() lets a caller override the module-local
// __filename binding seen by the loaded fixture.
var assert = require('assert');
var SandboxedModule = require('../..');

var loadOptions = {
  locals: { __filename: 'my filename' }
};

var exported = SandboxedModule.load('../fixture/local', loadOptions).exports;

// The fixture re-exports its __filename; it must reflect the injected local.
assert.strictEqual(exported.__filename, 'my filename');
{ "pile_set_name": "Github" }
{ "type": "bundle", "id": "bundle--4457a102-f307-4443-a31d-5062e30a6701", "spec_version": "2.0", "objects": [ { "type": "course-of-action", "id": "course-of-action--0257f904-bcb7-445e-9ef7-f9d294e49f67", "created_by_ref": "identity--e50ab59c-5c4f-4d40-bf6a-d58418d89bcd", "created": "2019-04-04T00:00:00.000Z", "modified": "2020-07-30T00:00:00.000Z", "name": "coa-509-0", "description": "Monitor system and domain logs for abnormal access.", "object_marking_refs": [ "marking-definition--17d82bb2-eeeb-4898-bda5-3ddbcd2b799d" ], "x_capec_version": "3.3" } ] }
{ "pile_set_name": "Github" }
<?php
// Lint-rule test fixture: each "Noncompliant" marker asserts that the analyzer
// must raise an issue on that line, the {{...}} text is the expected message,
// and a "//^^^" line underlines the expected issue location in the line above.
// NOTE(review): the line layout -- including where blank lines do or do not
// appear -- is part of the expectations; do not reformat or renumber.

use my\space\AnotherClass; // Noncompliant {{Move the use declarations after the namespace declarations.}}
//^^^
use my\space\MyClass; // Noncompliant {{Add a blank line after this "use" declaration.}}
//^^^
namespace another\bar; // Noncompliant {{Add a blank line after this "namespace another\bar" declaration.}}
//^^^^^^^^^
{
}

namespace {} // Noncompliant {{Add a blank line after this "namespace" declaration.}}
{
}

use my\space\MyClass; /* Noncompliant */ // no blank line after use
{
}

use my\space\MyClass; /* Noncompliant */ // no blank line after use
// use
{
}

namespace another\bar; /* Noncompliant */ // no blank line after namespace
// namespace
{
}

namespace // OK
{
use x; // OK
}

namespace foo\bar; // OK

use my\space\MyClass; // OK

{
}
{ "pile_set_name": "Github" }
import os
import cPickle

from blocks.initialization import IsotropicGaussian, Constant

import data
from model.bidirectional_tgtcls import Model, Stream

# Configuration for the bidirectional RNN model with target-class
# (clustered destination) output.

# Target classes for the output layer: precomputed clusters of arrival
# points. Pickle files must be opened in binary mode: text mode breaks
# protocol >= 1 pickles and any pickle on Windows (the old 'r' mode only
# happened to work for protocol-0 pickles on Unix).
with open(os.path.join(data.path, 'arrival-clusters.pkl'), 'rb') as f:
    tgtcls = cPickle.load(f)

# Categorical feature embeddings: (feature name, vocabulary size,
# embedding dimension).
dim_embeddings = [
    ('origin_call', data.origin_call_train_size, 10),
    ('origin_stand', data.stands_size, 10),
    ('week_of_year', 52, 10),
    ('day_of_week', 7, 10),
    ('qhour_of_day', 24 * 4, 10),  # quarter-hour of day: 96 buckets
    ('taxi_id', data.taxi_id_size, 10),
]

# Size of each direction of the recurrent hidden state.
hidden_state_dim = 20

# Sizes of the fully connected layers applied after the recurrent encoder.
dim_hidden = [100]

# Weight/bias initialization schemes (Blocks initializers).
embed_weights_init = IsotropicGaussian(0.01)
weights_init = IsotropicGaussian(0.1)
biases_init = Constant(0.01)

batch_size = 50
# NOTE(review): presumably batch_sort_size batches are buffered and sorted
# by sequence length to reduce padding -- confirm against the Stream class.
batch_sort_size = 40

max_splits = 200
{ "pile_set_name": "Github" }
#! FIELDS time parameter cav cavnum 0.000000 0 0.0000 0.0000 0.000000 1 0.0000 0.0000 0.000000 2 -0.0000 -0.0000 0.000000 3 0.0000 0.0000 0.000000 4 0.0000 0.0000 0.000000 5 -0.0000 -0.0000 0.000000 6 0.0000 0.0000 0.000000 7 0.0000 0.0000 0.000000 8 -0.0000 -0.0000 0.000000 9 -0.0000 -0.0000 0.000000 10 -0.0000 -0.0000 0.000000 11 0.0000 0.0000 0.000000 12 -0.0000 -0.0000 0.000000 13 -0.0000 -0.0000 0.000000 14 -0.0000 -0.0000 0.000000 15 0.0000 0.0000 0.000000 16 0.0000 0.0000 0.000000 17 0.0000 0.0000 0.000000 18 0.0000 0.0000 0.000000 19 0.0000 0.0000 0.000000 20 0.0000 0.0000 0.000000 21 0.0000 0.0000 0.000000 22 0.0000 0.0000 0.000000 23 0.0000 0.0000 0.050000 0 0.0001 0.0001 0.050000 1 -0.0003 -0.0003 0.050000 2 3.2901 3.2901 0.050000 3 -0.0000 -0.0000 0.050000 4 0.0000 0.0000 0.050000 5 0.0001 0.0001 0.050000 6 0.0000 0.0000 0.050000 7 -0.0001 -0.0001 0.050000 8 -0.0003 -0.0003 0.050000 9 0.0001 0.0001 0.050000 10 -0.0003 -0.0003 0.050000 11 0.2043 0.2043 0.050000 12 -0.0001 -0.0001 0.050000 13 0.0001 0.0001 0.050000 14 0.2299 0.2299 0.050000 15 0.0000 0.0000 0.050000 16 -0.0001 -0.0001 0.050000 17 -1.5628 -1.5628 0.050000 18 0.0000 0.0000 0.050000 19 0.0000 0.0000 0.050000 20 -1.9571 -1.9571 0.050000 21 -0.0001 -0.0001 0.050000 22 0.0003 0.0003 0.050000 23 0.0000 0.0000 0.100000 0 2.1069 2.1069 0.100000 1 -2.9029 -2.9029 0.100000 2 -0.6058 -0.6058 0.100000 3 0.0269 0.0269 0.100000 4 -0.0379 -0.0379 0.100000 5 0.2979 0.2979 0.100000 6 -0.0379 -0.0379 0.100000 7 0.0516 0.0516 0.100000 8 -0.4105 -0.4105 0.100000 9 0.2979 0.2979 0.100000 10 -0.4105 -0.4105 0.100000 11 -0.0857 -0.0857 0.100000 12 -0.7768 -0.7768 0.100000 13 1.0692 1.0692 0.100000 14 -0.2976 -0.2976 0.100000 15 0.7768 0.7768 0.100000 16 -1.0692 -1.0692 0.100000 17 -0.2229 -0.2229 0.100000 18 0.0000 0.0000 0.100000 19 0.0000 0.0000 0.100000 20 0.5206 0.5206 0.100000 21 -2.1069 -2.1069 0.100000 22 2.9029 2.9029 0.100000 23 0.6058 0.6058 0.150000 0 1.0917 1.0917 0.150000 1 1.5027 1.5027 0.150000 2 
-0.0001 -0.0001 0.150000 3 0.7408 0.7408 0.150000 4 0.1573 0.1573 0.150000 5 -0.5882 -0.5882 0.150000 6 0.1573 0.1573 0.150000 7 -0.9705 -0.9705 0.150000 8 -0.8097 -0.8097 0.150000 9 -0.5882 -0.5882 0.150000 10 -0.8097 -0.8097 0.150000 11 -0.0000 -0.0000 0.150000 12 -0.3940 -0.3940 0.150000 13 -2.0096 -2.0096 0.150000 14 -1.0007 -1.0007 0.150000 15 -0.6977 -0.6977 0.150000 16 0.5069 0.5069 0.150000 17 1.0007 1.0007 0.150000 18 0.0000 0.0000 0.150000 19 -0.0000 0.0000 0.150000 20 -0.0000 -0.0000 0.150000 21 -0.0000 -0.0000 0.150000 22 0.0000 0.0000 0.150000 23 0.0001 0.0001 0.200000 0 0.0000 0.0000 0.200000 1 0.0000 0.0000 0.200000 2 0.0000 0.0000 0.200000 3 0.0000 0.0000 0.200000 4 0.0000 0.0000 0.200000 5 0.0000 0.0000 0.200000 6 0.0000 0.0000 0.200000 7 0.0000 0.0000 0.200000 8 0.0000 0.0000 0.200000 9 0.0000 0.0000 0.200000 10 0.0000 0.0000 0.200000 11 0.0000 0.0000 0.200000 12 0.0000 0.0000 0.200000 13 0.0000 0.0000 0.200000 14 0.0000 0.0000 0.200000 15 0.0000 0.0000 0.200000 16 0.0000 0.0000 0.200000 17 0.0000 0.0000 0.200000 18 0.0000 0.0000 0.200000 19 0.0000 0.0000 0.200000 20 0.0000 0.0000 0.200000 21 0.0000 0.0000 0.200000 22 0.0000 0.0000 0.200000 23 0.0000 0.0000 0.250000 0 0.0000 0.0000 0.250000 1 -0.0000 -0.0000 0.250000 2 0.0000 0.0000 0.250000 3 -0.0000 -0.0000 0.250000 4 0.0000 0.0000 0.250000 5 0.0000 0.0000 0.250000 6 0.0000 0.0000 0.250000 7 -0.0000 -0.0000 0.250000 8 -0.0000 -0.0000 0.250000 9 0.0000 0.0000 0.250000 10 -0.0000 -0.0000 0.250000 11 0.0000 0.0000 0.250000 12 0.0000 0.0000 0.250000 13 -0.0000 0.0000 0.250000 14 0.0000 0.0000 0.250000 15 -0.0000 -0.0000 0.250000 16 0.0000 0.0000 0.250000 17 -0.0000 -0.0000 0.250000 18 0.0000 0.0000 0.250000 19 0.0000 0.0000 0.250000 20 -0.0000 -0.0000 0.250000 21 -0.0000 -0.0000 0.250000 22 0.0000 0.0000 0.250000 23 0.0000 0.0000 0.300000 0 -0.6016 -0.6016 0.300000 1 1.8508 1.8508 0.300000 2 0.0017 0.0017 0.300000 3 -0.4267 -0.4267 0.300000 4 0.5494 0.5494 0.300000 5 0.2015 0.2015 0.300000 6 0.5494 
0.5494 0.300000 7 0.6598 0.6598 0.300000 8 -0.6200 -0.6200 0.300000 9 0.2015 0.2015 0.300000 10 -0.6200 -0.6200 0.300000 11 -0.0006 -0.0006 0.300000 12 1.3280 1.3280 0.300000 13 -1.6148 -1.6148 0.300000 14 -0.6532 -0.6532 0.300000 15 -0.7264 -0.7264 0.300000 16 -0.2360 -0.2360 0.300000 17 0.6521 0.6521 0.300000 18 0.0000 0.0000 0.300000 19 0.0000 0.0000 0.300000 20 -0.0006 -0.0006 0.300000 21 -0.0000 -0.0000 0.300000 22 -0.0000 -0.0000 0.300000 23 0.0000 0.0000 0.350000 0 0.0004 0.0004 0.350000 1 0.0001 0.0001 0.350000 2 -0.0047 -0.0047 0.350000 3 -0.0000 -0.0000 0.350000 4 -0.0000 -0.0000 0.350000 5 -0.0001 -0.0001 0.350000 6 -0.0000 -0.0000 0.350000 7 -0.0000 -0.0000 0.350000 8 -0.0000 -0.0000 0.350000 9 -0.0001 -0.0001 0.350000 10 -0.0000 -0.0000 0.350000 11 0.0017 0.0017 0.350000 12 -0.0000 -0.0000 0.350000 13 -0.0000 -0.0000 0.350000 14 0.0014 0.0014 0.350000 15 0.0000 0.0000 0.350000 16 0.0000 0.0000 0.350000 17 -0.0008 -0.0008 0.350000 18 0.0000 0.0000 0.350000 19 0.0000 0.0000 0.350000 20 -0.0006 -0.0006 0.350000 21 -0.0004 -0.0004 0.350000 22 -0.0001 -0.0001 0.350000 23 0.0047 0.0047 0.400000 0 0.0000 0.0000 0.400000 1 -0.0000 -0.0000 0.400000 2 0.0000 0.0000 0.400000 3 0.0000 0.0000 0.400000 4 -0.0000 -0.0000 0.400000 5 0.0000 0.0000 0.400000 6 -0.0000 -0.0000 0.400000 7 0.0000 0.0000 0.400000 8 -0.0000 -0.0000 0.400000 9 0.0000 0.0000 0.400000 10 -0.0000 -0.0000 0.400000 11 0.0000 0.0000 0.400000 12 0.0000 0.0000 0.400000 13 0.0000 0.0000 0.400000 14 -0.0000 -0.0000 0.400000 15 -0.0000 -0.0000 0.400000 16 -0.0000 -0.0000 0.400000 17 -0.0000 -0.0000 0.400000 18 0.0000 0.0000 0.400000 19 0.0000 0.0000 0.400000 20 -0.0000 -0.0000 0.400000 21 -0.0000 -0.0000 0.400000 22 0.0000 0.0000 0.400000 23 0.0000 0.0000 0.450000 0 1.9659 1.9659 0.450000 1 -0.6388 -0.6388 0.450000 2 -2.0706 -2.0706 0.450000 3 -0.7693 -0.7693 0.450000 4 -0.7479 -0.7479 0.450000 5 0.1919 0.1919 0.450000 6 -0.7479 -0.7479 0.450000 7 0.5672 0.5672 0.450000 8 -0.0624 -0.0624 0.450000 9 
0.1919 0.1919 0.450000 10 -0.0624 -0.0624 0.450000 11 -0.2021 -0.2021 0.450000 12 0.3083 0.3083 0.450000 13 0.9490 0.9490 0.450000 14 1.0002 1.0001 0.450000 15 -0.3083 -0.3083 0.450000 16 -0.9490 -0.9490 0.450000 17 -0.0006 -0.0006 0.450000 18 0.0000 0.0000 0.450000 19 0.0000 0.0000 0.450000 20 -0.9995 -0.9995 0.450000 21 -1.9659 -1.9659 0.450000 22 0.6388 0.6388 0.450000 23 2.0706 2.0706 0.500000 0 0.0056 0.0056 0.500000 1 0.0016 0.0016 0.500000 2 0.0010 0.0010 0.500000 3 0.0018 0.0018 0.500000 4 0.0001 0.0001 0.500000 5 0.0055 0.0055 0.500000 6 0.0001 0.0001 0.500000 7 0.0000 0.0000 0.500000 8 0.0015 0.0015 0.500000 9 0.0055 0.0055 0.500000 10 0.0015 0.0015 0.500000 11 -0.0000 -0.0000 0.500000 12 0.0000 0.0000 0.500000 13 0.0004 0.0004 0.500000 14 -0.0056 -0.0056 0.500000 15 0.0000 0.0000 0.500000 16 -0.0004 -0.0004 0.500000 17 0.0041 0.0041 0.500000 18 0.0000 0.0000 0.500000 19 0.0000 0.0000 0.500000 20 0.0004 0.0004 0.500000 21 -0.0056 -0.0056 0.500000 22 -0.0016 -0.0016 0.500000 23 0.0000 0.0000 0.550000 0 -0.0624 -0.0624 0.550000 1 0.1557 0.1557 0.550000 2 0.0075 0.0075 0.550000 3 0.0108 0.0108 0.550000 4 -0.0238 -0.0238 0.550000 5 -0.0419 -0.0419 0.550000 6 -0.0238 -0.0238 0.550000 7 0.0288 0.0288 0.550000 8 0.1348 0.1348 0.550000 9 -0.0419 -0.0419 0.550000 10 0.1348 0.1348 0.550000 11 -0.0012 -0.0012 0.550000 12 0.0513 0.0513 0.550000 13 -0.1215 -0.1215 0.550000 14 -0.1401 -0.1401 0.550000 15 -0.0406 -0.0406 0.550000 16 0.1250 0.1250 0.550000 17 0.0008 0.0008 0.550000 18 0.0000 0.0000 0.550000 19 -0.0000 0.0000 0.550000 20 0.1318 0.1318 0.550000 21 0.0517 0.0517 0.550000 22 -0.1591 -0.1591 0.550000 23 0.0000 0.0000 0.600000 0 -0.0000 -0.0000 0.600000 1 0.0000 0.0000 0.600000 2 -0.0000 -0.0000 0.600000 3 -0.0000 -0.0000 0.600000 4 -0.0000 -0.0000 0.600000 5 0.0000 0.0000 0.600000 6 -0.0000 -0.0000 0.600000 7 0.0000 0.0000 0.600000 8 -0.0000 -0.0000 0.600000 9 0.0000 0.0000 0.600000 10 -0.0000 -0.0000 0.600000 11 0.0000 0.0000 0.600000 12 0.0000 0.0000 
0.600000 13 -0.0000 -0.0000 0.600000 14 0.0000 0.0000 0.600000 15 -0.0000 -0.0000 0.600000 16 0.0000 0.0000 0.600000 17 -0.0000 -0.0000 0.600000 18 0.0000 0.0000 0.600000 19 0.0000 0.0000 0.600000 20 -0.0000 -0.0000 0.600000 21 0.0000 0.0000 0.600000 22 -0.0000 -0.0000 0.600000 23 0.0000 0.0000 0.650000 0 -0.0000 -0.0000 0.650000 1 -0.0000 -0.0000 0.650000 2 -0.0000 -0.0000 0.650000 3 0.0000 0.0000 0.650000 4 0.0000 0.0000 0.650000 5 0.0000 0.0000 0.650000 6 0.0000 0.0000 0.650000 7 -0.0000 -0.0000 0.650000 8 0.0000 0.0000 0.650000 9 0.0000 0.0000 0.650000 10 0.0000 0.0000 0.650000 11 0.0000 0.0000 0.650000 12 0.0000 0.0000 0.650000 13 0.0000 0.0000 0.650000 14 0.0000 0.0000 0.650000 15 0.0000 0.0000 0.650000 16 -0.0000 -0.0000 0.650000 17 -0.0000 -0.0000 0.650000 18 0.0000 0.0000 0.650000 19 0.0000 0.0000 0.650000 20 -0.0000 -0.0000 0.650000 21 0.0000 0.0000 0.650000 22 -0.0000 0.0000 0.650000 23 0.0000 0.0000 0.700000 0 -0.7671 -0.7671 0.700000 1 2.7451 2.7451 0.700000 2 -0.1514 -0.1514 0.700000 3 0.0033 0.0033 0.700000 4 0.0866 0.0866 0.700000 5 -0.1511 -0.1511 0.700000 6 0.0866 0.0866 0.700000 7 -0.0070 -0.0070 0.700000 8 0.5407 0.5407 0.700000 9 -0.1511 -0.1511 0.700000 10 0.5407 0.5407 0.700000 11 -0.0298 -0.0298 0.700000 12 -0.0978 -0.0978 0.700000 13 0.0318 0.0318 0.700000 14 -0.7765 -0.7765 0.700000 15 0.0978 0.0978 0.700000 16 -0.0318 -0.0318 0.700000 17 0.4704 0.4704 0.700000 18 -0.0000 0.0000 0.700000 19 0.0000 0.0000 0.700000 20 0.3061 0.3061 0.700000 21 0.7671 0.7671 0.700000 22 -2.7451 -2.7451 0.700000 23 0.1514 0.1514 0.750000 0 0.0000 0.0000 0.750000 1 0.0000 0.0000 0.750000 2 -0.0000 -0.0000 0.750000 3 0.0000 0.0000 0.750000 4 0.0000 0.0000 0.750000 5 -0.0000 -0.0000 0.750000 6 0.0000 0.0000 0.750000 7 -0.0000 -0.0000 0.750000 8 -0.0000 -0.0000 0.750000 9 -0.0000 -0.0000 0.750000 10 -0.0000 -0.0000 0.750000 11 0.0000 0.0000 0.750000 12 -0.0000 -0.0000 0.750000 13 0.0000 0.0000 0.750000 14 -0.0000 -0.0000 0.750000 15 -0.0000 -0.0000 0.750000 16 
0.0000 0.0000 0.750000 17 -0.0000 -0.0000 0.750000 18 0.0000 0.0000 0.750000 19 0.0000 0.0000 0.750000 20 0.0000 0.0000 0.750000 21 0.0000 0.0000 0.750000 22 -0.0000 -0.0000 0.750000 23 0.0000 0.0000 0.800000 0 1.3798 1.3798 0.800000 1 0.4159 0.4159 0.800000 2 -1.5851 -1.5851 0.800000 3 0.0251 0.0251 0.800000 4 0.0100 0.0100 0.800000 5 -1.4253 -1.4253 0.800000 6 0.0100 0.0100 0.800000 7 -0.0045 -0.0045 0.800000 8 -0.4296 -0.4296 0.800000 9 -1.4253 -1.4253 0.800000 10 -0.4296 -0.4296 0.800000 11 0.0523 0.0523 0.800000 12 -1.0430 -1.0431 0.800000 13 -0.3065 -0.3065 0.800000 14 1.2921 1.2921 0.800000 15 -0.3368 -0.3368 0.800000 16 -0.1094 -0.1094 0.800000 17 -1.1644 -1.1644 0.800000 18 0.0000 0.0000 0.800000 19 0.0000 0.0000 0.800000 20 -0.1277 -0.1277 0.800000 21 0.0000 0.0000 0.800000 22 -0.0000 0.0000 0.800000 23 1.5851 1.5851 0.850000 0 0.0017 0.0017 0.850000 1 -0.0024 -0.0024 0.850000 2 3.5236 3.5236 0.850000 3 -0.0019 -0.0019 0.850000 4 0.0010 0.0010 0.850000 5 -0.0001 -0.0001 0.850000 6 0.0010 0.0010 0.850000 7 0.0007 0.0007 0.850000 8 0.0001 0.0001 0.850000 9 -0.0001 -0.0001 0.850000 10 0.0001 0.0001 0.850000 11 -0.1755 -0.1755 0.850000 12 -0.0030 -0.0030 0.850000 13 0.0015 0.0015 0.850000 14 -0.3968 -0.3968 0.850000 15 0.0012 0.0012 0.850000 16 0.0009 0.0009 0.850000 17 -1.3021 -1.3021 0.850000 18 -0.0000 0.0000 0.850000 19 -0.0000 0.0000 0.850000 20 -1.8247 -1.8247 0.850000 21 0.0000 0.0000 0.850000 22 0.0000 0.0000 0.850000 23 0.0000 0.0000 0.900000 0 0.8454 0.8454 0.900000 1 0.9922 0.9922 0.900000 2 0.0000 0.0000 0.900000 3 -0.0992 -0.0992 0.900000 4 -0.0842 -0.0842 0.900000 5 -0.3999 -0.3999 0.900000 6 -0.0842 -0.0842 0.900000 7 -0.1222 -0.1222 0.900000 8 -0.4694 -0.4694 0.900000 9 -0.3999 -0.3999 0.900000 10 -0.4694 -0.4694 0.900000 11 -0.0000 -0.0000 0.900000 12 -0.6491 -0.6491 0.900000 13 -0.7219 -0.7219 0.900000 14 -0.6625 -0.6625 0.900000 15 -0.1964 -0.1964 0.900000 16 -0.2703 -0.2703 0.900000 17 0.0476 0.0476 0.900000 18 0.0000 0.0000 0.900000 19 
0.0000 0.0000 0.900000 20 0.6148 0.6148 0.900000 21 0.0000 0.0000 0.900000 22 -0.0000 0.0000 0.900000 23 0.0000 0.0000 0.950000 0 -0.0000 -0.0000 0.950000 1 -0.0000 -0.0000 0.950000 2 0.0000 0.0000 0.950000 3 0.0000 0.0000 0.950000 4 0.0000 0.0000 0.950000 5 -0.0000 -0.0000 0.950000 6 0.0000 0.0000 0.950000 7 0.0000 0.0000 0.950000 8 -0.0000 -0.0000 0.950000 9 -0.0000 -0.0000 0.950000 10 -0.0000 -0.0000 0.950000 11 0.0000 0.0000 0.950000 12 0.0000 0.0000 0.950000 13 0.0000 0.0000 0.950000 14 -0.0000 -0.0000 0.950000 15 -0.0000 -0.0000 0.950000 16 -0.0000 -0.0000 0.950000 17 -0.0000 -0.0000 0.950000 18 0.0000 0.0000 0.950000 19 0.0000 0.0000 0.950000 20 -0.0000 -0.0000 0.950000 21 0.0000 0.0000 0.950000 22 0.0000 0.0000 0.950000 23 0.0000 0.0000 1.000000 0 -0.0000 -0.0000 1.000000 1 -0.0334 -0.0334 1.000000 2 -0.0000 -0.0000 1.000000 3 -0.0000 -0.0000 1.000000 4 0.0000 0.0000 1.000000 5 -0.0000 -0.0000 1.000000 6 0.0000 0.0000 1.000000 7 0.0103 0.0103 1.000000 8 -0.0164 -0.0164 1.000000 9 -0.0000 -0.0000 1.000000 10 -0.0164 -0.0164 1.000000 11 -0.0000 -0.0000 1.000000 12 -0.0000 -0.0000 1.000000 13 0.0156 0.0156 1.000000 14 -0.0164 -0.0164 1.000000 15 0.0000 0.0000 1.000000 16 -0.0156 -0.0156 1.000000 17 0.0000 0.0000 1.000000 18 0.0000 0.0000 1.000000 19 0.0000 0.0000 1.000000 20 0.0164 0.0164 1.000000 21 0.0000 0.0000 1.000000 22 0.0334 0.0334 1.000000 23 0.0000 0.0000 1.050000 0 0.0000 0.0000 1.050000 1 0.0000 -0.0000 1.050000 2 -0.0000 -0.0000 1.050000 3 -0.0000 -0.0000 1.050000 4 -0.0000 -0.0000 1.050000 5 -0.0000 -0.0000 1.050000 6 -0.0000 -0.0000 1.050000 7 0.0000 -0.0000 1.050000 8 -0.0000 0.0000 1.050000 9 -0.0000 -0.0000 1.050000 10 -0.0000 0.0000 1.050000 11 0.0000 0.0000 1.050000 12 -0.0000 -0.0000 1.050000 13 -0.0000 0.0000 1.050000 14 0.0000 0.0000 1.050000 15 -0.0000 0.0000 1.050000 16 0.0000 -0.0000 1.050000 17 -0.0000 -0.0000 1.050000 18 0.0000 0.0000 1.050000 19 0.0000 0.0000 1.050000 20 -0.0000 -0.0000 1.050000 21 0.0000 0.0000 1.050000 22 -0.0000 
0.0000 1.050000 23 0.0000 0.0000 1.100000 0 -0.0000 -0.0000 1.100000 1 0.0000 0.0000 1.100000 2 0.0000 0.0000 1.100000 3 0.0000 0.0000 1.100000 4 -0.0000 -0.0000 1.100000 5 -0.0000 -0.0000 1.100000 6 -0.0000 -0.0000 1.100000 7 -0.0000 -0.0000 1.100000 8 0.0000 0.0000 1.100000 9 -0.0000 -0.0000 1.100000 10 0.0000 0.0000 1.100000 11 0.0000 0.0000 1.100000 12 -0.0000 -0.0000 1.100000 13 -0.0000 -0.0000 1.100000 14 -0.0000 -0.0000 1.100000 15 0.0000 -0.0000 1.100000 16 0.0000 0.0000 1.100000 17 0.0000 0.0000 1.100000 18 0.0000 0.0000 1.100000 19 0.0000 0.0000 1.100000 20 -0.0000 -0.0000 1.100000 21 0.0000 0.0000 1.100000 22 0.0000 0.0000 1.100000 23 0.0000 0.0000 1.150000 0 0.1091 0.1091 1.150000 1 -0.0087 -0.0087 1.150000 2 0.0036 0.0036 1.150000 3 -0.0293 -0.0293 1.150000 4 -0.0085 -0.0085 1.150000 5 -0.0764 -0.0764 1.150000 6 -0.0085 -0.0085 1.150000 7 -0.0010 -0.0010 1.150000 8 0.0016 0.0016 1.150000 9 -0.0764 -0.0764 1.150000 10 0.0016 0.0016 1.150000 11 -0.0030 -0.0030 1.150000 12 -0.1091 -0.1091 1.150000 13 -0.0055 -0.0055 1.150000 14 -0.0766 -0.0766 1.150000 15 -0.0000 -0.0000 1.150000 16 0.0096 0.0096 1.150000 17 0.0737 0.0737 1.150000 18 0.0000 0.0000 1.150000 19 0.0034 0.0034 1.150000 20 -0.0025 -0.0025 1.150000 21 0.0000 0.0000 1.150000 22 0.0013 0.0013 1.150000 23 0.0018 0.0018 1.200000 0 -0.0000 -0.0000 1.200000 1 -0.0000 -0.0000 1.200000 2 -0.0000 -0.0000 1.200000 3 -0.0000 -0.0000 1.200000 4 0.0000 0.0000 1.200000 5 0.0000 0.0000 1.200000 6 0.0000 0.0000 1.200000 7 0.0000 0.0000 1.200000 8 0.0000 0.0000 1.200000 9 0.0000 0.0000 1.200000 10 0.0000 0.0000 1.200000 11 0.0000 0.0000 1.200000 12 -0.0000 -0.0000 1.200000 13 0.0000 0.0000 1.200000 14 0.0000 0.0000 1.200000 15 -0.0000 0.0000 1.200000 16 -0.0000 -0.0000 1.200000 17 -0.0000 -0.0000 1.200000 18 0.0000 0.0000 1.200000 19 -0.0000 -0.0000 1.200000 20 0.0000 0.0000 1.200000 21 0.0000 0.0000 1.200000 22 0.0000 0.0000 1.200000 23 0.0000 0.0000 1.250000 0 0.0000 0.0000 1.250000 1 -0.0000 -0.0000 1.250000 
2 -0.0000 -0.0000 1.250000 3 0.0000 0.0000 1.250000 4 0.0000 0.0000 1.250000 5 -0.0000 -0.0000 1.250000 6 0.0000 0.0000 1.250000 7 -0.0000 -0.0000 1.250000 8 0.0000 0.0000 1.250000 9 -0.0000 -0.0000 1.250000 10 0.0000 0.0000 1.250000 11 0.0000 0.0000 1.250000 12 -0.0000 -0.0000 1.250000 13 0.0000 0.0000 1.250000 14 -0.0000 -0.0000 1.250000 15 0.0000 0.0000 1.250000 16 -0.0000 -0.0000 1.250000 17 0.0000 0.0000 1.250000 18 0.0000 0.0000 1.250000 19 -0.0000 -0.0000 1.250000 20 0.0000 0.0000 1.250000 21 0.0000 0.0000 1.250000 22 -0.0000 -0.0000 1.250000 23 0.0000 0.0000 1.300000 0 -0.1849 -0.1849 1.300000 1 0.1114 0.1114 1.300000 2 -0.0639 -0.0639 1.300000 3 0.0288 0.0288 1.300000 4 -0.0054 -0.0054 1.300000 5 0.0048 0.0048 1.300000 6 -0.0054 -0.0054 1.300000 7 0.0029 0.0029 1.300000 8 -0.0020 -0.0020 1.300000 9 0.0048 0.0048 1.300000 10 -0.0020 -0.0020 1.300000 11 0.0020 0.0020 1.300000 12 0.0000 0.0000 1.300000 13 0.0130 0.0130 1.300000 14 -0.0048 -0.0048 1.300000 15 -0.0000 0.0000 1.300000 16 -0.0120 -0.0120 1.300000 17 0.0052 0.0052 1.300000 18 0.0000 0.0000 1.300000 19 -0.0010 -0.0010 1.300000 20 -0.0003 -0.0003 1.300000 21 0.1849 0.1849 1.300000 22 -0.1114 -0.1114 1.300000 23 0.0639 0.0639 1.350000 0 -0.0000 -0.0000 1.350000 1 -0.0009 -0.0009 1.350000 2 0.0003 0.0003 1.350000 3 -0.0000 -0.0000 1.350000 4 0.0001 0.0001 1.350000 5 -0.0000 -0.0000 1.350000 6 0.0001 0.0001 1.350000 7 0.0003 0.0003 1.350000 8 -0.0001 -0.0001 1.350000 9 -0.0000 -0.0000 1.350000 10 -0.0001 -0.0001 1.350000 11 0.0000 0.0000 1.350000 12 0.0000 0.0000 1.350000 13 0.0002 0.0002 1.350000 14 -0.0000 -0.0000 1.350000 15 -0.0000 -0.0000 1.350000 16 0.0007 0.0007 1.350000 17 -0.0002 -0.0002 1.350000 18 0.0000 0.0000 1.350000 19 -0.0000 -0.0000 1.350000 20 -0.0000 -0.0000 1.350000 21 0.0000 0.0000 1.350000 22 -0.0000 0.0000 1.350000 23 -0.0000 0.0000 1.400000 0 -0.0004 -0.0004 1.400000 1 0.0021 0.0021 1.400000 2 -0.0002 -0.0002 1.400000 3 -0.0000 -0.0000 1.400000 4 -0.0001 -0.0001 1.400000 5 
0.0000 0.0000 1.400000 6 -0.0001 -0.0001 1.400000 7 0.0007 0.0007 1.400000 8 -0.0001 -0.0001 1.400000 9 0.0000 0.0000 1.400000 10 -0.0001 -0.0001 1.400000 11 0.0000 0.0000 1.400000 12 -0.0000 0.0000 1.400000 13 -0.0001 -0.0001 1.400000 14 0.0001 0.0001 1.400000 15 0.0000 0.0000 1.400000 16 0.0002 0.0002 1.400000 17 -0.0000 -0.0000 1.400000 18 0.0000 0.0000 1.400000 19 -0.0000 -0.0000 1.400000 20 -0.0000 -0.0000 1.400000 21 0.0004 0.0004 1.400000 22 -0.0021 -0.0021 1.400000 23 0.0002 0.0002 1.450000 0 -0.0002 -0.0002 1.450000 1 0.0001 0.0001 1.450000 2 0.0002 0.0002 1.450000 3 0.0000 0.0000 1.450000 4 0.0000 0.0000 1.450000 5 -0.0001 -0.0001 1.450000 6 0.0000 0.0000 1.450000 7 0.0000 0.0000 1.450000 8 0.0000 0.0000 1.450000 9 -0.0001 -0.0001 1.450000 10 0.0000 0.0000 1.450000 11 0.0000 0.0000 1.450000 12 -0.0000 0.0000 1.450000 13 0.0000 0.0000 1.450000 14 -0.0000 -0.0000 1.450000 15 0.0000 0.0000 1.450000 16 -0.0000 -0.0000 1.450000 17 0.0000 0.0000 1.450000 18 0.0000 0.0000 1.450000 19 0.0000 0.0000 1.450000 20 0.0000 0.0000 1.450000 21 0.0002 0.0002 1.450000 22 -0.0001 -0.0001 1.450000 23 -0.0002 -0.0002 1.500000 0 0.1661 0.1661 1.500000 1 -3.2091 -3.2091 1.500000 2 -0.7624 -0.7624 1.500000 3 -0.0336 -0.0336 1.500000 4 -0.0056 -0.0056 1.500000 5 0.0187 0.0187 1.500000 6 -0.0056 -0.0056 1.500000 7 0.1079 0.1079 1.500000 8 -0.3615 -0.3615 1.500000 9 0.0187 0.0187 1.500000 10 -0.3615 -0.3615 1.500000 11 -0.0859 -0.0859 1.500000 12 -0.1661 -0.1661 1.500000 13 2.5552 2.5552 1.500000 14 1.0142 1.0142 1.500000 15 0.0000 0.0000 1.500000 16 0.6539 0.6539 1.500000 17 0.1353 0.1353 1.500000 18 0.0000 0.0000 1.500000 19 0.0000 0.0000 1.500000 20 -0.3871 -0.3871 1.500000 21 0.0000 0.0000 1.500000 22 -0.0000 0.0000 1.500000 23 -0.0000 0.0000 1.550000 0 -0.6307 -0.6307 1.550000 1 -0.0005 -0.0005 1.550000 2 0.0017 0.0017 1.550000 3 0.1211 0.1211 1.550000 4 0.1917 0.1917 1.550000 5 0.2526 0.2526 1.550000 6 0.1917 0.1917 1.550000 7 -0.0001 -0.0001 1.550000 8 0.0001 0.0001 1.550000 
9 0.2526 0.2526 1.550000 10 0.0001 0.0001 1.550000 11 -0.0005 -0.0005 1.550000 12 0.0000 0.0000 1.550000 13 0.1919 0.1919 1.550000 14 0.2522 0.2522 1.550000 15 -0.0000 0.0000 1.550000 16 -0.1916 -0.1916 1.550000 17 -0.2529 -0.2529 1.550000 18 0.0000 0.0000 1.550000 19 -0.0002 -0.0002 1.550000 20 0.0007 0.0007 1.550000 21 0.6307 0.6307 1.550000 22 0.0005 0.0005 1.550000 23 -0.0017 -0.0017 1.600000 0 0.0023 0.0023 1.600000 1 -0.0044 -0.0044 1.600000 2 0.0128 0.0128 1.600000 3 -0.0001 -0.0001 1.600000 4 0.0002 0.0002 1.600000 5 0.0038 0.0038 1.600000 6 0.0002 0.0002 1.600000 7 0.0003 0.0003 1.600000 8 -0.0014 -0.0014 1.600000 9 0.0038 0.0038 1.600000 10 -0.0014 -0.0014 1.600000 11 0.0037 0.0037 1.600000 12 -0.0023 -0.0023 1.600000 13 0.0049 0.0049 1.600000 14 -0.0092 -0.0092 1.600000 15 0.0000 0.0000 1.600000 16 -0.0044 -0.0044 1.600000 17 0.0086 0.0086 1.600000 18 0.0000 0.0000 1.600000 19 -0.0005 -0.0005 1.600000 20 0.0006 0.0006 1.600000 21 0.0000 0.0000 1.600000 22 0.0044 0.0044 1.600000 23 -0.0128 -0.0128 1.650000 0 0.0003 0.0003 1.650000 1 0.0010 0.0010 1.650000 2 0.0014 0.0014 1.650000 3 -0.0001 -0.0001 1.650000 4 -0.0000 -0.0000 1.650000 5 0.0002 0.0002 1.650000 6 -0.0000 -0.0000 1.650000 7 0.0005 0.0005 1.650000 8 -0.0001 -0.0001 1.650000 9 0.0002 0.0002 1.650000 10 -0.0001 -0.0001 1.650000 11 -0.0012 -0.0012 1.650000 12 -0.0003 -0.0003 1.650000 13 -0.0012 -0.0012 1.650000 14 -0.0001 -0.0001 1.650000 15 -0.0000 -0.0000 1.650000 16 0.0006 0.0006 1.650000 17 0.0006 0.0006 1.650000 18 0.0000 0.0000 1.650000 19 0.0006 0.0006 1.650000 20 -0.0005 -0.0005 1.650000 21 0.0000 0.0000 1.650000 22 -0.0010 -0.0010 1.650000 23 -0.0014 -0.0014 1.700000 0 0.0089 0.0089 1.700000 1 0.0005 0.0005 1.700000 2 -0.0093 -0.0093 1.700000 3 0.0017 0.0017 1.700000 4 -0.0004 -0.0004 1.700000 5 -0.0016 -0.0016 1.700000 6 -0.0004 -0.0004 1.700000 7 0.0001 0.0001 1.700000 8 0.0004 0.0004 1.700000 9 -0.0016 -0.0016 1.700000 10 0.0004 0.0004 1.700000 11 0.0015 0.0015 1.700000 12 -0.0089 
-0.0089 1.700000 13 -0.0015 -0.0015 1.700000 14 0.0097 0.0097 1.700000 15 0.0000 0.0000 1.700000 16 0.0005 0.0005 1.700000 17 -0.0002 -0.0002 1.700000 18 0.0000 0.0000 1.700000 19 0.0005 0.0005 1.700000 20 -0.0002 -0.0002 1.700000 21 0.0000 0.0000 1.700000 22 0.0000 0.0000 1.700000 23 -0.0000 0.0000 1.750000 0 0.0000 0.0000 1.750000 1 -0.0000 -0.0000 1.750000 2 0.0000 0.0000 1.750000 3 0.0000 0.0000 1.750000 4 -0.0000 -0.0000 1.750000 5 0.0001 0.0001 1.750000 6 -0.0000 -0.0000 1.750000 7 -0.0000 -0.0000 1.750000 8 0.0000 0.0000 1.750000 9 0.0001 0.0001 1.750000 10 0.0000 0.0000 1.750000 11 0.0000 0.0000 1.750000 12 -0.0000 -0.0000 1.750000 13 -0.0000 -0.0000 1.750000 14 0.0000 0.0000 1.750000 15 0.0000 0.0000 1.750000 16 0.0000 0.0000 1.750000 17 -0.0000 -0.0000 1.750000 18 0.0000 0.0000 1.750000 19 0.0000 0.0000 1.750000 20 0.0000 0.0000 1.750000 21 0.0000 0.0000 1.750000 22 0.0000 0.0000 1.750000 23 -0.0000 -0.0000 1.800000 0 -0.0009 -0.0009 1.800000 1 -0.0000 -0.0000 1.800000 2 -0.0000 -0.0000 1.800000 3 0.0004 0.0004 1.800000 4 -0.0005 -0.0005 1.800000 5 0.0003 0.0003 1.800000 6 -0.0005 -0.0005 1.800000 7 -0.0000 -0.0000 1.800000 8 -0.0000 -0.0000 1.800000 9 0.0003 0.0003 1.800000 10 -0.0000 -0.0000 1.800000 11 -0.0000 -0.0000 1.800000 12 0.0000 0.0000 1.800000 13 -0.0005 -0.0005 1.800000 14 0.0003 0.0003 1.800000 15 -0.0000 0.0000 1.800000 16 0.0005 0.0005 1.800000 17 -0.0003 -0.0003 1.800000 18 0.0000 0.0000 1.800000 19 -0.0000 -0.0000 1.800000 20 -0.0000 -0.0000 1.800000 21 0.0009 0.0009 1.800000 22 0.0000 0.0000 1.800000 23 0.0000 0.0000 1.850000 0 -0.0186 -0.0186 1.850000 1 -0.0068 -0.0068 1.850000 2 -0.0020 -0.0020 1.850000 3 0.0052 0.0052 1.850000 4 -0.0001 -0.0001 1.850000 5 0.0016 0.0016 1.850000 6 -0.0001 -0.0001 1.850000 7 0.0003 0.0003 1.850000 8 0.0002 0.0002 1.850000 9 0.0016 0.0016 1.850000 10 0.0002 0.0002 1.850000 11 -0.0001 -0.0001 1.850000 12 0.0000 -0.0000 1.850000 13 -0.0015 -0.0015 1.850000 14 0.0014 0.0014 1.850000 15 0.0000 -0.0000 
1.850000 16 0.0020 0.0020 1.850000 17 -0.0010 -0.0010 1.850000 18 0.0000 0.0000 1.850000 19 -0.0005 -0.0005 1.850000 20 -0.0004 -0.0004 1.850000 21 0.0186 0.0186 1.850000 22 0.0068 0.0068 1.850000 23 0.0020 0.0020 1.900000 0 0.7491 0.7490 1.900000 1 -0.0198 -0.0198 1.900000 2 -0.0252 -0.0252 1.900000 3 0.1352 0.1352 1.900000 4 -0.7844 -0.7844 1.900000 5 -0.1962 -0.1962 1.900000 6 -0.7844 -0.7844 1.900000 7 -0.0024 -0.0024 1.900000 8 -0.0025 -0.0025 1.900000 9 -0.1962 -0.1962 1.900000 10 -0.0025 -0.0025 1.900000 11 -0.0035 -0.0035 1.900000 12 -0.7491 -0.7491 1.900000 13 -0.7554 -0.7554 1.900000 14 -0.1588 -0.1588 1.900000 15 0.0000 0.0000 1.900000 16 0.7610 0.7610 1.900000 17 0.1665 0.1665 1.900000 18 0.0000 0.0000 1.900000 19 -0.0056 -0.0056 1.900000 20 -0.0078 -0.0078 1.900000 21 0.0000 0.0000 1.900000 22 0.0198 0.0198 1.900000 23 0.0252 0.0252 1.950000 0 0.0007 0.0007 1.950000 1 0.0008 0.0008 1.950000 2 -0.0002 -0.0002 1.950000 3 0.0002 0.0002 1.950000 4 0.0001 0.0001 1.950000 5 -0.0004 -0.0004 1.950000 6 0.0001 0.0001 1.950000 7 -0.0001 -0.0001 1.950000 8 -0.0004 -0.0004 1.950000 9 -0.0004 -0.0004 1.950000 10 -0.0004 -0.0004 1.950000 11 0.0003 0.0003 1.950000 12 -0.0007 -0.0007 1.950000 13 -0.0010 -0.0010 1.950000 14 -0.0005 -0.0005 1.950000 15 -0.0000 0.0000 1.950000 16 0.0001 0.0001 1.950000 17 0.0004 0.0004 1.950000 18 0.0000 0.0000 1.950000 19 0.0001 0.0001 1.950000 20 0.0004 0.0004 1.950000 21 0.0000 0.0000 1.950000 22 0.0000 0.0000 1.950000 23 0.0000 0.0000 2.000000 0 0.0000 0.0000 2.000000 1 0.0000 0.0000 2.000000 2 0.0000 0.0000 2.000000 3 0.0000 0.0000 2.000000 4 0.0000 0.0000 2.000000 5 -0.0000 -0.0000 2.000000 6 0.0000 0.0000 2.000000 7 0.0000 0.0000 2.000000 8 -0.0000 -0.0000 2.000000 9 -0.0000 -0.0000 2.000000 10 -0.0000 -0.0000 2.000000 11 -0.0000 -0.0000 2.000000 12 -0.0000 -0.0000 2.000000 13 -0.0000 -0.0000 2.000000 14 -0.0000 -0.0000 2.000000 15 0.0000 0.0000 2.000000 16 -0.0000 -0.0000 2.000000 17 0.0000 0.0000 2.000000 18 0.0000 0.0000 
2.000000 19 0.0000 0.0000 2.000000 20 0.0000 0.0000 2.000000 21 0.0000 0.0000 2.000000 22 0.0000 0.0000 2.000000 23 0.0000 0.0000 2.050000 0 -0.0000 -0.0000 2.050000 1 -0.0000 -0.0000 2.050000 2 0.0000 0.0000 2.050000 3 0.0000 -0.0000 2.050000 4 -0.0000 -0.0000 2.050000 5 -0.0000 0.0000 2.050000 6 -0.0000 -0.0000 2.050000 7 -0.0000 -0.0000 2.050000 8 -0.0000 -0.0000 2.050000 9 -0.0000 0.0000 2.050000 10 -0.0000 -0.0000 2.050000 11 0.0000 0.0000 2.050000 12 -0.0000 -0.0000 2.050000 13 -0.0000 -0.0000 2.050000 14 -0.0000 -0.0000 2.050000 15 0.0000 0.0000 2.050000 16 0.0000 0.0000 2.050000 17 -0.0000 -0.0000 2.050000 18 0.0000 0.0000 2.050000 19 -0.0000 0.0000 2.050000 20 -0.0000 -0.0000 2.050000 21 0.0000 0.0000 2.050000 22 0.0000 0.0000 2.050000 23 0.0000 0.0000 2.100000 0 -0.0249 -0.0249 2.100000 1 0.0332 0.0332 2.100000 2 0.0352 0.0352 2.100000 3 0.0028 0.0028 2.100000 4 -0.0039 -0.0039 2.100000 5 -0.0042 -0.0042 2.100000 6 -0.0039 -0.0039 2.100000 7 0.0046 0.0046 2.100000 8 0.0055 0.0055 2.100000 9 -0.0042 -0.0042 2.100000 10 0.0055 0.0055 2.100000 11 0.0057 0.0057 2.100000 12 0.0186 0.0186 2.100000 13 -0.0255 -0.0255 2.100000 14 -0.0265 -0.0265 2.100000 15 0.0059 0.0059 2.100000 16 -0.0077 -0.0077 2.100000 17 -0.0081 -0.0081 2.100000 18 0.0004 0.0004 2.100000 19 0.0000 0.0000 2.100000 20 -0.0006 -0.0006 2.100000 21 0.0000 0.0000 2.100000 22 0.0000 0.0000 2.100000 23 0.0000 0.0000 2.150000 0 -0.0000 -0.0000 2.150000 1 -0.0000 -0.0000 2.150000 2 -0.0001 -0.0001 2.150000 3 0.0000 0.0000 2.150000 4 -0.0000 -0.0000 2.150000 5 0.0000 0.0000 2.150000 6 -0.0000 -0.0000 2.150000 7 -0.0000 -0.0000 2.150000 8 -0.0000 -0.0000 2.150000 9 0.0000 0.0000 2.150000 10 -0.0000 -0.0000 2.150000 11 0.0000 0.0000 2.150000 12 -0.0000 -0.0000 2.150000 13 -0.0000 -0.0000 2.150000 14 0.0000 0.0000 2.150000 15 -0.0000 -0.0000 2.150000 16 0.0000 0.0000 2.150000 17 0.0000 0.0000 2.150000 18 0.0000 0.0000 2.150000 19 -0.0000 0.0000 2.150000 20 -0.0000 -0.0000 2.150000 21 0.0000 0.0000 
2.150000 22 0.0000 0.0000 2.150000 23 0.0001 0.0001 2.200000 0 -1.0117 -1.0117 2.200000 1 0.0227 0.0227 2.200000 2 -2.8133 -2.8133 2.200000 3 1.3832 1.3832 2.200000 4 -0.6680 -0.6680 2.200000 5 1.9700 1.9700 2.200000 6 -0.6680 -0.6680 2.200000 7 -0.0062 -0.0062 2.200000 8 -2.0715 -2.0715 2.200000 9 1.9700 1.9700 2.200000 10 -2.0715 -2.0715 2.200000 11 -1.1837 -1.1837 2.200000 12 2.2074 2.2074 2.200000 13 -2.1748 -2.1748 2.200000 14 -0.7172 -0.7172 2.200000 15 -2.1384 -2.1385 2.200000 16 2.1521 2.1521 2.200000 17 0.6948 0.6948 2.200000 18 0.0193 0.0193 2.200000 19 0.0000 0.0000 2.200000 20 -0.0063 -0.0063 2.200000 21 0.9234 0.9234 2.200000 22 0.0000 0.0000 2.200000 23 2.8420 2.8420 2.250000 0 -0.0018 -0.0018 2.250000 1 0.4871 0.4871 2.250000 2 0.0000 0.0000 2.250000 3 -0.0005 -0.0005 2.250000 4 0.1342 0.1342 2.250000 5 0.0000 0.0000 2.250000 6 0.1342 0.1342 2.250000 7 0.0998 0.0998 2.250000 8 -0.0000 -0.0000 2.250000 9 0.0000 0.0000 2.250000 10 -0.0000 -0.0000 2.250000 11 -0.0000 -0.0000 2.250000 12 0.1355 0.1355 2.250000 13 -0.2438 -0.2438 2.250000 14 -0.0000 -0.0000 2.250000 15 0.0009 0.0009 2.250000 16 -0.2433 -0.2433 2.250000 17 0.0000 0.0000 2.250000 18 -0.1346 -0.1346 2.250000 19 0.0000 0.0000 2.250000 20 0.0000 0.0000 2.250000 21 -0.0000 0.0000 2.250000 22 0.0000 0.0000 2.250000 23 0.0000 0.0000 2.300000 0 1.2259 1.2259 2.300000 1 -0.0130 -0.0130 2.300000 2 -3.7723 -3.7723 2.300000 3 -1.0021 -1.0021 2.300000 4 0.3972 0.3972 2.300000 5 1.3856 1.3856 2.300000 6 0.3972 0.3972 2.300000 7 -0.0041 -0.0041 2.300000 8 -1.2046 -1.2046 2.300000 9 1.3856 1.3856 2.300000 10 -1.2046 -1.2046 2.300000 11 0.9621 0.9621 2.300000 12 1.6204 1.6203 2.300000 13 -1.2683 -1.2683 2.300000 14 0.5265 0.5265 2.300000 15 -1.6151 -1.6151 2.300000 16 1.2683 1.2683 2.300000 17 -0.5248 -0.5248 2.300000 18 -0.0052 -0.0052 2.300000 19 0.0000 0.0000 2.300000 20 -0.0017 -0.0017 2.300000 21 -1.2259 -1.2259 2.300000 22 0.0130 0.0130 2.300000 23 3.7723 3.7723 2.350000 0 0.0000 0.0000 2.350000 1 
0.0000 0.0000 2.350000 2 0.0000 0.0000 2.350000 3 0.0000 0.0000 2.350000 4 0.0000 0.0000 2.350000 5 0.0000 0.0000 2.350000 6 0.0000 0.0000 2.350000 7 0.0000 0.0000 2.350000 8 -0.0000 -0.0000 2.350000 9 0.0000 0.0000 2.350000 10 -0.0000 -0.0000 2.350000 11 0.0000 0.0000 2.350000 12 -0.0000 -0.0000 2.350000 13 -0.0000 -0.0000 2.350000 14 -0.0000 -0.0000 2.350000 15 0.0000 0.0000 2.350000 16 0.0000 0.0000 2.350000 17 0.0000 0.0000 2.350000 18 0.0000 0.0000 2.350000 19 -0.0000 0.0000 2.350000 20 0.0000 0.0000 2.350000 21 -0.0000 -0.0000 2.350000 22 0.0000 0.0000 2.350000 23 -0.0000 -0.0000 2.400000 0 0.0000 0.0000 2.400000 1 0.0000 0.0000 2.400000 2 -0.0000 -0.0000 2.400000 3 0.0000 0.0000 2.400000 4 0.0000 0.0000 2.400000 5 -0.0000 -0.0000 2.400000 6 0.0000 0.0000 2.400000 7 0.0000 0.0000 2.400000 8 -0.0000 -0.0000 2.400000 9 -0.0000 -0.0000 2.400000 10 -0.0000 -0.0000 2.400000 11 0.0000 0.0000 2.400000 12 -0.0000 0.0000 2.400000 13 -0.0000 -0.0000 2.400000 14 -0.0000 0.0000 2.400000 15 -0.0000 0.0000 2.400000 16 0.0000 0.0000 2.400000 17 -0.0000 0.0000 2.400000 18 0.0000 0.0000 2.400000 19 0.0000 0.0000 2.400000 20 0.0000 0.0000 2.400000 21 -0.0000 -0.0000 2.400000 22 0.0000 0.0000 2.400000 23 0.0000 0.0000 2.450000 0 0.1257 0.1257 2.450000 1 0.0000 0.0000 2.450000 2 -0.2340 -0.2340 2.450000 3 -0.1319 -0.1319 2.450000 4 0.0954 0.0954 2.450000 5 -0.1163 -0.1163 2.450000 6 0.0954 0.0954 2.450000 7 -0.0000 -0.0000 2.450000 8 -0.0310 -0.0310 2.450000 9 -0.1163 -0.1163 2.450000 10 -0.0310 -0.0310 2.450000 11 0.0391 0.0391 2.450000 12 0.0315 0.0315 2.450000 13 -0.1003 -0.1003 2.450000 14 0.0969 0.0969 2.450000 15 -0.0018 -0.0018 2.450000 16 0.1003 0.1003 2.450000 17 -0.0055 -0.0055 2.450000 18 0.0271 0.0271 2.450000 19 -0.0000 0.0000 2.450000 20 0.0833 0.0833 2.450000 21 -0.1825 -0.1825 2.450000 22 0.0000 0.0000 2.450000 23 0.0593 0.0593 2.500000 0 -0.0000 -0.0000 2.500000 1 0.0000 0.0000 2.500000 2 0.0000 0.0000 2.500000 3 -0.0000 -0.0000 2.500000 4 -0.0000 -0.0000 
2.500000 5 -0.0000 -0.0000 2.500000 6 -0.0000 -0.0000 2.500000 7 0.0000 0.0000 2.500000 8 0.0000 0.0000 2.500000 9 -0.0000 -0.0000 2.500000 10 0.0000 0.0000 2.500000 11 0.0000 0.0000 2.500000 12 0.0000 0.0000 2.500000 13 -0.0000 -0.0000 2.500000 14 -0.0000 -0.0000 2.500000 15 0.0000 0.0000 2.500000 16 -0.0000 -0.0000 2.500000 17 0.0000 0.0000 2.500000 18 0.0000 0.0000 2.500000 19 0.0000 0.0000 2.500000 20 0.0000 0.0000 2.500000 21 -0.0000 -0.0000 2.500000 22 0.0000 0.0000 2.500000 23 -0.0000 -0.0000 2.550000 0 -3.5335 -3.5335 2.550000 1 -0.0985 -0.0985 2.550000 2 -1.4958 -1.4958 2.550000 3 0.4376 0.4376 2.550000 4 2.7330 2.7330 2.550000 5 -0.5486 -0.5486 2.550000 6 2.7330 2.7330 2.550000 7 -0.0238 -0.0238 2.550000 8 0.9723 0.9723 2.550000 9 -0.5486 -0.5486 2.550000 10 0.9723 0.9723 2.550000 11 -0.4706 -0.4706 2.550000 12 3.3658 3.3658 2.550000 13 -2.8005 -2.8005 2.550000 14 2.0117 2.0117 2.550000 15 0.2204 0.2204 2.550000 16 2.8005 2.8005 2.550000 17 -0.6783 -0.6783 2.550000 18 -0.0527 -0.0527 2.550000 19 -0.0000 0.0000 2.550000 20 0.1623 0.1623 2.550000 21 0.0000 0.0000 2.550000 22 0.0985 0.0985 2.550000 23 -0.0000 0.0000 2.600000 0 -1.9345 -1.9345 2.600000 1 -0.1078 -0.1078 2.600000 2 2.6655 2.6655 2.600000 3 -0.0698 -0.0698 2.600000 4 -0.0035 -0.0035 2.600000 5 0.0953 0.0953 2.600000 6 -0.0035 -0.0035 2.600000 7 -0.0277 -0.0277 2.600000 8 0.0056 0.0056 2.600000 9 0.0953 0.0953 2.600000 10 0.0056 0.0056 2.600000 11 -0.1320 -0.1320 2.600000 12 1.2389 1.2389 2.600000 13 0.0411 0.0411 2.600000 14 -1.7052 -1.7052 2.600000 15 -0.7457 -0.7457 2.600000 16 -0.0411 -0.0411 2.600000 17 1.0264 1.0264 2.600000 18 -0.4931 -0.4931 2.600000 19 -0.0000 0.0000 2.600000 20 0.6787 0.6787 2.600000 21 1.9345 1.9345 2.600000 22 0.1078 0.1078 2.600000 23 -2.6655 -2.6655 2.650000 0 -0.7225 -0.7225 2.650000 1 0.0000 0.0000 2.650000 2 -1.0647 -1.0647 2.650000 3 0.2986 0.2986 2.650000 4 0.3423 0.3423 2.650000 5 0.1720 0.1720 2.650000 6 0.3423 0.3423 2.650000 7 -0.0000 -0.0000 2.650000 8 
0.4712 0.4712 2.650000 9 0.1720 0.1720 2.650000 10 0.4712 0.4712 2.650000 11 -0.1154 -0.1154 2.650000 12 0.5304 0.5304 2.650000 13 -0.5824 -0.5824 2.650000 14 1.2043 1.2043 2.650000 15 0.2073 0.2073 2.650000 16 0.5824 0.5824 2.650000 17 -0.1506 -0.1506 2.650000 18 -0.0151 -0.0151 2.650000 19 -0.0000 -0.0000 2.650000 20 0.0110 0.0110 2.650000 21 0.0000 0.0000 2.650000 22 0.0000 0.0000 2.650000 23 -0.0000 0.0000 2.700000 0 -0.0000 -0.0000 2.700000 1 0.0000 0.0000 2.700000 2 -0.0000 -0.0000 2.700000 3 0.0000 0.0000 2.700000 4 -0.0000 -0.0000 2.700000 5 0.0000 0.0000 2.700000 6 -0.0000 -0.0000 2.700000 7 0.0000 0.0000 2.700000 8 -0.0000 -0.0000 2.700000 9 0.0000 0.0000 2.700000 10 -0.0000 -0.0000 2.700000 11 0.0000 0.0000 2.700000 12 -0.0000 -0.0000 2.700000 13 -0.0000 -0.0000 2.700000 14 0.0000 0.0000 2.700000 15 0.0000 0.0000 2.700000 16 -0.0000 -0.0000 2.700000 17 -0.0000 -0.0000 2.700000 18 0.0000 0.0000 2.700000 19 0.0000 0.0000 2.700000 20 -0.0000 -0.0000 2.700000 21 0.0000 0.0000 2.700000 22 0.0000 0.0000 2.700000 23 -0.0000 0.0000 2.750000 0 -0.0006 -0.0006 2.750000 1 0.0005 0.0005 2.750000 2 -0.0000 -0.0000 2.750000 3 0.0002 0.0002 2.750000 4 -0.0006 -0.0006 2.750000 5 0.0000 0.0000 2.750000 6 -0.0006 -0.0006 2.750000 7 0.0001 0.0001 2.750000 8 -0.0000 -0.0000 2.750000 9 0.0000 0.0000 2.750000 10 -0.0000 -0.0000 2.750000 11 -0.0000 -0.0000 2.750000 12 0.0005 0.0005 2.750000 13 -0.0003 -0.0003 2.750000 14 0.0000 0.0000 2.750000 15 -0.0004 -0.0004 2.750000 16 -0.0002 -0.0002 2.750000 17 0.0000 0.0000 2.750000 18 -0.0001 -0.0001 2.750000 19 0.0000 0.0000 2.750000 20 0.0000 0.0000 2.750000 21 0.0006 0.0006 2.750000 22 0.0000 0.0000 2.750000 23 -0.0000 0.0000 2.800000 0 -0.0000 -0.0000 2.800000 1 0.0000 0.0000 2.800000 2 -0.0000 -0.0000 2.800000 3 0.0000 0.0000 2.800000 4 -0.0000 -0.0000 2.800000 5 0.0000 -0.0000 2.800000 6 -0.0000 -0.0000 2.800000 7 0.0000 0.0000 2.800000 8 -0.0000 -0.0000 2.800000 9 0.0000 -0.0000 2.800000 10 -0.0000 -0.0000 2.800000 11 0.0000 
-0.0000 2.800000 12 0.0000 0.0000 2.800000 13 -0.0000 -0.0000 2.800000 14 0.0000 0.0000 2.800000 15 -0.0000 -0.0000 2.800000 16 -0.0000 -0.0000 2.800000 17 -0.0000 -0.0000 2.800000 18 -0.0000 -0.0000 2.800000 19 0.0000 0.0000 2.800000 20 -0.0000 -0.0000 2.800000 21 0.0000 0.0000 2.800000 22 0.0000 0.0000 2.800000 23 0.0000 0.0000 2.850000 0 -0.0000 -0.0000 2.850000 1 0.0000 0.0000 2.850000 2 -0.0000 -0.0000 2.850000 3 0.0000 0.0000 2.850000 4 -0.0000 -0.0000 2.850000 5 0.0000 0.0000 2.850000 6 -0.0000 -0.0000 2.850000 7 0.0000 0.0000 2.850000 8 -0.0000 -0.0000 2.850000 9 0.0000 0.0000 2.850000 10 -0.0000 -0.0000 2.850000 11 0.0000 0.0000 2.850000 12 0.0000 0.0000 2.850000 13 -0.0000 -0.0000 2.850000 14 0.0000 0.0000 2.850000 15 -0.0000 -0.0000 2.850000 16 -0.0000 -0.0000 2.850000 17 -0.0000 -0.0000 2.850000 18 -0.0000 -0.0000 2.850000 19 0.0000 -0.0000 2.850000 20 -0.0000 -0.0000 2.850000 21 0.0000 0.0000 2.850000 22 0.0000 0.0000 2.850000 23 0.0000 0.0000 2.900000 0 0.0000 0.0000 2.900000 1 0.0000 0.0000 2.900000 2 -0.0000 -0.0000 2.900000 3 0.0000 0.0000 2.900000 4 0.0000 0.0000 2.900000 5 -0.0000 -0.0000 2.900000 6 0.0000 0.0000 2.900000 7 -0.0000 -0.0000 2.900000 8 0.0000 0.0000 2.900000 9 -0.0000 -0.0000 2.900000 10 0.0000 0.0000 2.900000 11 0.0000 0.0000 2.900000 12 -0.0000 -0.0000 2.900000 13 -0.0000 -0.0000 2.900000 14 0.0000 0.0000 2.900000 15 0.0000 0.0000 2.900000 16 0.0000 0.0000 2.900000 17 0.0000 0.0000 2.900000 18 -0.0000 -0.0000 2.900000 19 -0.0000 0.0000 2.900000 20 -0.0000 -0.0000 2.900000 21 0.0000 0.0000 2.900000 22 0.0000 0.0000 2.900000 23 0.0000 0.0000 2.950000 0 0.0000 0.0000 2.950000 1 0.0000 0.0000 2.950000 2 -0.0000 -0.0000 2.950000 3 0.0000 0.0000 2.950000 4 -0.0000 -0.0000 2.950000 5 -0.0000 -0.0000 2.950000 6 -0.0000 -0.0000 2.950000 7 -0.0000 -0.0000 2.950000 8 0.0000 0.0000 2.950000 9 -0.0000 -0.0000 2.950000 10 0.0000 0.0000 2.950000 11 0.0000 0.0000 2.950000 12 -0.0000 -0.0000 2.950000 13 -0.0000 -0.0000 2.950000 14 -0.0000 -0.0000 
2.950000 15 0.0000 0.0000 2.950000 16 0.0000 0.0000 2.950000 17 0.0000 0.0000 2.950000 18 0.0000 0.0000 2.950000 19 0.0000 0.0000 2.950000 20 0.0000 0.0000 2.950000 21 0.0000 0.0000 2.950000 22 0.0000 0.0000 2.950000 23 0.0000 0.0000
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?>
<!-- Rectangle shape drawable with 5dp rounded corners, intended for use as a
     view background (e.g. android:background="@drawable/..."). -->
<shape xmlns:android="http://schemas.android.com/apk/res/android"
    android:shape="rectangle">
    <corners android:radius="5dp"/>
</shape>
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2013 The Chromium Authors. All rights reserved.
 * Copyright (C) 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "WebColorPickerMac.h"

#if ENABLE(INPUT_TYPE_COLOR)
#if USE(APPKIT)

#import <WebCore/Color.h>
#import <WebCore/ColorMac.h>
#import <pal/spi/mac/NSColorWellSPI.h>
#import <pal/spi/mac/NSPopoverColorWellSPI.h>
#import <pal/spi/mac/NSPopoverSPI.h>
#import <wtf/WeakObjCPtr.h>

// Cap on how many author-supplied suggestions are copied into the color list.
static const size_t maxColorSuggestions = 12;

// Metrics used to size the suggested-color swatch row inside the popover.
static const CGFloat colorPickerMatrixNumColumns = 12.0;
static const CGFloat colorPickerMatrixBorderWidth = 1.0;
// FIXME: <rdar://problem/41173525> We should not have to track changes in NSPopoverColorWell's implementation.
static const CGFloat colorPickerMatrixSwatchWidth = 13.0;

// Notifies the owner that the popover portion of the color well was dismissed.
@protocol WKPopoverColorWellDelegate <NSObject>
- (void)didClosePopover;
@end

// NSPopoverColorWell subclass that can seed the popover with suggested colors
// and reports popover dismissal through its web delegate.
@interface WKPopoverColorWell : NSPopoverColorWell {
    RetainPtr<NSColorList> _suggestedColors;
    WeakObjCPtr<id <WKPopoverColorWellDelegate>> _webDelegate;
}

@property (nonatomic, weak) id<WKPopoverColorWellDelegate> webDelegate;

- (void)setSuggestedColors:(NSColorList *)suggestedColors;
@end

// Objective-C adapter that owns the color well UI and forwards user choices
// back to the C++ WebKit::WebColorPickerMac instance.
@interface WKColorPopoverMac : NSObject<WKColorPickerUIMac, WKPopoverColorWellDelegate, NSWindowDelegate> {
@private
    BOOL _lastChangedByUser;
    WebKit::WebColorPickerMac *_picker;
    RetainPtr<WKPopoverColorWell> _popoverWell;
}
- (id)initWithFrame:(const WebCore::IntRect &)rect inView:(NSView *)view;
@end

namespace WebKit {

// Creates a picker whose platform UI is anchored at |rect| within |view|.
Ref<WebColorPickerMac> WebColorPickerMac::create(WebColorPicker::Client* client, const WebCore::Color& initialColor, const WebCore::IntRect& rect, Vector<WebCore::Color>&& suggestions, NSView *view)
{
    return adoptRef(*new WebColorPickerMac(client, initialColor, rect, WTFMove(suggestions), view));
}

WebColorPickerMac::~WebColorPickerMac()
{
    // Tear down the platform UI if it is still alive.
    if (m_colorPickerUI) {
        [m_colorPickerUI invalidate];
        m_colorPickerUI = nil;
    }
}

WebColorPickerMac::WebColorPickerMac(WebColorPicker::Client* client, const WebCore::Color& initialColor, const WebCore::IntRect& rect, Vector<WebCore::Color>&& suggestions, NSView *view)
    : WebColorPicker(client)
    , m_suggestions(WTFMove(suggestions))
{
    m_colorPickerUI = adoptNS([[WKColorPopoverMac alloc] initWithFrame:rect inView:view]);
}

// Dismisses the UI and notifies the base class that picking has ended.
void WebColorPickerMac::endPicker()
{
    [m_colorPickerUI invalidate];
    m_colorPickerUI = nil;
    WebColorPicker::endPicker();
}

// Pushes a programmatic color change into the UI (no-op without client or UI).
void WebColorPickerMac::setSelectedColor(const WebCore::Color& color)
{
    if (!m_client || !m_colorPickerUI)
        return;

    [m_colorPickerUI setColor:nsColor(color)];
}

// Forwards a user-chosen color to the client.
void WebColorPickerMac::didChooseColor(const WebCore::Color& color)
{
    if (!m_client)
        return;

    m_client->didChooseColor(color);
}

// Shows the popover UI with |color| selected; hands the stored suggestions
// over to the UI (m_suggestions is moved from and must not be reused).
void WebColorPickerMac::showColorPicker(const WebCore::Color& color)
{
    if (!m_client)
        return;

    [m_colorPickerUI setAndShowPicker:this withColor:nsColor(color) suggestions:WTFMove(m_suggestions)];
}

} // namespace WebKit

@implementation WKPopoverColorWell

// Returns the process-wide popover used by color wells, (re)creating it when
// forceCreation is YES. The popover is kept in a static; a previously created
// popover is replaced without being released here.
+ (NSPopover *)_colorPopoverCreateIfNecessary:(BOOL)forceCreation
{
    static NSPopover *colorPopover = nil;
    if (forceCreation) {
        NSPopover *popover = [[NSPopover alloc] init];
        [popover _setRequiresCorrectContentAppearance:YES];
        popover.behavior = NSPopoverBehaviorTransient;
        NSColorPopoverController *controller = [[NSClassFromString(@"NSColorPopoverController") alloc] init];
        popover.contentViewController = controller;
        controller.popover = popover;
        [controller release];
        colorPopover = popover;
    }

    return colorPopover;
}

- (id <WKPopoverColorWellDelegate>)webDelegate
{
    return _webDelegate.getAutoreleased();
}

- (void)setWebDelegate:(id <WKPopoverColorWellDelegate>)webDelegate
{
    _webDelegate = webDelegate;
}

// Configures and presents the shared popover below this color well, sizing
// the top swatch row to fit any suggested colors.
- (void)_showPopover
{
    NSPopover *popover = [[self class] _colorPopoverCreateIfNecessary:YES];
    popover.delegate = self;

    [self deactivate];

    // Deactivate previous NSPopoverColorWell
    NSColorWell *owner = [NSColorWell _exclusiveColorPanelOwner];
    if ([owner isKindOfClass:[NSPopoverColorWell class]])
        [owner deactivate];

    NSColorPopoverController *controller = (NSColorPopoverController *)[popover contentViewController];
    controller.delegate = self;

    if (_suggestedColors) {
        NSUInteger numColors = [[_suggestedColors allKeys] count];
        // Divide the default matrix width (columns * swatch width plus border
        // allowance) evenly across however many suggestions were provided.
        CGFloat swatchWidth = (colorPickerMatrixNumColumns * colorPickerMatrixSwatchWidth + (colorPickerMatrixNumColumns * colorPickerMatrixBorderWidth - numColors)) / numColors;
        CGFloat swatchHeight = colorPickerMatrixSwatchWidth;

        // topBarMatrixView cannot be accessed until view has been loaded
        if (!controller.isViewLoaded)
            [controller loadView];

        NSColorPickerMatrixView *topMatrix = controller.topBarMatrixView;
        [topMatrix setNumberOfColumns:numColors];
        [topMatrix setSwatchSize:NSMakeSize(swatchWidth, swatchHeight)];
        [topMatrix setColorList:_suggestedColors.get()];
    }

    [self activate:YES];
    [popover showRelativeToRect:self.bounds ofView:self preferredEdge:NSMinYEdge];
}

- (void)popoverDidClose:(NSNotification *)notification
{
    [self.webDelegate didClosePopover];
}

// Returning nil excludes this view from hit-testing, so the (transparent)
// well never receives mouse events directly.
- (NSView *)hitTest:(NSPoint)point
{
    return nil;
}

- (void)setSuggestedColors:(NSColorList *)suggestedColors
{
    _suggestedColors = suggestedColors;
}

@end

@implementation WKColorPopoverMac

// Places an invisible (zero-alpha) color well over |rect| so the popover has
// something to anchor to inside the view's window.
- (id)initWithFrame:(const WebCore::IntRect &)rect inView:(NSView *)view
{
    if(!(self = [super init]))
        return self;

    _popoverWell = adoptNS([[WKPopoverColorWell alloc] initWithFrame:[view convertRect:NSRectFromCGRect(rect) toView:nil]]);
    if (!_popoverWell)
        return self;

    [_popoverWell setAlphaValue:0.0];
    [[view window].contentView addSubview:_popoverWell.get()];

    return self;
}

// Wires this adapter to |picker|, seeds the well with the current color and
// up to maxColorSuggestions suggested colors, then shows the popover.
- (void)setAndShowPicker:(WebKit::WebColorPickerMac*)picker withColor:(NSColor *)color suggestions:(Vector<WebCore::Color>&&)suggestions
{
    _picker = picker;

    [_popoverWell setTarget:self];
    [_popoverWell setWebDelegate:self];
    [_popoverWell setAction:@selector(didChooseColor:)];
    [_popoverWell setColor:color];

    NSColorList *suggestedColors = nil;
    if (suggestions.size()) {
        suggestedColors = [[[NSColorList alloc] init] autorelease];
        for (size_t i = 0; i < std::min(suggestions.size(), maxColorSuggestions); i++)
            [suggestedColors insertColor:nsColor(suggestions.at(i)) key:@(i).stringValue atIndex:i];
    }

    [_popoverWell setSuggestedColors:suggestedColors];
    [_popoverWell _showPopover];

    [[NSColorPanel sharedColorPanel] setDelegate:self];
    _lastChangedByUser = YES;
}

// Detaches from the well and the shared color panel; clears back-pointers so
// later delegate callbacks become no-ops.
- (void)invalidate
{
    [_popoverWell removeFromSuperviewWithoutNeedingDisplay];
    [_popoverWell setTarget:nil];
    [_popoverWell setAction:nil];
    [_popoverWell deactivate];

    _popoverWell = nil;
    _picker = nil;

    NSColorPanel *panel = [NSColorPanel sharedColorPanel];
    if (panel.delegate == self) {
        panel.delegate = nil;
        [panel close];
    }
}

// Ends picking when the shared color panel's window closes.
- (void)windowWillClose:(NSNotification *)notification
{
    if (!_picker)
        return;

    if (notification.object == [NSColorPanel sharedColorPanel]) {
        _lastChangedByUser = YES;
        _picker->endPicker();
    }
}

// Action sent by the color well whenever its color changes.
- (void)didChooseColor:(id)sender
{
    if (sender != _popoverWell)
        return;

    // Handle the case where the <input type='color'> value is programmatically set.
    if (!_lastChangedByUser) {
        _lastChangedByUser = YES;
        return;
    }

    _picker->didChooseColor(WebCore::colorFromNSColor([_popoverWell color]));
}

// Programmatic color update; the flag makes didChooseColor: ignore the echo.
- (void)setColor:(NSColor *)color
{
    _lastChangedByUser = NO;
    [_popoverWell setColor:color];
}

- (void)didClosePopover
{
    if (!_picker)
        return;

    // Keep the picker alive while the detached NSColorPanel is still visible.
    if (![NSColorPanel sharedColorPanel].isVisible)
        _picker->endPicker();
}

@end

#endif // USE(APPKIT)
#endif // ENABLE(INPUT_TYPE_COLOR)
{ "pile_set_name": "Github" }
var $path = require("path"); module.exports = { mode: "production", devtool: "source-map", entry: { index: ["core-js/stable", "./index.js"] }, output: { path: $path.join(__dirname, "dist"), publicPath: "dist/", filename: "[name].js", chunkFilename: "[name].js" }, module: { rules: [{ test: /.js$/, include: /node_modules/, use: { loader: "babel-loader", options: { presets: ["@babel/preset-env"], plugins: ["@babel/plugin-syntax-dynamic-import"] } } }, { test: /.js$/, use: ["source-map-loader"], enforce: "pre" }] } };
{ "pile_set_name": "Github" }
3-Hour Docker Workshop Training Prep ==================================== If you are in a classroom environment, you will be given two machines listed as `studentXYZa` and `studentXYZb` where `XYZ` is a number. If you are doing this workshop on your own, you will need to deploy the hosts on your own. Download `rexconfig.yml`, `student.json`, and `dockertraining.sh` to a local folder. Edit `rexconfig.yml` with your AWS ACCESS & SECRET KEYS. Edit `student.json` to make sure `rexconfig.yml` and `dockertraining.sh` are in an available directory. The `docker-machine` AWS security group should have the following ports open | Protocol | Port Range | ---------|:----------:| | TCP | 3376 | | TCP | 80 | | TCP | 8080 | | TCP | 7946 | | TCP | 5000 | | TCP | 2376 | | TCP | 2375 | | UDP | 4789 | | TCP | 443 | | TCP | 8888 | | TCP | 22 | | TCP | 3375 | | TCP | 8500 | Using [Docker Machine with the experimental EMC {code} extension framework](http://blog.emccode.com/2015/09/26/make-docker-machine-do-anything-with-our-experimental-extensions/) deploy a host. Each student needs 2 hosts. 
Download this unsupported binary and make it executable (example for Mac OS X): ``` $ curl -L https://github.com/kacole2/machine/releases/download/v0.5.0-dev-ext/docker-machine_darwin-amd64 > /usr/local/bin/docker-machine $ chmod +x /usr/local/bin/docker-machine ``` Use this binary to provision your hosts for the workshop: #### For US-East-1 (default) (AMI using Ubuntu 14.04.03 with Kernel 3.19-30) ``` $ docker-machine -D create --driver amazonec2 --amazonec2-access-key <access-key> --amazonec2-secret-key <secret-key> --amazonec2-vpc-id <vpc-id> --amazonec2-zone "b" --amazonec2-ami ami-f6e6979c --engine-install-url "https://get.docker.com" --extension /Users/kcoleman/Desktop/student.json student001a $ docker-machine -D create --driver amazonec2 --amazonec2-access-key <access-key> --amazonec2-secret-key <secret-key> --amazonec2-vpc-id <vpc-id> --amazonec2-zone "b" --amazonec2-ami ami-f6e6979c --engine-install-url "https://get.docker.com" --extension /Users/kcoleman/Desktop/student.json student001b ``` #### For EU-West-1 (AMI using Ubuntu 14.04.03 with Kernel 3.19-30) ``` $ docker-machine -D create --driver amazonec2 --amazonec2-access-key <access-key> --amazonec2-secret-key <secret-key> --amazonec2-region "eu-west-1" --amazonec2-ami ami-0ab16e79 --amazonec2-vpc-id <vpc-id> --engine-install-url "https://test.docker.com" --extension /Users/kcoleman/Desktop/student.json student001a $ docker-machine -D create --driver amazonec2 --amazonec2-access-key <access-key> --amazonec2-secret-key <secret-key> --amazonec2-region "eu-west-1" --amazonec2-ami ami-0ab16e79 --amazonec2-vpc-id <vpc-id> --engine-install-url "https://test.docker.com" --extension /Users/kcoleman/Desktop/student.json student001b ``` #### AMI ID's in all regions. Docker 1.9 Networking Requires a kernel version of 3.16 or greater. 
| Region         | AMI-ID       |
| -------------- | :----------: |
| US-East-1      | ami-f6e6979c |
| US-West-2      | ami-d79284b6 |
| US-West-1      | ami-fe2c429e |
| AP-Northeast-1 | ami-2c547542 |
| SA-East-1      | ami-a0e258cc |
| EU-West-1      | ami-0ab16e79 |
{ "pile_set_name": "Github" }
'use strict';

const toposort = require('toposort');
const _ = require('lodash');

/**
  Sorts dependencies between chunks by their "parents" attribute.

  This function sorts chunks based on their dependencies with each other.
  The parent relation between chunks as generated by Webpack for each chunk
  is used to define a directed (and hopefully acyclic) graph, which is then
  topologically sorted in order to retrieve the correct order in which
  chunks need to be embedded into HTML. A directed edge in this graph is
  describing a "is parent of" relationship from a chunk to another (distinct)
  chunk. Thus topological sorting orders chunks from bottom-layer chunks to
  highest level chunks that use the lower-level chunks.

  @param {Array} chunks an array of chunks as generated by the html-webpack-plugin.
  - For webpack < 4, It is assumed that each entry contains at least the properties
  "id" (containing the chunk id) and "parents" (array containing the ids of the
  parent chunks).
  - For webpack 4+ see the compilation.chunkGroups for parent-child relationships;
  each ChunkGroup has a parentsIterable and contains a list of chunks in order.

  @return {Array} A topologically sorted version of the input chunks
*/
module.exports.dependency = (chunks, options, compilation) => {
  // chunkGroups is only populated on webpack 4+; its absence selects the
  // legacy `chunk.parents` code path below.
  const chunkGroups = compilation.chunkGroups;
  if (!chunks) {
    return chunks;
  }
  // We build a map (chunk-id -> chunk) for faster access during graph building.
  const nodeMap = {};
  chunks.forEach(chunk => {
    nodeMap[chunk.id] = chunk;
  });
  // Next, we add an edge for each parent relationship into the graph
  let edges = [];

  if (chunkGroups) {
    // Add an edge for each parent (parent -> child)
    edges = chunkGroups.reduce((result, chunkGroup) => result.concat(
      Array.from(chunkGroup.parentsIterable, parentGroup => [parentGroup, chunkGroup])
    ), []);
    const sortedGroups = toposort.array(chunkGroups, edges);
    // flatten chunkGroup into chunks
    const sortedChunks = sortedGroups
      .reduce((result, chunkGroup) => result.concat(chunkGroup.chunks), [])
      .map(chunk =>
        // use the chunk from the list passed in, since it may be a filtered list
        nodeMap[chunk.id])
      .filter((chunk, index, self) => {
        // make sure exists (ie excluded chunks not in nodeMap)
        const exists = !!chunk;
        // make sure we have a unique list
        const unique = self.indexOf(chunk) === index;
        return exists && unique;
      });
    return sortedChunks;
  } else {
    // before webpack 4 there was no chunkGroups
    chunks.forEach(chunk => {
      if (chunk.parents) {
        // Add an edge for each parent (parent -> child)
        chunk.parents.forEach(parentId => {
          // webpack2 chunk.parents are chunks instead of string id(s)
          const parentChunk = _.isObject(parentId) ? parentId : nodeMap[parentId];
          // If the parent chunk does not exist (e.g. because of an excluded chunk)
          // we ignore that parent
          if (parentChunk) {
            edges.push([parentChunk, chunk]);
          }
        });
      }
    });
    // We now perform a topological sorting on the input chunks and built edges
    return toposort.array(chunks, edges);
  }
};

/**
 * Sorts the chunks based on the chunk id.
 *
 * Entry chunks are ordered after non-entry chunks; within each group the
 * ids are compared numerically in descending order.
 *
 * @param {Array} chunks the list of chunks to sort
 * @return {Array} The sorted list of chunks
 */
module.exports.id = chunks => chunks.sort(function orderEntryLast (a, b) {
  if (a.entry !== b.entry) {
    return b.entry ? 1 : -1;
  } else {
    return b.id - a.id;
  }
});

/**
 * Performs identity mapping (no-sort).
 * @param {Array} chunks the chunks to sort
 * @return {Array} The sorted chunks
 */
module.exports.none = chunks => chunks;

/**
 * Sort manually by the chunks
 *
 * Returns the chunks in the exact order listed in `options.chunks`,
 * matching each requested name against `chunk.names[0]`. Names with no
 * matching chunk are silently skipped.
 *
 * @param {Array} chunks the chunks to sort
 * @return {Array} The sorted chunks
 */
module.exports.manual = (chunks, options) => {
  const specifyChunks = options.chunks;
  const chunksResult = [];
  let filterResult = [];
  if (Array.isArray(specifyChunks)) {
    for (var i = 0; i < specifyChunks.length; i++) {
      filterResult = chunks.filter(chunk => {
        if (chunk.names[0] && chunk.names[0] === specifyChunks[i]) {
          return true;
        }
        return false;
      });
      // only the first chunk matching the requested name is kept
      filterResult.length > 0 && chunksResult.push(filterResult[0]);
    }
  }
  return chunksResult;
};

/**
 * Defines the default sorter.
 */
module.exports.auto = module.exports.id;

// In webpack 2 the ids have been flipped.
// Therefore the id sort doesn't work the same way as it did for webpack 1
// Luckily the dependency sort is working as expected
if (Number(require('webpack/package.json').version.split('.')[0]) > 1) {
  module.exports.auto = module.exports.dependency;
}
{ "pile_set_name": "Github" }
# -*-perl-*- hey - emacs - this is a perl file

# src/tools/msvc/vcregress.pl
#
# Driver for running the PostgreSQL regression test suites from an
# MSVC build on Windows.  The first command-line argument selects the
# test mode (check, installcheck, plcheck, ...); see usage() below.

use strict;

our $config;

use Cwd;
use File::Basename;
use File::Copy;
use File::Find ();

use Install qw(Install);

my $startdir = getcwd();

# Allow running this script from anywhere inside the tree by moving to
# the top of the source tree first.
chdir "../../.." if (-d "../../../src/tools/msvc");

my $topdir = getcwd();
my $tmp_installdir = "$topdir/tmp_install";

do 'src/tools/msvc/config_default.pl';
do 'src/tools/msvc/config.pl' if (-f 'src/tools/msvc/config.pl');

# buildenv.pl is for specifying the build environment settings
# it should contain lines like:
# $ENV{PATH} = "c:/path/to/bison/bin;$ENV{PATH}";

if (-e "src/tools/msvc/buildenv.pl")
{
	do "src/tools/msvc/buildenv.pl";
}

my $what = shift || "";
if ($what =~
	/^(check|installcheck|plcheck|contribcheck|modulescheck|ecpgcheck|isolationcheck|upgradecheck|bincheck|recoverycheck|taptest)$/i
  )
{
	$what = uc $what;
}
else
{
	usage();
}

# use a capital C here because config.pl has $config
my $Config = -e "release/postgres/postgres.exe" ? "Release" : "Debug";

# Copy the test support DLLs next to the regression test inputs.
copy("$Config/refint/refint.dll", "src/test/regress");
copy("$Config/autoinc/autoinc.dll", "src/test/regress");
copy("$Config/regress/regress.dll", "src/test/regress");
copy("$Config/dummy_seclabel/dummy_seclabel.dll", "src/test/regress");

$ENV{PATH} = "$topdir/$Config/libpq;$ENV{PATH}";

if ($ENV{PERL5LIB})
{
	$ENV{PERL5LIB} = "$topdir/src/tools/msvc;$ENV{PERL5LIB}";
}
else
{
	$ENV{PERL5LIB} = "$topdir/src/tools/msvc";
}

my $maxconn = "";
$maxconn = "--max_connections=$ENV{MAX_CONNECTIONS}"
  if $ENV{MAX_CONNECTIONS};

my $temp_config = "";
$temp_config = "--temp-config=\"$ENV{TEMP_CONFIG}\""
  if $ENV{TEMP_CONFIG};

chdir "src/test/regress";

# Dispatch table mapping the (upper-cased) mode name to its handler sub.
my %command = (
	CHECK          => \&check,
	PLCHECK        => \&plcheck,
	INSTALLCHECK   => \&installcheck,
	ECPGCHECK      => \&ecpgcheck,
	CONTRIBCHECK   => \&contribcheck,
	MODULESCHECK   => \&modulescheck,
	ISOLATIONCHECK => \&isolationcheck,
	BINCHECK       => \&bincheck,
	RECOVERYCHECK  => \&recoverycheck,
	UPGRADECHECK   => \&upgradecheck,
	TAPTEST        => \&taptest,);

my $proc = $command{$what};

exit 3 unless $proc;

&$proc(@ARGV);

exit 0;

########################################################################

# Run pg_regress against an already-running installation.
# Optional argument selects the schedule (default 'serial').
sub installcheck
{
	my $schedule = shift || 'serial';
	my @args = (
		"../../../$Config/pg_regress/pg_regress",
		"--dlpath=.",
		"--bindir=../../../$Config/psql",
		"--schedule=${schedule}_schedule",
		"--encoding=SQL_ASCII",
		"--no-locale");
	push(@args, $maxconn) if $maxconn;
	system(@args);
	my $status = $? >> 8;
	exit $status if $status;
}

# Set up a temporary install and run pg_regress against a throwaway
# instance (--temp-instance).  Optional argument selects the schedule
# (default 'parallel').
sub check
{
	my $schedule = shift || 'parallel';
	InstallTemp();
	chdir "${topdir}/src/test/regress";
	my @args = (
		"../../../$Config/pg_regress/pg_regress",
		"--dlpath=.",
		"--bindir=",
		"--schedule=${schedule}_schedule",
		"--encoding=SQL_ASCII",
		"--no-locale",
		"--temp-instance=./tmp_check");
	push(@args, $maxconn) if $maxconn;
	push(@args, $temp_config) if $temp_config;
	system(@args);
	my $status = $? >> 8;
	exit $status if $status;
}

# Build the ECPG regression test programs with msbuild, then run the
# ECPG-specific pg_regress variant on a temp instance.
sub ecpgcheck
{
	my $msbflags = $ENV{MSBFLAGS} || "";
	chdir $startdir;
	system("msbuild ecpg_regression.proj $msbflags /p:config=$Config");
	my $status = $? >> 8;
	exit $status if $status;
	InstallTemp();
	chdir "$topdir/src/interfaces/ecpg/test";
	my $schedule = "ecpg";
	my @args = (
		"../../../../$Config/pg_regress_ecpg/pg_regress_ecpg",
		"--bindir=",
		"--dbname=ecpg1_regression,ecpg2_regression",
		"--create-role=regress_ecpg_user1,regress_ecpg_user2",
		"--schedule=${schedule}_schedule",
		"--encoding=SQL_ASCII",
		"--no-locale",
		"--temp-instance=./tmp_chk");
	push(@args, $maxconn) if $maxconn;
	system(@args);
	$status = $? >> 8;
	exit $status if $status;
}

# Run the isolation (concurrency) test suite against an existing
# installation.
sub isolationcheck
{
	chdir "../isolation";
	copy("../../../$Config/isolationtester/isolationtester.exe",
		"../../../$Config/pg_isolation_regress");
	my @args = (
		"../../../$Config/pg_isolation_regress/pg_isolation_regress",
		"--bindir=../../../$Config/psql",
		"--inputdir=.",
		"--schedule=./isolation_schedule");
	push(@args, $maxconn) if $maxconn;
	system(@args);
	my $status = $? >> 8;
	exit $status if $status;
}

# Run the TAP tests (t/*.pl) in the given directory via "prove".
# A leading "PROVE_FLAGS=..." argument, if present anywhere in @_, is
# removed and its value passed through to prove.  Returns prove's exit
# status rather than exiting, so callers can aggregate results.
sub tap_check
{
	die "Tap tests not enabled in configuration"
	  unless $config->{tap_tests};

	my @flags;
	foreach my $arg (0 .. scalar(@_))
	{
		next unless $_[$arg] =~ /^PROVE_FLAGS=(.*)/;
		@flags = split(/\s+/, $1);
		splice(@_, $arg, 1);
		last;
	}

	my $dir = shift;
	chdir $dir;

	my @args = ("prove", @flags, "t/*.pl");

	# adjust the environment for just this test
	local %ENV = %ENV;
	$ENV{PERL5LIB} = "$topdir/src/test/perl;$ENV{PERL5LIB}";
	$ENV{PG_REGRESS} = "$topdir/$Config/pg_regress/pg_regress";

	$ENV{TESTDIR} = "$dir";

	system(@args);

	my $status = $? >> 8;
	return $status;
}

# Run the TAP test suites of all client programs under src/bin/.
sub bincheck
{
	InstallTemp();

	my $mstat = 0;

	# Find out all the existing TAP tests by looking for t/ directories
	# in the tree.
	my @bin_dirs = glob("$topdir/src/bin/*");

	# Process each test
	foreach my $dir (@bin_dirs)
	{
		next unless -d "$dir/t";
		my $status = tap_check($dir);
		$mstat ||= $status;
	}
	exit $mstat if $mstat;
}

# Run an arbitrary TAP test set; the directory (relative to the top of
# the tree) is given on the command line, optionally preceded by a
# PROVE_FLAGS=... argument.
sub taptest
{
	my $dir = shift;
	my @args;

	if ($dir =~ /^PROVE_FLAGS=/)
	{
		push(@args, $dir);
		$dir = shift;
	}

	die "no tests found!" unless -d "$topdir/$dir/t";

	push(@args, "$topdir/$dir");

	InstallTemp();
	my $status = tap_check(@args);
	exit $status if $status;
}

# Rewrite plpython2 test inputs/expected files into Python-3 form,
# placing the converted copies under sql/python3 and expected/python3.
# Takes a reference to the test-name list and returns the names with a
# "python3/" prefix so pg_regress picks up the converted files.
sub mangle_plpython3
{
	my $tests = shift;
	mkdir "results" unless -d "results";
	mkdir "sql/python3";
	mkdir "results/python3";
	mkdir "expected/python3";

	foreach my $test (@$tests)
	{
		local $/ = undef;
		foreach my $dir ('sql','expected')
		{
			my $extension = ($dir eq 'sql' ? 'sql' : 'out');
			my @files =
			  glob("$dir/$test.$extension $dir/${test}_[0-9].$extension");
			foreach my $file (@files)
			{
				open(my $handle, '<', $file)
				  || die "test file $file not found";
				my $contents = <$handle>;
				close($handle);
				# Mechanical python2 -> python3 source translations.
				do
				{
					s/except ([[:alpha:]][[:alpha:].]*), *([[:alpha:]][[:alpha:]]*):/except $1 as $2:/g;
					s/<type 'exceptions\.([[:alpha:]]*)'>/<class '$1'>/g;
					s/<type 'long'>/<class 'int'>/g;
					s/([0-9][0-9]*)L/$1/g;
					s/([ [{])u"/$1"/g;
					s/([ [{])u'/$1'/g;
					s/def next/def __next__/g;
					s/LANGUAGE plpython2?u/LANGUAGE plpython3u/g;
					s/EXTENSION ([^ ]*_)*plpython2?u/EXTENSION $1plpython3u/g;
					s/installing required extension "plpython2u"/installing required extension "plpython3u"/g;
				} for ($contents);
				my $base = basename $file;
				open($handle, '>', "$dir/python3/$base")
				  || die "opening python 3 file for $file";
				print $handle $contents;
				close($handle);
			}
		}
	}
	do { s!^!python3/!; }
	  foreach (@$tests);
	return @$tests;
}

# Run the regression suites of the procedural languages under src/pl/.
sub plcheck
{
	chdir "../../pl";

	foreach my $pl (glob("*"))
	{
		next unless -d "$pl/sql" && -d "$pl/expected";
		my $lang = $pl eq 'tcl' ? 'pltcl' : $pl;
		if ($lang eq 'plpython')
		{
			next
			  unless -d "$topdir/$Config/plpython2"
			  || -d "$topdir/$Config/plpython3";
			$lang = 'plpythonu';
		}
		else
		{
			next unless -d "../../$Config/$lang";
		}
		my @lang_args = ("--load-extension=$lang");
		chdir $pl;
		my @tests = fetchTests();
		@tests = mangle_plpython3(\@tests)
		  if $lang eq 'plpythonu' && -d "$topdir/$Config/plpython3";
		if ($lang eq 'plperl')
		{

			# run both trusted and untrusted perl tests
			push(@lang_args, "--load-extension=plperlu");

			# assume we're using this perl to build postgres
			# test if we can run two interpreters in one backend, and if so
			# run the trusted/untrusted interaction tests
			use Config;
			if ($Config{usemultiplicity} eq 'define')
			{
				push(@tests, 'plperl_plperlu');
			}
		}
		elsif ($lang eq 'plpythonu' && -d "$topdir/$Config/plpython3")
		{
			@lang_args = ();
		}
		print
		  "============================================================\n";
		print "Checking $lang\n";
		my @args = (
			"../../../$Config/pg_regress/pg_regress",
			"--bindir=../../../$Config/psql",
			"--dbname=pl_regression", @lang_args, @tests);
		system(@args);
		my $status = $? >> 8;
		exit $status if $status;
		chdir "..";
	}

	chdir "../../..";
}

# Run the regression suite of one contrib/ or src/test/modules/
# subdirectory, if it has one.  Caller inspects $? for the status.
sub subdircheck
{
	my $module = shift;

	if (   !-d "$module/sql"
		|| !-d "$module/expected"
		|| (!-f "$module/GNUmakefile" && !-f "$module/Makefile"))
	{
		return;
	}

	chdir $module;
	my @tests = fetchTests();
	my @opts  = fetchRegressOpts();

	# Special processing for python transform modules, see their respective
	# Makefiles for more details regarding Python-version specific
	# dependencies.
	if ($module =~ /_plpython$/)
	{
		die "Python not enabled in configuration"
		  if !defined($config->{python});

		@opts = grep { $_ !~ /plpythonu/ } @opts;

		if (-d "$topdir/$Config/plpython2")
		{
			push @opts, "--load-extension=plpythonu";
			push @opts, '--load-extension=' . $module . 'u';
		}
		else
		{
			# must be python 3
			@tests = mangle_plpython3(\@tests);
		}
	}

	print "============================================================\n";
	print "Checking $module\n";
	my @args = (
		"$topdir/$Config/pg_regress/pg_regress",
		"--bindir=${topdir}/${Config}/psql",
		"--dbname=contrib_regression", @opts, @tests);
	print join(' ',@args),"\n";
	system(@args);
	chdir "..";
}

# Run the regression suites of all contrib/ modules that are enabled in
# this build's configuration.
sub contribcheck
{
	chdir "../../../contrib";
	my $mstat = 0;
	foreach my $module (glob("*"))
	{
		# these configuration-based exclusions must match Install.pm
		next if ($module eq "uuid-ossp" && !defined($config->{uuid}));
		next if ($module eq "sslinfo" && !defined($config->{openssl}));
		next if ($module eq "xml2" && !defined($config->{xml}));
		next if ($module =~ /_plperl$/ && !defined($config->{perl}));
		next if ($module =~ /_plpython$/ && !defined($config->{python}));
		next if ($module eq "sepgsql");

		subdircheck($module);
		my $status = $? >> 8;
		$mstat ||= $status;
	}
	exit $mstat if $mstat;
}

# Run the regression suites of all test modules in src/test/modules/.
sub modulescheck
{
	chdir "../../../src/test/modules";
	my $mstat = 0;
	foreach my $module (glob("*"))
	{
		subdircheck($module);
		my $status = $? >> 8;
		$mstat ||= $status;
	}
	exit $mstat if $mstat;
}

# Run the recovery TAP test suite.
sub recoverycheck
{
	InstallTemp();

	my $mstat = 0;
	my $dir = "$topdir/src/test/recovery";
	my $status = tap_check($dir);
	exit $status if $status;
}

# Run "initdb", then reconfigure authentication.
sub standard_initdb
{
	return (
		system('initdb', '-N') == 0 and system(
			"$topdir/$Config/pg_regress/pg_regress", '--config-auth',
			$ENV{PGDATA}) == 0);
}

# This is similar to appendShellString(). Perl system(@args) bypasses
# cmd.exe, so omit the caret escape layer.
sub quote_system_arg
{
	my $arg = shift;

	# Change N >= 0 backslashes before a double quote to 2N+1 backslashes.
	$arg =~ s/(\\*)"/${\($1 . $1)}\\"/gs;

	# Change N >= 1 backslashes at end of argument to 2N backslashes.
	$arg =~ s/(\\+)$/${\($1 . $1)}/gs;

	# Wrap the whole thing in unescaped double quotes.
	return "\"$arg\"";
}

# Generate a database with a name made of a range of ASCII characters, useful
# for testing pg_upgrade.
sub generate_db
{
	my ($prefix, $from_char, $to_char, $suffix) = @_;

	my $dbname = $prefix;
	for my $i ($from_char .. $to_char)
	{
		next if $i == 7 || $i == 10 || $i == 13;    # skip BEL, LF, and CR
		$dbname = $dbname . sprintf('%c', $i);
	}
	$dbname .= $suffix;

	system('createdb', quote_system_arg($dbname));
	my $status = $? >> 8;
	exit $status if $status;
}

# End-to-end pg_upgrade test: init and populate an "old" cluster, dump
# it, upgrade it into a "new" cluster, dump again, and diff the dumps.
sub upgradecheck
{
	my $status;
	my $cwd = getcwd();

	# Much of this comes from the pg_upgrade test.sh script,
	# but it only covers the --install case, and not the case
	# where the old and new source or bin dirs are different.
	# i.e. only this version to this version check. That's
	# what pg_upgrade's "make check" does.

	$ENV{PGHOST} = 'localhost';
	$ENV{PGPORT} ||= 50432;
	my $tmp_root = "$topdir/src/bin/pg_upgrade/tmp_check";
	(mkdir $tmp_root || die $!) unless -d $tmp_root;
	my $upg_tmp_install = "$tmp_root/install";    # unshared temp install
	print "Setting up temp install\n\n";
	Install($upg_tmp_install, "all", $config);

	# Install does a chdir, so change back after that
	chdir $cwd;
	my ($bindir, $libdir, $oldsrc, $newsrc) =
	  ("$upg_tmp_install/bin", "$upg_tmp_install/lib", $topdir, $topdir);
	$ENV{PATH} = "$bindir;$ENV{PATH}";
	my $data = "$tmp_root/data";
	$ENV{PGDATA} = "$data.old";
	my $logdir = "$topdir/src/bin/pg_upgrade/log";
	(mkdir $logdir || die $!) unless -d $logdir;
	print "\nRunning initdb on old cluster\n\n";
	standard_initdb() or exit 1;
	print "\nStarting old cluster\n\n";
	my @args = ('pg_ctl', 'start', '-l', "$logdir/postmaster1.log");
	system(@args) == 0 or exit 1;

	print "\nCreating databases with names covering most ASCII bytes\n\n";
	generate_db("\\\"\\", 1, 45, "\\\\\"\\\\\\");
	generate_db('', 46, 90, '');
	generate_db('', 91, 127, '');

	print "\nSetting up data for upgrading\n\n";
	installcheck();

	# now we can chdir into the source dir
	chdir "$topdir/src/bin/pg_upgrade";
	print "\nDumping old cluster\n\n";
	@args = ('pg_dumpall', '-f', "$tmp_root/dump1.sql");
	system(@args) == 0 or exit 1;
	print "\nStopping old cluster\n\n";
	system("pg_ctl stop") == 0 or exit 1;
	$ENV{PGDATA} = "$data";
	print "\nSetting up new cluster\n\n";
	standard_initdb() or exit 1;
	print "\nRunning pg_upgrade\n\n";
	@args = (
		'pg_upgrade', '-d', "$data.old", '-D', $data, '-b',
		$bindir, '-B', $bindir);
	system(@args) == 0 or exit 1;
	print "\nStarting new cluster\n\n";
	@args = ('pg_ctl', '-l', "$logdir/postmaster2.log", 'start');
	system(@args) == 0 or exit 1;
	print "\nSetting up stats on new cluster\n\n";
	system(".\\analyze_new_cluster.bat") == 0 or exit 1;
	print "\nDumping new cluster\n\n";
	@args = ('pg_dumpall', '-f', "$tmp_root/dump2.sql");
	system(@args) == 0 or exit 1;
	print "\nStopping new cluster\n\n";
	system("pg_ctl stop") == 0 or exit 1;
	print "\nDeleting old cluster\n\n";
	system(".\\delete_old_cluster.bat") == 0 or exit 1;
	print "\nComparing old and new cluster dumps\n\n";

	@args = ('diff', '-q', "$tmp_root/dump1.sql", "$tmp_root/dump2.sql");
	system(@args);
	$status = $?;
	if (!$status)
	{
		print "PASSED\n";
	}
	else
	{
		print "dumps not identical!\n";
		exit(1);
	}
}

# Extract pg_regress options (REGRESS_OPTS, ENCODING, NO_LOCALE) from
# the current directory's GNUmakefile/Makefile.
sub fetchRegressOpts
{
	my $handle;
	open($handle, '<', "GNUmakefile")
	  || open($handle, '<', "Makefile")
	  || die "Could not open Makefile";
	local ($/) = undef;
	my $m = <$handle>;
	close($handle);
	my @opts;

	# Undo Makefile line continuations before matching.
	$m =~ s{\\\r?\n}{}g;
	if ($m =~ /^\s*REGRESS_OPTS\s*\+?=(.*)/m)
	{
		# Substitute known Makefile variables, then ignore options that retain
		# an unhandled variable reference.  Ignore anything that isn't an
		# option starting with "--".
		@opts = grep { !/\$\(/ && /^--/ }
		  map  { (my $x = $_) =~ s/\Q$(top_builddir)\E/\"$topdir\"/; $x; }
		  split(/\s+/, $1);
	}
	if ($m =~ /^\s*ENCODING\s*=\s*(\S+)/m)
	{
		push @opts, "--encoding=$1";
	}
	if ($m =~ /^\s*NO_LOCALE\s*=\s*\S+/m)
	{
		push @opts, "--no-locale";
	}
	return @opts;
}

# Extract the REGRESS test list from the current directory's
# GNUmakefile/Makefile, expanding pgcrypto's conditional test variables.
sub fetchTests
{
	my $handle;
	open($handle, '<', "GNUmakefile")
	  || open($handle, '<', "Makefile")
	  || die "Could not open Makefile";
	local ($/) = undef;
	my $m = <$handle>;
	close($handle);
	my $t = "";

	$m =~ s{\\\r?\n}{}g;
	if ($m =~ /^REGRESS\s*=\s*(.*)$/gm)
	{
		$t = $1;
		$t =~ s/\s+/ /g;

		if ($m =~ /contrib\/pgcrypto/)
		{

			# pgcrypto is special since the tests depend on the
			# configuration of the build
			my $cftests =
			  $config->{openssl}
			  ? GetTests("OSSL_TESTS", $m)
			  : GetTests("INT_TESTS", $m);
			my $pgptests =
			  $config->{zlib}
			  ? GetTests("ZLIB_TST", $m)
			  : GetTests("ZLIB_OFF_TST", $m);
			$t =~ s/\$\(CF_TESTS\)/$cftests/;
			$t =~ s/\$\(CF_PGP_TESTS\)/$pgptests/;
		}
	}

	return split(/\s+/, $t);
}

# Return the value of Makefile variable $testname from makefile text $m,
# or "" if it is not set.
sub GetTests
{
	my $testname = shift;
	my $m = shift;
	if ($m =~ /^$testname\s*=\s*(.*)$/gm)
	{
		return $1;
	}
	return "";
}

# Install a shared temporary installation (unless NO_TEMP_INSTALL is
# set) and put its bin directory on PATH.
sub InstallTemp
{
	unless ($ENV{NO_TEMP_INSTALL})
	{
		print "Setting up temp install\n\n";
		Install("$tmp_installdir", "all", $config);
	}
	$ENV{PATH} = "$tmp_installdir/bin;$ENV{PATH}";
}

# Print usage information and exit(1).
sub usage
{
	print STDERR
	  "Usage: vcregress.pl <mode> [ <arg>]\n\n",
	  "Options for <mode>:\n",
	  "  bincheck       run tests of utilities in src/bin/\n",
	  "  check          deploy instance and run regression tests on it\n",
	  "  contribcheck   run tests of modules in contrib/\n",
	  "  ecpgcheck      run regression tests of ECPG\n",
	  "  installcheck   run regression tests on existing instance\n",
	  "  isolationcheck run isolation tests\n",
	  "  modulescheck   run tests of modules in src/test/modules/\n",
	  "  plcheck        run tests of PL languages\n",
	  "  recoverycheck  run recovery test suite\n",
	  "  taptest        run an arbitrary TAP test set\n",
	  "  upgradecheck   run tests of pg_upgrade\n",
	  "\nOptions for <arg>: (used by check and installcheck)\n",
	  "  serial         serial mode\n",
	  "  parallel       parallel mode\n",
	  "\nOption for <arg>: for taptest\n",
	  "  TEST_DIR       (required) directory where tests reside\n";
	exit(1);
}
{ "pile_set_name": "Github" }
# Production environment configuration for this Rails application.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options)
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "example_#{Rails.env}"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT (e.g. for containerized deployments) when requested via env var.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
{ "pile_set_name": "Github" }
# DeepDreamVideo

Implementing **#deepdream** on video

**Creative Request**

It would be very helpful for other deepdream researchers if you could **include the used parameters in the description of your youtube videos**. You can find the parameters in the image filenames.

Included experiment: Deep Dreaming Fear & Loathing in Las Vegas: the Great San Francisco Acid Wave

The results can be seen on youtube: https://www.youtube.com/watch?v=oyxSerkkP4o

Mp4 not yet destroyed by youtube compression also at [mega.nz](https://mega.nz/#!KldUTKKD!38qj6WtEOE4pno90dAW98gkNK2O3tvz6ZwKTxpHJWFc) together with [original video file](https://mega.nz/#!X9MWWDTQ!lbC7C5B4incMkLGVM00qwI4NP-ifi2KcqsmfsdIm_E0).

All single processed + unprocessed frames are also at [github](https://github.com/graphific/Fear-and-Loathing-experiment)

![deepdreamanim1](http://media.giphy.com/media/l41lRx92QqsIXy5MI/giphy.gif "deep dream animation 1")
![deepdreamanim2](http://media.giphy.com/media/l41lSzjTsGJcIzpKg/giphy.gif "deep dream animation 2")

Advice is also at https://github.com/graphific/DeepDreamVideo/wiki

## Install

Dependencies: a good overview (constantly being updated) on which software libraries to install & list of web resources/howto is at reddit: https://www.reddit.com/r/deepdream/comments/3cawxb/what_are_deepdream_images_how_do_i_make_my_own/

## On using a CPU as opposed to GPU

As there's been a lot of interest in using this code, and deepdream in general, on machines without a decent graphics card (GPU), here's a minor benchmark to let you decide if it's worth the time on your pc:<br/>
(note that the timing also depends on how far down in the layers of the network you want to go: the deeper, the longer time it takes)<br/>
<br/>
GPU K20 (amazon ec2 g2.2xlarge, 2x 4Gb GPU):<br/>
1 picture, 540x360px = 1 second = 60 min for 2 min video (3600 frames/framerate 30)<br/>
1 picture, 1024x768px = 3 seconds = 3h for 2 min video (3600 frames/framerate 30)<br/>
<br/>
CPU (amazon ec2 g2.2xlarge, Intel Xeon
E5-2670 (Sandy Bridge) Processor, 8 cores, 2.6 GHz, 3.3 GHz turbo ): <br/> 1 picture, 540x360px = 45 seconds = 1d 21h for 2 min video (3600 frames/framerate 30)<br/> 1 picture, 1024x768px = 144 seconds = 6d for 2 min video (3600 frames/framerate 30)<br/> ##Usage: Extract frames from the source movie in the selected format (png or jpg). `./1_movie2frames.sh ffmpeg [original_video] [frames_directory] [png / jpg]` or `./1_movie2frames.sh avconv [original_video] [frames_directory] [png / jpg]` or `./1_movie2frames.sh mplayer [original_video] [frames_directory] [png / jpg]` Let a pretrained deep neural network dream on it frames, one by one, taking each new frame and adding 0-50% of the old frame into it for continuity of the hallucinated artifacts, and go drink your caffe <pre>usage: 2_dreaming_time.py [-h] -i INPUT -o OUTPUT -it IMAGE_TYPE [--gpu GPU] [-t MODEL_PATH] [-m MODEL_NAME] [-p PREVIEW] [-oct OCTAVES] [-octs OCTAVESCALE] [-itr ITERATIONS] [-j JITTER] [-z ZOOM] [-s STEPSIZE] [-b BLEND] [-l LAYERS [LAYERS ...]] [-v VERBOSE] [-gi GUIDE_IMAGE] [-sf START_FRAME] [-ef END_FRAME] Dreaming in videos. optional arguments: -h, --help show this help message and exit -i INPUT, --input INPUT Input directory where extracted frames are stored -o OUTPUT, --output OUTPUT Output directory where processed frames are to be stored -it IMAGE_TYPE, --image_type IMAGE_TYPE Specify whether jpg or png --gpu GPU Switch for gpu computation. -t MODEL_PATH, --model_path MODEL_PATH Model directory to use -m MODEL_NAME, --model_name MODEL_NAME Caffe Model name to use -p PREVIEW, --preview PREVIEW Preview image width. Default: 0 -oct OCTAVES, --octaves OCTAVES Octaves. Default: 4 -octs OCTAVESCALE, --octavescale OCTAVESCALE Octave Scale. Default: 1.4 -itr ITERATIONS, --iterations ITERATIONS Iterations. Default: 10 -j JITTER, --jitter JITTER Jitter. Default: 32 -z ZOOM, --zoom ZOOM Zoom in Amount. Default: 1 -s STEPSIZE, --stepsize STEPSIZE Step Size. 
Default: 1.5 -b BLEND, --blend BLEND Blend Amount. Default: "0.5" (constant), or "loop" (0.5-1.0), or "random" -l LAYERS [LAYERS ...], --layers LAYERS [LAYERS ...] Array of Layers to loop through. Default: [customloop] - or choose ie [inception_4c/output] for that single layer -v VERBOSE, --verbose VERBOSE verbosity [0-3] -gi GUIDE_IMAGE, --guide_image GUIDE_IMAGE path to guide image -sf START_FRAME, --start_frame START_FRAME starting frame nr -ef END_FRAME, --end_frame END_FRAME end frame nr </pre> gpu: `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir --gpu 0` cpu: `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir` different models can be loaded with: `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir --model_path ../caffe/models/Places205-CNN/ --model_name Places205.caffemodel --gpu 0` or `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir --model_path ../caffe/models/bvlc_googlenet/ --model_name bvlc_googlenet.caffemodel --gpu 0` (again eat your heart out, Not a free lunch, but free models are [here](https://github.com/BVLC/caffe/wiki/Model-Zoo)) and sticking to one specific layer: `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir -l inception_4c/output --gpu 0` (**don't forget the --gpu 0 flag if you got a gpu to run on**) Once enough frames are processed (the script will cut the audio to the needed length automatically) or once all frames are done, put the frames + audio back together: `./3_frames2movie.sh [ffmpeg / avconv / mplayer] [processed_frames_dir] [original_video] [png / jpg]` ##Guided Dreaming <img src="images/guided_dreaming.jpg?raw=true" style="max-width: 300px;"/><br/> command: `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir -l inception_4c/output --guide-image image_file.jpg --gpu 0` or `python 2_dreaming_time.py -i frames_directory -o processed_frames_dir -l inception_4c/output --guide-image image_file.jpg` if you're 
running cpu mode

## Batch Processing with different parameters

`python 2_dreaming_time.py -i frames -o processed -l inception_4c/output --guide-image flower.jpg --gpu 0 --start-frame 1 --end-frame 100; python 2_dreaming_time.py -i frames -o processed -l inception_4b/output --guide-image disco.jpg --gpu 0 --start-frame 101 --end-frame 200`

## Blending Options

The best results come from a well-selected blending factor, used to blend each frame into the next, keeping consistency between the frames and the dreamed up artefacts, but without the added dreamed artefacts overruling the original scene, or in the opposite case, switching too rapidly.

blending can be set by <pre>--blend</pre> and can be a float, default 0.5, "random" (a random float between 0.5 and 1., where 1 means disregarding all info from the old frame and starting from scratch with dreaming up artefacts), and "loop" which loops back and forth from 0.5 to 1.0, as originally done in the Fear and Loathing clip.

Constant (default):

`python 2_dreaming_time.py -i frames_directory -o processed_frames_dir -b 0.5`

<img src="images/blend_constant.gif?raw=true" style="max-width: 300px;"/><br/>

Loop:

`python 2_dreaming_time.py -i frames_directory -o processed_frames_dir -b loop`

<img src="images/blend_loop.gif?raw=true" style="max-width: 300px;"/><br/>

Random:

`python 2_dreaming_time.py -i frames_directory -o processed_frames_dir -b random`

<img src="images/blend_random.gif?raw=true" style="max-width: 300px;"/><br/>

## More information:

This repo implements a deep neural network hallucinating Fear & Loathing in Las Vegas. Visualizing the internals of a deep net, we let it develop further what it thinks it sees.

We're using the #deepdream technique developed by Google, first explained in the Google Research blog post about Neural Network art.
- http://googleresearch.blogspot.nl/2015/06/inceptionism-going-deeper-into-neural.html Code: - https://github.com/google/deepdream ## parameters used (and useful to play with): - network: standard reference GoogLeNet model trained on ImageNet from the Caffe Model Zoo (https://github.com/BVLC/caffe/wiki/Model-Zoo) - iterations: 5 - jitter: 32 (default) - octaves: 4 (default) - layers locked to moving upwards from inception_4c/output to inception_5b/output (only the output layers, as they are most sensitive to visualizing "objects", where reduce layers are more like "edge detectors") and back again - every next unprocessed frame in the movie clip is blended with the previous processed frame before being "dreamed" on, moving the alpha from 0.5 to 1 and back again (so 50% previous image net created, 50% the movie frame, to taking 100% of the movie frame only). This takes care of "overfitting" on the frames and makes sure we don't iteratively build more and more "hallucinations" of the net and move away from the original movie clip. ## An investigation of using the MIT Places trained CNN (mainly landscapes): https://www.youtube.com/watch?v=6IgbMiEaFRY ## Installing DeepDream: - original Google code is relatively straightforward to use: https://github.com/google/deepdream/blob/master/dream.ipynb - gist for osx: https://gist.github.com/robertsdionne/f58a5fc6e5d1d5d2f798 - docker image: https://registry.hub.docker.com/u/mjibson/deepdream/ - booting preinstalled ami + installing caffe at amazon: https://github.com/graphific/dl-machine - general overview of convolutinal nets using Caffe: https://github.com/graphific/DL-Meetup-intro/blob/master/PyConSe15-cat-vs-dog.ipynb - or using lasagne: http://www.slideshare.net/roelofp/python-for-image-understanding-deep-learning-with-convolutional-neural-nets ## Credits Roelof | [KTH](http://www.csc.kth.se/~roelof/) & [Graph Technologies](http://www.graph-technologies.com/) | [@graphific](https://twitter.com/graphific)
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?>
<!-- Minimal EPUB 3 package document declaring an EDUPUB teacher's edition
     (dc:type "edupub" + "teacher-edition") with a single navigation item. -->
<package xmlns="http://www.idpf.org/2007/opf" version="3.0" unique-identifier="uid" xmlns:dc="http://purl.org/dc/elements/1.1/">
	<metadata>
		<dc:title>Title</dc:title>
		<dc:language>en</dc:language>
		<dc:identifier id="uid">NOID</dc:identifier>
		<meta property="dcterms:modified">2019-01-01T12:00:00Z</meta>
		<!-- Required metadata for EDUPUB teacher's edition -->
		<dc:type>edupub</dc:type>
		<dc:type>teacher-edition</dc:type>
		<!--<dc:source>org.example.sudent.edition.id</dc:source>-->
		<meta property="schema:accessibilityFeature">tableOfContents</meta>
	</metadata>
	<manifest>
		<item id="t001" href="contents.xhtml" properties="nav" media-type="application/xhtml+xml"/>
	</manifest>
	<spine>
		<itemref idref="t001"/>
	</spine>
</package>
{ "pile_set_name": "Github" }
// // CallToObjectEqualsViaBaseTests.cs // // Author: // Simon Lindgren <[email protected]> // // Copyright (c) 2012 Simon Lindgren // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System; using NUnit.Framework; using ICSharpCode.NRefactory.CSharp.CodeActions; using ICSharpCode.NRefactory.CSharp.Refactoring; namespace ICSharpCode.NRefactory.CSharp.CodeIssues { [TestFixture] public class CallToObjectEqualsViaBaseTests : InspectionActionTestBase { [Test] public void SimpleCase() { var input = @" class Foo { Foo() { bool b = base.Equals (""blah""); } }"; TestRefactoringContext context; var issues = GetIssues(new CallToObjectEqualsViaBaseIssue(), input, out context); Assert.AreEqual(1, issues.Count); CheckFix(context, issues, @" class Foo { Foo() { bool b = object.ReferenceEquals (this, ""blah""); } }"); } [Test] public void NonObjectBase() { var input = @" class Foo { } class Bar : Foo { void Baz () { bool b = base.Equals (""blah""); } }"; TestRefactoringContext context; var issues = GetIssues(new CallToObjectEqualsViaBaseIssue(), input, out context); Assert.AreEqual(1, issues.Count); CheckFix(context, issues, @" class Foo { } class Bar : Foo { void Baz () { bool b = object.ReferenceEquals (this, ""blah""); } }"); } [Test] public void IgnoresCallsToOtherObjects() { var input = @" class Foo { } class Bar : Foo { void Baz () { var foo1 = new Foo(); var foo2 = new Foo(); bool b = foo1.Equals(foo2); } }"; TestRefactoringContext context; var issues = GetIssues(new CallToObjectEqualsViaBaseIssue(), input, out context); Assert.AreEqual(0, issues.Count); } } }
{ "pile_set_name": "Github" }
package com.bird.web.file.upload;

import com.bird.web.file.upload.validator.ValidateResult;
import org.springframework.web.multipart.MultipartFile;

/**
 * Convenience no-op base class for upload listeners.
 * <p>
 * Implements every {@link IUploadListener} callback with an empty body so that
 * subclasses only need to override the hooks they actually care about.
 * <p>
 * NOTE(review): despite the "Abstract" name, the class is not declared
 * {@code abstract} — confirm whether direct instantiation is intended.
 *
 * @author liuxx
 * @date 2018/5/2
 */
public class AbstractUploadListener implements IUploadListener {

    /** Hook invoked before validation of the uploaded file. No-op by default. */
    @Override
    public void beforeValidate(MultipartFile file, IUploadContext context) {

    }

    /** Hook invoked after validation, receiving the validation outcome. No-op by default. */
    @Override
    public void afterValidate(MultipartFile file, IUploadContext context, ValidateResult result) {

    }

    /** Hook invoked before the file-handling step. No-op by default. */
    @Override
    public void beforeHandle(MultipartFile file, IUploadContext context) {

    }

    /** Hook invoked after the file-handling step. No-op by default. */
    @Override
    public void afterHandle(MultipartFile file, IUploadContext context) {

    }

    /** Hook invoked before the storage step. No-op by default. */
    @Override
    public void beforeStorage(MultipartFile file, IUploadContext context) {

    }

    /** Hook invoked after the storage step, receiving the upload outcome. No-op by default. */
    @Override
    public void afterStorage(MultipartFile file, IUploadContext context, UploadResult result) {

    }
}
{ "pile_set_name": "Github" }
// Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). // All rights reserved. // This component and the accompanying materials are made available // under the terms of the License "Eclipse Public License v1.0" // which accompanies this distribution, and is available // at the URL "http://www.eclipse.org/legal/epl-v10.html". // // Initial Contributors: // Nokia Corporation - initial contribution. // // Contributors: // // Description: // Implements a Session of a Symbian OS server for the RUsbMassStorage API // // /** @file @internalTechnology */ #ifndef CUSBOTGSESSION_H #define CUSBOTGSESSION_H _LIT(KUsbOtgClientPncCat, "UsbOtgServer"); class CUsbOtgServer; enum TMsManPanicClient { EUsbOtgPanicIllegalIPC }; class CUsbOtgSession : public CSession2 { public: static CUsbOtgSession* NewL(); virtual void CreateL(); protected: CUsbOtgSession(); void ConstructL(); private: ~CUsbOtgSession(); public: // CSession2 void ServiceL(const RMessage2& aMessage); void DispatchMessageL(const RMessage2& aMessage); private: // Services void DeviceInsertedL(const RMessage2& aMessage); void NotifyChange(const RMessage2& aMessage); void NotifyChangeCancel(); TInt BusDrop(); protected: // panic the client void PanicClient(const RMessage2& aMessage,TInt aPanic) const; private: CUsbOtgServer& Server(); }; inline CUsbOtgServer& CUsbOtgSession::Server() { return *static_cast<CUsbOtgServer*>(const_cast<CServer2*>(CSession2::Server())); } #endif // CUSBOTGSESSION_H
{ "pile_set_name": "Github" }
--- title: Magento_CatalogRuleStaging source_repo: magento2ee release: 2.4.0 github_path: app/code/Magento/CatalogRuleStaging/README.md last_modified_at: '2016-08-03 10:46:30 +0300' content: |- ## Magento_CatalogRuleStaging module ## Overview The Magento_CatalogRuleStaging module is a part of the staging functionality in Magento EE. It enables you to create new catalog rule updates or add new changes to the existing store updates. In other words, you can modify the catalog rules in updates. These updates are shown on the content dashboard. ## Implementation details The Magento_CatalogRuleStaging module changes a catalog rule creation page and the catalog rule related database tables to make them compatible with the Magento Staging Framework. This module depends on the Magento_CatalogRule module and extends its functionality. It changes the database structure of the Magento_CatalogRule module and the way in which catalog rules are managed. The Magento_CatalogRule module must be enabled. The Magento_CatalogRuleStaging module enables you to stage the following catalog rule attributes: - Rule Name - Description - Websites - Customer Groups - Priority - Product Apply - Product Discount Amount - Subproduct Discounts - Subproduct Apply - Subproduct Discount Amount - Discard Subsequent Rules These attributes cannot be modified and are a part of the static Magento Catalog Rule form. ### Installation details The Magento_CatalogRuleStaging module makes irreversible changes in a database during installation. You cannot uninstall this module. ## Dependencies You can find the list of modules that have dependencies on the Magento_CatalogRuleStaging module in the `require` section of the `composer.json` file. The file is located in the root directory of the module. ## Extension points Extension points enable extension developers to interact with the Magento_CatalogRuleStaging module. 
You can interact with the Magento_CatalogRuleStaging module using the Magento extension mechanism, see [Magento plug-ins](http://devdocs.magento.com/guides/v2.1/extension-dev-guide/plugins.html). [The Magento dependency injection mechanism](http://devdocs.magento.com/guides/v2.1/extension-dev-guide/depend-inj.html) enables you to override the functionality of the Magento_CatalogRuleStaging module. ### Layouts You can extend and override layouts in the `app/code/Magento/CatalogRuleStaging/view/adminhtml/layout` directory. For more information about layouts, see the [Layout documentation](http://devdocs.magento.com/guides/v2.1/frontend-dev-guide/layouts/layout-overview.html). ## Additional Information You can track [backward incompatible changes made in a Magento EE mainline after the Magento 2.0 release](http://devdocs.magento.com/guides/v2.0/release-notes/changes/ee_changes.html).
{ "pile_set_name": "Github" }
import { PolygonLayer } from '@deck.gl/layers'; import { point, polygon } from '@turf/helpers'; import turfBbox from '@turf/bbox'; import turfBboxPolygon from '@turf/bbox-polygon'; import turfBuffer from '@turf/buffer'; import turfDifference from '@turf/difference'; import turfDistance from '@turf/distance'; const POLYGON_LINE_COLOR = [0, 255, 0, 255]; const POLYGON_FILL_COLOR = [255, 255, 255, 90]; const POLYGON_LINE_WIDTH = 2; const POLYGON_DASHES = [20, 20]; const POLYGON_THRESHOLD = 0.01; const EXPANSION_KM = 10; const LAYER_ID_VIEW = 'DeckDrawerView'; const LAYER_ID_PICK = 'DeckDrawerPick'; export const SELECTION_TYPE = { NONE: null, RECTANGLE: 'rectangle', POLYGON: 'polygon', }; export default class DeckDrawer { nebula: Record<string, any>; usePolygon: boolean; validPolygon: boolean; landPoints: [number, number][]; mousePoints: [number, number][]; constructor(nebula: Record<string, any>) { this.nebula = nebula; this.usePolygon = false; this.landPoints = []; this.mousePoints = []; } _getLayerIds() { // TODO: sort by mouse priority return this.nebula.deckgl.props.layers .filter((l) => l && l.props && l.props.nebulaLayer && l.props.nebulaLayer.enableSelection) .map((l) => l.id); } _selectFromPickingInfos(pickingInfos: Record<string, any>[]) { const objects = pickingInfos.map( ({ layer, index, object }) => object.original || layer.props.nebulaLayer.deckCache.originals[index] ); this.nebula.props.onSelection(objects); } _getBoundingBox(): Record<string, any> { const { mousePoints } = this; const allX = mousePoints.map((mousePoint) => mousePoint[0]); const allY = mousePoints.map((mousePoint) => mousePoint[1]); const x = Math.min(...allX); const y = Math.min(...allY); const maxX = Math.max(...allX); const maxY = Math.max(...allY); return { x, y, width: maxX - x, height: maxY - y }; } _selectRectangleObjects() { if (this.landPoints.length !== 2) return; const [x1, y1] = this.mousePoints[0]; const [x2, y2] = this.mousePoints[1]; const pickingInfos = 
this.nebula.deckgl.pickObjects({ x: Math.min(x1, x2), y: Math.min(y1, y2), width: Math.abs(x2 - x1), height: Math.abs(y2 - y1), layerIds: this._getLayerIds(), }); this._selectFromPickingInfos(pickingInfos); } _selectPolygonObjects() { const pickingInfos = this.nebula.deckgl.pickObjects({ ...this._getBoundingBox(), layerIds: [LAYER_ID_PICK, ...this._getLayerIds()], }); this._selectFromPickingInfos(pickingInfos.filter((item) => item.layer.id !== LAYER_ID_PICK)); } _getMousePosFromEvent(event: Record<string, any>): [number, number] { const { offsetX, offsetY } = event; return [offsetX, offsetY]; } handleEvent( event: Record<string, any>, lngLat: [number, number], selectionType: number ): { redraw: boolean; deactivate: boolean } { // capture all events (mouse-up is needed to prevent us stuck in moving map) if (event.type !== 'mouseup') event.stopPropagation(); // @ts-ignore this.usePolygon = selectionType === SELECTION_TYPE.POLYGON; let redraw = false; let deactivate = false; const { usePolygon, landPoints, mousePoints } = this; if (event.type === 'mousedown') { if (usePolygon && landPoints.length) { // if landPoints.length is zero we want to insert two points (so we let it run the else) // also don't insert if polygon is invalid if (this.landPoints.length < 3 || this.validPolygon) { landPoints.push(lngLat); mousePoints.push(this._getMousePosFromEvent(event)); } } else { this.landPoints = [lngLat, lngLat]; const m = this._getMousePosFromEvent(event); this.mousePoints = [m, m]; } redraw = true; } else if (event.type === 'mousemove' && landPoints.length) { // update last point landPoints[landPoints.length - 1] = lngLat; mousePoints[mousePoints.length - 1] = this._getMousePosFromEvent(event); redraw = true; } else if (event.type === 'mouseup') { if (usePolygon) { // check to see if completed // TODO: Maybe double-click to finish? 
if ( landPoints.length > 4 && turfDistance(landPoints[0], landPoints[landPoints.length - 1]) < POLYGON_THRESHOLD && this.validPolygon ) { this._selectPolygonObjects(); this.reset(); redraw = true; deactivate = true; } } else { this._selectRectangleObjects(); this.reset(); redraw = true; deactivate = true; } } return { redraw, deactivate }; } reset() { this.landPoints = []; this.mousePoints = []; } _makeStartPointHighlight(center: [number, number]): number[] { const buffer = turfBuffer(point(center), POLYGON_THRESHOLD / 4.0); // @ts-ignore return turfBboxPolygon(turfBbox(buffer)).geometry.coordinates; } render() { const data = []; const dataPick = []; if (!this.usePolygon && this.landPoints.length === 2) { // Use mouse points instead of land points so we get the right shape // no matter what bearing is. const [[x1, y1], [x2, y2]] = this.mousePoints; const selPolygon = [ [x1, y1], [x1, y2], [x2, y2], [x2, y1], [x1, y1], ].map((mousePos) => this.nebula.unprojectMousePosition(mousePos)); data.push({ polygon: selPolygon, lineColor: POLYGON_LINE_COLOR, fillColor: POLYGON_FILL_COLOR, }); } else if (this.usePolygon && this.landPoints.length) { data.push({ polygon: this.landPoints, lineColor: POLYGON_LINE_COLOR, fillColor: POLYGON_FILL_COLOR, }); // Hack: use a polygon to hide the outside, because pickObjects() // does not support polygons if (this.landPoints.length >= 3) { const landPointsPoly = polygon([[...this.landPoints, this.landPoints[0]]]); const bigBuffer = turfBuffer(point(this.landPoints[0]), EXPANSION_KM); let bigPolygon; try { // turfDifference throws an exception if the polygon // intersects with itself bigPolygon = turfDifference(bigBuffer, landPointsPoly); dataPick.push({ polygon: bigPolygon.geometry.coordinates, fillColor: [0, 0, 0, 1], }); this.validPolygon = true; } catch (e) { // invalid selection polygon this.validPolygon = false; } } } if (this.landPoints.length) { // highlight start point data.push({ polygon: 
this._makeStartPointHighlight(this.landPoints[0]), lineColor: [0, 0, 0, 0], fillColor: POLYGON_LINE_COLOR, }); } // Hack to make the PolygonLayer() stay active, // otherwise it takes 3 seconds (!) to init! // TODO: fix this data.push({ polygon: [[0, 0]] }); dataPick.push({ polygon: [[0, 0]] }); return [ new PolygonLayer({ id: LAYER_ID_VIEW, data, // @ts-ignore fp64: false, opacity: 1.0, pickable: false, lineWidthMinPixels: POLYGON_LINE_WIDTH, lineWidthMaxPixels: POLYGON_LINE_WIDTH, lineDashJustified: true, getLineDashArray: (x) => POLYGON_DASHES, // @ts-ignore getLineColor: (obj) => obj.lineColor || [0, 0, 0, 255], // @ts-ignore getFillColor: (obj) => obj.fillColor || [0, 0, 0, 255], // @ts-ignore getPolygon: (o) => o.polygon, }), new PolygonLayer({ id: LAYER_ID_PICK, data: dataPick, // @ts-ignore getLineColor: (obj) => obj.lineColor || [0, 0, 0, 255], // @ts-ignore getFillColor: (obj) => obj.fillColor || [0, 0, 0, 255], // @ts-ignore fp64: false, opacity: 1.0, stroked: false, pickable: true, // @ts-ignore getPolygon: (o) => o.polygon, }), ]; } }
{ "pile_set_name": "Github" }
// // detail/win_iocp_wait_op.hpp // ~~~~~~~~~~~~~~~~~~~~~~~~~~~ // // Copyright (c) 2003-2019 Christopher M. Kohlhoff (chris at kohlhoff dot com) // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // #ifndef ASIO_DETAIL_WIN_IOCP_WAIT_OP_HPP #define ASIO_DETAIL_WIN_IOCP_WAIT_OP_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1200) # pragma once #endif // defined(_MSC_VER) && (_MSC_VER >= 1200) #include "asio/detail/config.hpp" #if defined(ASIO_HAS_IOCP) #include "asio/detail/bind_handler.hpp" #include "asio/detail/buffer_sequence_adapter.hpp" #include "asio/detail/fenced_block.hpp" #include "asio/detail/handler_alloc_helpers.hpp" #include "asio/detail/handler_invoke_helpers.hpp" #include "asio/detail/memory.hpp" #include "asio/detail/reactor_op.hpp" #include "asio/detail/socket_ops.hpp" #include "asio/error.hpp" #include "asio/detail/push_options.hpp" namespace asio { namespace detail { template <typename Handler, typename IoExecutor> class win_iocp_wait_op : public reactor_op { public: ASIO_DEFINE_HANDLER_PTR(win_iocp_wait_op); win_iocp_wait_op(socket_ops::weak_cancel_token_type cancel_token, Handler& handler, const IoExecutor& io_ex) : reactor_op(&win_iocp_wait_op::do_perform, &win_iocp_wait_op::do_complete), cancel_token_(cancel_token), handler_(ASIO_MOVE_CAST(Handler)(handler)), io_executor_(io_ex) { handler_work<Handler, IoExecutor>::start(handler_, io_executor_); } static status do_perform(reactor_op*) { return done; } static void do_complete(void* owner, operation* base, const asio::error_code& result_ec, std::size_t /*bytes_transferred*/) { asio::error_code ec(result_ec); // Take ownership of the operation object. 
win_iocp_wait_op* o(static_cast<win_iocp_wait_op*>(base)); ptr p = { asio::detail::addressof(o->handler_), o, o }; handler_work<Handler, IoExecutor> w(o->handler_, o->io_executor_); ASIO_HANDLER_COMPLETION((*o)); // The reactor may have stored a result in the operation object. if (o->ec_) ec = o->ec_; // Map non-portable errors to their portable counterparts. if (ec.value() == ERROR_NETNAME_DELETED) { if (o->cancel_token_.expired()) ec = asio::error::operation_aborted; else ec = asio::error::connection_reset; } else if (ec.value() == ERROR_PORT_UNREACHABLE) { ec = asio::error::connection_refused; } // Make a copy of the handler so that the memory can be deallocated before // the upcall is made. Even if we're not about to make an upcall, a // sub-object of the handler may be the true owner of the memory associated // with the handler. Consequently, a local copy of the handler is required // to ensure that any owning sub-object remains valid until after we have // deallocated the memory here. detail::binder1<Handler, asio::error_code> handler(o->handler_, ec); p.h = asio::detail::addressof(handler.handler_); p.reset(); // Make the upcall if required. if (owner) { fenced_block b(fenced_block::half); ASIO_HANDLER_INVOCATION_BEGIN((handler.arg1_)); w.complete(handler, handler.handler_); ASIO_HANDLER_INVOCATION_END; } } private: socket_ops::weak_cancel_token_type cancel_token_; Handler handler_; IoExecutor io_executor_; }; } // namespace detail } // namespace asio #include "asio/detail/pop_options.hpp" #endif // defined(ASIO_HAS_IOCP) #endif // ASIO_DETAIL_WIN_IOCP_WAIT_OP_HPP
{ "pile_set_name": "Github" }
const fs = require('fs')
const qcloud = require('wafer-node-sdk')

// Base configuration shipped with the project.
const configs = require('./config')

// Location of the deployed runtime override configuration.
const SDK_CONFIG_PATH = '/data/release/sdk.config.json'

/**
 * Reads the deployed sdk.config.json.
 *
 * Returns an empty object when the file is absent, is not a regular file,
 * or does not contain valid JSON, so the base config alone is used.
 */
function readSdkConfig () {
  let stats
  try {
    stats = fs.statSync(SDK_CONFIG_PATH)
  } catch (e) {
    return {}
  }

  if (!stats.isFile()) {
    console.log('sdk.config.json 不存在,将使用 config.js 中的配置')
    return {}
  }

  try {
    return JSON.parse(fs.readFileSync(SDK_CONFIG_PATH, 'utf8'))
  } catch (e) {
    console.log('sdk.config.json 解析错误,不是 JSON 字符串')
    return {}
  }
}

// Merge the deployed overrides with the base configuration, initialize the
// SDK, and export the initialized instance.
module.exports = qcloud(Object.assign({}, readSdkConfig(), configs))
{ "pile_set_name": "Github" }
/**
 * Localization strings for the UI Multiselect widget
 *
 * German translations for the widget's action links and selection counter.
 *
 * @locale de, de-DE, de-AT, de-CH
 */
$.extend($.ui.multiselect.locale, {
	addAll:'Alle hinzufügen',
	removeAll:'Alle entfernen',
	itemsCount:'Einträge ausgewählt'
});
{ "pile_set_name": "Github" }
#!/usr/bin/env bats # This tests contacting a registry using a token server load helpers user="testuser" password="testpassword" email="[email protected]" base="hello-world" @test "Test token server login" { run docker_t login -u $user -p $password -e $email localregistry:5554 echo $output [ "$status" -eq 0 ] # First line is WARNING about credential save or email deprecation [ "${lines[2]}" = "Login Succeeded" -o "${lines[1]}" = "Login Succeeded" ] } @test "Test token server bad login" { run docker_t login -u "testuser" -p "badpassword" -e $email localregistry:5554 [ "$status" -ne 0 ] run docker_t login -u "baduser" -p "testpassword" -e $email localregistry:5554 [ "$status" -ne 0 ] } @test "Test push and pull with token auth" { login localregistry:5555 image="localregistry:5555/testuser/token" build $image "$base:latest" run docker_t push $image echo $output [ "$status" -eq 0 ] docker_t rmi $image docker_t pull $image } @test "Test push and pull with token auth wrong namespace" { login localregistry:5555 image="localregistry:5555/notuser/token" build $image "$base:latest" run docker_t push $image [ "$status" -ne 0 ] } @test "Test oauth token server login" { version_check docker "$GOLEM_DIND_VERSION" "1.11.0" login_oauth localregistry:5557 } @test "Test oauth token server bad login" { version_check docker "$GOLEM_DIND_VERSION" "1.11.0" run docker_t login -u "testuser" -p "badpassword" -e $email localregistry:5557 [ "$status" -ne 0 ] run docker_t login -u "baduser" -p "testpassword" -e $email localregistry:5557 [ "$status" -ne 0 ] } @test "Test oauth push and pull with token auth" { version_check docker "$GOLEM_DIND_VERSION" "1.11.0" login_oauth localregistry:5558 image="localregistry:5558/testuser/token" build $image "$base:latest" run docker_t push $image echo $output [ "$status" -eq 0 ] docker_t rmi $image docker_t pull $image } @test "Test oauth push and build with token auth" { version_check docker "$GOLEM_DIND_VERSION" "1.11.0" login_oauth localregistry:5558 
image="localregistry:5558/testuser/token-build" tempImage $image run docker_t push $image echo $output [ "$status" -eq 0 ] has_digest "$output" docker_t rmi $image image2="localregistry:5558/testuser/token-build-2" run build $image2 $image echo $output [ "$status" -eq 0 ] run docker_t push $image2 echo $output [ "$status" -eq 0 ] has_digest "$output" } @test "Test oauth push and pull with token auth wrong namespace" { version_check docker "$GOLEM_DIND_VERSION" "1.11.0" login_oauth localregistry:5558 image="localregistry:5558/notuser/token" build $image "$base:latest" run docker_t push $image [ "$status" -ne 0 ] } @test "Test oauth with v1 search" { version_check docker "$GOLEM_DIND_VERSION" "1.12.0" run docker_t search localregistry:5600/testsearch [ "$status" -ne 0 ] login_oauth localregistry:5600 run docker_t search localregistry:5600/testsearch echo $output [ "$status" -eq 0 ] echo $output | grep "testsearch-1" echo $output | grep "testsearch-2" }
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 */
package org.eclipse.ditto.protocoladapter.provider;

import org.eclipse.ditto.protocoladapter.Adapter;
import org.eclipse.ditto.signals.commands.base.ErrorResponse;

/**
 * Interface providing the error response adapter.
 * <p>
 * Deliberately package-private: it is an internal building block composed by
 * other provider types in this package.
 *
 * @param <E> the type of error response
 */
interface ErrorResponseAdapterProvider<E extends ErrorResponse<?>> {

    /**
     * Returns the adapter handling error responses of type {@code E}.
     *
     * @return the error response adapter
     */
    Adapter<E> getErrorResponseAdapter();
}
{ "pile_set_name": "Github" }
////////////////////////////////////////////////////////////////////////////
//
// Copyright 2017 Kishikawa Katsumi.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////

import Foundation

/// A source file's text together with per-line offset metadata, allowing
/// substrings to be extracted from a `SourceRange`.
struct SourceFile {
    /// The file contents viewed as UTF-8 bytes; all offsets below are
    /// advanced through this view.
    let sourceText: String.UTF8View
    /// Per-line records; indexed directly by a range's `line` value.
    let sourceLines: [SourceLine]

    /// Returns the text covered by `range`.
    ///
    /// Indices are computed by advancing from the start of the UTF-8 view by
    /// `sourceLines[line].offset + column` (or just `column` for line 0).
    /// NOTE(review): this assumes `offset` and `column` are UTF-8 *byte*
    /// offsets, not character counts — confirm with the code that produces
    /// `SourceRange`/`SourceLine`.
    subscript(range: SourceRange) -> String {
        let start = range.start
        let end = range.end
        let startIndex: String.Index
        if start.line > 0 {
            // Lines after the first: offset of the line start plus the column.
            startIndex = sourceText.index(sourceText.startIndex, offsetBy: sourceLines[start.line].offset + start.column)
        } else {
            // First line: the column alone is the distance from the file start.
            startIndex = sourceText.index(sourceText.startIndex, offsetBy: start.column)
        }
        let endIndex: String.Index
        if end.line > 0 {
            endIndex = sourceText.index(sourceText.startIndex, offsetBy: sourceLines[end.line].offset + end.column)
        } else {
            endIndex = sourceText.index(sourceText.startIndex, offsetBy: end.column)
        }
        return String(String(sourceText)[startIndex..<endIndex])
    }
}

/// Metadata for a single line of the source file.
struct SourceLine {
    /// The line's text as UTF-8 bytes.
    let text: String.UTF8View
    /// 1-based or 0-based numbering is not established here — see producers.
    let lineNumber: Int
    /// Distance of the line start from the beginning of the file
    /// (presumably in UTF-8 bytes — see note on `SourceFile.subscript`).
    let offset: Int
}
{ "pile_set_name": "Github" }
# Journal entries ![Screen Capture](./entries.gif) A journal entry is created for every day. It collects the day's tasks, memos and any other content which you don't want to have in separate notes. I use it personally mostly for time tracking.
{ "pile_set_name": "Github" }
shader vertex = $CORANGE/shaders/deferred/clear.vs shader frag = $CORANGE/shaders/deferred/clear.fs
{ "pile_set_name": "Github" }
:: After Pulling, Patching, and making sure the version number is changed in src, this bat will compile and create zips for all release. :: It will also create a zip for ExampleMod @ECHO off :: Compile/Build exe echo "Building Release" set tModLoaderVersion=v0.11.7.6 Beta 2 call buildRelease.bat set destinationFolder=.\tModLoader %tModLoaderVersion% Release @IF %ERRORLEVEL% NEQ 0 ( pause EXIT /B %ERRORLEVEL% ) @ECHO on :: Make up-to-date Installers ::cd ..\installer2 ::call createInstallers.bat ::cd ..\solutions :: Folder for release mkdir "%destinationFolder%" :: Temp Folders set win=%destinationFolder%\tModLoader Windows %tModLoaderVersion% set mac=%destinationFolder%\tModLoader Mac %tModLoaderVersion% set macReal=%destinationFolder%\tModLoader Mac %tModLoaderVersion%\tModLoader.app\Contents\MacOS set lnx=%destinationFolder%\tModLoader Linux %tModLoaderVersion% set mcfna=%destinationFolder%\ModCompile_FNA set mcxna=%destinationFolder%\ModCompile_XNA set pdbs=%destinationFolder%\pdbs set winsteam=..\..\steamworks_sdk_150\sdk\tools\ContentBuilder\content\Windows set lnxsteam=..\..\steamworks_sdk_150\sdk\tools\ContentBuilder\content\Linux set macsteam=..\..\steamworks_sdk_150\sdk\tools\ContentBuilder\content\Mac set sharedsteam=..\..\steamworks_sdk_150\sdk\tools\ContentBuilder\content\Shared rmdir /S /Q "%winsteam%" rmdir /S /Q "%lnxsteam%" rmdir /S /Q "%macsteam%" rmdir /S /Q "%sharedsteam%" mkdir "%win%" mkdir "%mac%" mkdir "%lnx%" mkdir "%mcfna%" mkdir "%mcxna%" mkdir "%pdbs%" mkdir "%winsteam%" mkdir "%lnxsteam%" mkdir "%macsteam%" mkdir "%sharedsteam%" :: Windows release robocopy /s ReleaseExtras\Content "%win%\Content" robocopy /s ReleaseExtras\JourneysEndCompatibilityContent "%win%\Content" robocopy /s ReleaseExtras\WindowsFiles "%win%" copy ..\src\tModLoader\bin\WindowsRelease\net45\Terraria.exe "%win%\tModLoader.exe" /y copy ..\src\tModLoader\bin\WindowsServerRelease\net45\Terraria.exe "%win%\tModLoaderServer.exe" /y copy 
..\src\tModLoader\bin\WindowsRelease\net45\tModLoader.pdb "%win%\tModLoader.pdb" /y copy ..\src\tModLoader\bin\WindowsServerRelease\net45\tModLoaderServer.pdb "%win%\tModLoaderServer.pdb" /y ::copy ReleaseExtras\README_Windows.txt "%win%\README.txt" /y ::copy ..\installer2\WindowsInstaller.jar "%win%\tModLoaderInstaller.jar" /y :: Windows Steam robocopy /s "%win%" "%winsteam%" del "%win%\steam_api.dll" del "%win%\CSteamworks.dll" call python ZipAndMakeExecutable.py "%win%" "%win%.zip" :: Windows ModCompile :: TODO: investigate why this isn't working on my machine :: for /f %%i in ('..\setup\bin\setup --steamdir') do set steamdir=%%i set steamdir=C:\Program Files (x86)\Steam\steamapps\common\tModLoader :: Make sure to clear out ModCompile and run Setup Debugging so ModCompile folder is clean from old versions. copy "%steamdir%\ModCompile" "%mcfna%" del "%mcfna%"\buildlock 2>nul copy ..\src\tModLoader\bin\WindowsRelease\net45\tModLoader.xml "%mcfna%" /y copy ..\src\tModLoader\bin\WindowsRelease\net45\tModLoader.pdb "%mcfna%" /y copy ..\references\MonoMod.RuntimeDetour.xml "%mcfna%" /y copy ..\references\MonoMod.Utils.xml "%mcfna%" /y call python ZipAndMakeExecutable.py "%mcfna%" "%mcfna%.zip" :: Linux release robocopy /s ReleaseExtras\LinuxFiles "%lnx%" robocopy /s ReleaseExtras\LinuxMacSharedFiles "%lnx%" robocopy /s ReleaseExtras\Content "%lnx%\Content" robocopy /s ReleaseExtras\JourneysEndCompatibilityContent "%lnx%\Content" copy ..\src\tModLoader\bin\LinuxRelease\net45\Terraria.exe "%lnx%\tModLoader.exe" /y copy ..\src\tModLoader\bin\LinuxServerRelease\net45\Terraria.exe "%lnx%\tModLoaderServer.exe" /y copy ..\src\tModLoader\bin\LinuxRelease\net45\tModLoader.pdb "%lnx%\tModLoader.pdb" /y copy ..\src\tModLoader\bin\LinuxServerRelease\net45\tModLoaderServer.pdb "%lnx%\tModLoaderServer.pdb" /y copy ReleaseExtras\tModLoader-mono "%lnx%\tModLoader-mono" /y copy ReleaseExtras\tModLoader-kick "%lnx%\tModLoader-kick" /y copy ReleaseExtras\tModLoader-kick 
"%lnx%\tModLoader" /y copy ReleaseExtras\tModLoader-kick "%lnx%\tModLoaderServer" /y ::copy ReleaseExtras\README_Linux.txt "%lnx%\README.txt" /y ::copy ..\installer2\LinuxInstaller.jar "%lnx%\tModLoaderInstaller.jar" /y :: Linux Steam robocopy /s "%lnx%" "%lnxsteam%" del "%lnx%\lib\libsteam_api.so" del "%lnx%\lib64\libsteam_api.so" del "%lnx%\lib\libCSteamworks.so" del "%lnx%\lib64\libCSteamworks.so" call python ZipAndMakeExecutable.py "%lnx%" "%lnx%.tar.gz" call python ZipAndMakeExecutable.py "%lnx%" "%lnx%.zip" :: Mac release robocopy /s ReleaseExtras\MacFiles "%mac%" robocopy /s ReleaseExtras\LinuxMacSharedFiles "%macReal%" robocopy /s ReleaseExtras\Content "%macReal%\Content" robocopy /s ReleaseExtras\JourneysEndCompatibilityContent "%macReal%\Content" copy ..\src\tModLoader\bin\MacRelease\net45\Terraria.exe "%macReal%\tModLoader.exe" /y copy ..\src\tModLoader\bin\MacServerRelease\net45\Terraria.exe "%macReal%\tModLoaderServer.exe" /y copy ..\src\tModLoader\bin\MacRelease\net45\tModLoader.pdb "%macReal%\tModLoader.pdb" /y copy ..\src\tModLoader\bin\MacServerRelease\net45\tModLoaderServer.pdb "%macReal%\tModLoaderServer.pdb" /y copy ReleaseExtras\tModLoader-mono "%macReal%\tModLoader-mono" /y copy ReleaseExtras\tModLoader-kick "%macReal%\tModLoader-kick" /y copy ReleaseExtras\tModLoader-kick "%macReal%\tModLoader" /y copy ReleaseExtras\tModLoader-kick "%macReal%\tModLoaderServer" /y ::copy ReleaseExtras\README_Mac.txt "%mac%\README.txt" /y ::copy ..\installer2\MacInstaller.jar "%mac%\tModLoaderInstaller.jar" /y :: Mac Steam robocopy /s "%mac%" "%macsteam%" del "%macReal%\osx\libsteam_api.dylib" del "%macReal%\osx\CSteamworks" call python ZipAndMakeExecutable.py "%mac%" "%mac%.zip" :: Mono ModCompile copy "%mcfna%" "%mcxna%" del "%mcxna%\tModLoader.FNA.exe" del "%mcxna%\FNA.dll" del "%mcxna%\tModLoader.pdb" copy ..\src\tModLoader\bin\MacRelease\net45\tModLoader.pdb "%mcxna%\tModLoader_Mac.pdb" /y copy ..\src\tModLoader\bin\LinuxRelease\net45\tModLoader.pdb 
"%mcxna%\tModLoader_Linux.pdb" /y copy ..\src\tModLoader\bin\WindowsRelease\net45\Terraria.exe "%mcxna%\tModLoader.XNA.exe" /y copy ..\src\tModLoader\bin\WindowsRelease\net45\Microsoft.Xna.Framework.dll "%mcxna%" /y copy ..\src\tModLoader\bin\WindowsRelease\net45\Microsoft.Xna.Framework.Game.dll "%mcxna%" /y copy ..\src\tModLoader\bin\WindowsRelease\net45\Microsoft.Xna.Framework.Graphics.dll "%mcxna%" /y copy ..\src\tModLoader\bin\WindowsRelease\net45\Microsoft.Xna.Framework.Xact.dll "%mcxna%" /y call python ZipAndMakeExecutable.py "%mcxna%" "%mcxna%.zip" :: PDB backups copy ..\src\tModLoader\bin\WindowsRelease\net45\tModLoader.pdb "%pdbs%\WindowsRelease.pdb" /y copy ..\src\tModLoader\bin\WindowsServerRelease\net45\tModLoaderServer.pdb "%pdbs%\WindowsServerRelease.pdb" /y copy ..\src\tModLoader\bin\MacRelease\net45\tModLoader.pdb "%pdbs%\MacRelease.pdb" /y copy ..\src\tModLoader\bin\MacServerRelease\net45\tModLoaderServer.pdb "%pdbs%\MacServerRelease.pdb" /y copy ..\src\tModLoader\bin\LinuxRelease\net45\tModLoader.pdb "%pdbs%\LinuxRelease.pdb" /y copy ..\src\tModLoader\bin\LinuxServerRelease\net45\tModLoaderServer.pdb "%pdbs%\LinuxServerRelease.pdb" /y call python ZipAndMakeExecutable.py "%pdbs%" "%pdbs%.zip" :: SharedSteam echo|set /p="1281930" > "%sharedsteam%\steam_appid.txt" :: CleanUp, Delete temp Folders rmdir "%win%" /S /Q rmdir "%mac%" /S /Q rmdir "%lnx%" /S /Q rmdir "%mcfna%" /S /Q rmdir "%mcxna%" /S /Q rmdir "%pdbs%" /S /Q :: ExampleMod.zip (TODO, other parts of ExampleMod release) rmdir ..\ExampleMod\bin /S /Q rmdir ..\ExampleMod\obj /S /Q rmdir ..\ExampleMod\.vs /S /Q call python ZipAndMakeExecutable.py "..\ExampleMod" "%destinationFolder%\ExampleMod %tModLoaderVersion%.zip" ExampleMod\ echo( echo( echo( echo tModLoader %tModLoaderVersion% ready to release. echo Upload the 6 zip files to github. echo( echo( pause
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <title>The page you were looking for doesn't exist (404)</title> <style type="text/css"> body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; } div.dialog { width: 25em; padding: 0 4em; margin: 4em auto 0 auto; border: 1px solid #ccc; border-right-color: #999; border-bottom-color: #999; } h1 { font-size: 100%; color: #f00; line-height: 1.5em; } </style> </head> <body> <!-- This file lives in public/404.html --> <div class="dialog"> <h1>The page you were looking for doesn't exist.</h1> <p>You may have mistyped the address or the page may have moved.</p> </div> </body> </html>
{ "pile_set_name": "Github" }
-- Empty the table quickly, then remove it entirely.
TRUNCATE function_results;
DROP TABLE function_results
{ "pile_set_name": "Github" }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Buffers.Text
{
    public static partial class Utf16Parser
    {
        /// <summary>
        /// Parses a decimal from a raw UTF-16 character buffer.
        /// Thin wrapper over the span-based overload; the consumed-character
        /// count is computed but discarded.
        /// </summary>
        unsafe static bool TryParseDecimal(char* text, int length, out decimal value)
        {
            var span = new ReadOnlySpan<char>(text, length);
            return TryParseDecimal(span, out value, out int consumed);
        }

        /// <summary>
        /// Parses a decimal from a raw UTF-16 character buffer, also reporting
        /// how many characters were consumed.
        /// </summary>
        unsafe static bool TryParseDecimal(char* text, int length, out decimal value, out int charactersConsumed)
        {
            var span = new ReadOnlySpan<char>(text, length);
            return TryParseDecimal(span, out value, out charactersConsumed);
        }

        /// <summary>
        /// Parses a decimal from a span of characters; the consumed-character
        /// count is computed but discarded.
        /// </summary>
        public static bool TryParseDecimal(ReadOnlySpan<char> text, out decimal value)
        {
            return TryParseDecimal(text, out value, out int consumed);
        }

        /// <summary>
        /// Parses an optionally signed decimal number ("[+/-]digits[.digits]")
        /// from the start of <paramref name="text"/>. Parsing stops at the
        /// first character that is neither a digit nor the first '.'; whatever
        /// was accumulated up to that point is handed to
        /// <see cref="decimal.TryParse(string, out decimal)"/>.
        /// </summary>
        /// <param name="text">Characters to parse from.</param>
        /// <param name="value">Receives the parsed value, or 0 on failure.</param>
        /// <param name="charactersConsumed">
        /// Receives the number of characters (sign, digits, decimal point)
        /// consumed, or 0 on failure.
        /// </param>
        /// <returns>true if a decimal was parsed; false otherwise.</returns>
        public static bool TryParseDecimal(ReadOnlySpan<char> text, out decimal value, out int charactersConsumed)
        {
            // Empty input: nothing to parse.
            if (text.Length < 1)
            {
                value = 0;
                charactersConsumed = 0;
                return false;
            }

            value = 0.0M;
            charactersConsumed = 0;
            // Accumulates the accepted characters; the final value is produced
            // by decimal.TryParse over this string.
            // NOTE(review): decimal.TryParse(string, out) is culture-sensitive —
            // confirm whether invariant-culture parsing was intended here.
            string decimalString = "";
            bool decimalPlace = false, signed = false;

            // An optional leading sign is accepted and counted as consumed.
            int indexOfFirstDigit = 0;
            if (text[0] == '-' || text[0] == '+')
            {
                signed = true;
                decimalString += text[0];
                indexOfFirstDigit = 1;
                charactersConsumed++;
            }

            for (int charIndex = indexOfFirstDigit; charIndex < text.Length; charIndex++)
            {
                char nextChar = text[charIndex];
                // char is unsigned, so for any character below '0' this wraps
                // to a large value: nextCharVal > 9 means "not a digit".
                char nextCharVal = (char)(nextChar - '0');
                if (nextCharVal > 9)
                {
                    if (!decimalPlace && nextChar == '.')
                    {
                        // First '.' seen: accept it; any later '.' is a terminator.
                        charactersConsumed++;
                        decimalPlace = true;
                        decimalString += nextChar;
                    }
                    else if ((decimalPlace && signed && charactersConsumed == 2) || ((signed || decimalPlace) && charactersConsumed == 1))
                    {
                        // Everything consumed so far is only a sign and/or a
                        // decimal point with no digits ("+", ".", "+.") — fail.
                        value = 0;
                        charactersConsumed = 0;
                        return false;
                    }
                    else
                    {
                        // Terminator character: parse what was accumulated.
                        if (decimal.TryParse(decimalString, out value))
                        {
                            return true;
                        }
                        else
                        {
                            charactersConsumed = 0;
                            return false;
                        }
                    }
                }
                else
                {
                    // Digit: accept and keep scanning.
                    charactersConsumed++;
                    decimalString += nextChar;
                }
            }

            // Input ended without a terminator — same "sign/point only" check
            // and final parse as inside the loop.
            if ((decimalPlace && signed && charactersConsumed == 2) || ((signed || decimalPlace) && charactersConsumed == 1))
            {
                value = 0;
                charactersConsumed = 0;
                return false;
            }
            else
            {
                if (decimal.TryParse(decimalString, out value))
                {
                    return true;
                }
                else
                {
                    charactersConsumed = 0;
                    return false;
                }
            }
        }
    }
}
{ "pile_set_name": "Github" }
{ "type": "object", "$schema": "http://json-schema.org/schema#", "x-kubernetes-group-version-kind": [ { "Kind": "Namespace", "Version": "v1", "Group": "" } ], "description": "Namespace provides a scope for Names. Use of multiple namespaces is optional.", "properties": { "status": { "additionalProperties": false, "description": "NamespaceStatus is information about the current status of a Namespace.", "properties": { "phase": { "type": [ "string", "null" ], "description": "Phase is the current lifecycle phase of the namespace. More info: http://releases.k8s.io/HEAD/docs/design/namespaces.md#phases" } } }, "kind": { "type": [ "string", "null" ], "description": "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds" }, "spec": { "additionalProperties": false, "description": "NamespaceSpec describes the attributes on a Namespace.", "properties": { "finalizers": { "items": { "type": [ "string", "null" ] }, "type": [ "array", "null" ], "description": "Finalizers is an opaque list of values that must be empty to permanently remove object from storage. More info: http://releases.k8s.io/HEAD/docs/design/namespaces.md#finalizers" } } }, "apiVersion": { "type": [ "string", "null" ], "description": "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources" }, "metadata": { "additionalProperties": false, "description": "ObjectMeta is metadata that all persisted resources must have, which includes all objects users must create.", "properties": { "ownerReferences": { "items": { "additionalProperties": false, "required": [ "apiVersion", "kind", "name", "uid" ], "description": "OwnerReference contains enough information to let you identify an owning object. Currently, an owning object must be in the same namespace, so there is no namespace field.", "properties": { "kind": { "type": "string", "description": "Kind of the referent. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds" }, "uid": { "type": "string", "description": "UID of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#uids" }, "apiVersion": { "type": "string", "description": "API version of the referent." }, "controller": { "type": "boolean", "description": "If true, this reference points to the managing controller." }, "blockOwnerDeletion": { "type": "boolean", "description": "If true, AND if the owner has the \"foregroundDeletion\" finalizer, then the owner cannot be deleted from the key-value store until this reference is removed. Defaults to false. To set this field, a user needs \"delete\" permission of the owner, otherwise 422 (Unprocessable Entity) will be returned." }, "name": { "type": "string", "description": "Name of the referent. More info: http://kubernetes.io/docs/user-guide/identifiers#names" } } }, "type": [ "array", "null" ], "description": "List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller." 
}, "name": { "type": [ "string", "null" ], "description": "Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names" }, "deletionTimestamp": { "type": [ "string", "null" ], "format": "date-time" }, "clusterName": { "type": [ "string", "null" ], "description": "The name of the cluster which the object belongs to. This is used to distinguish resources with same name and namespace in different clusters. This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request." }, "deletionGracePeriodSeconds": { "type": "integer", "description": "Number of seconds allowed for this object to gracefully terminate before it will be removed from the system. Only set when deletionTimestamp is also set. May only be shortened. Read-only.", "format": "int64" }, "labels": { "additionalProperties": { "type": [ "string", "null" ] }, "type": "object", "description": "Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels" }, "namespace": { "type": [ "string", "null" ], "description": "Namespace defines the space within each name must be unique. An empty namespace is equivalent to the \"default\" namespace, but \"default\" is the canonical representation. Not all objects are required to be scoped to a namespace - the value of this field for those objects will be empty.\n\nMust be a DNS_LABEL. Cannot be updated. 
More info: http://kubernetes.io/docs/user-guide/namespaces" }, "generation": { "type": "integer", "description": "A sequence number representing a specific generation of the desired state. Populated by the system. Read-only.", "format": "int64" }, "finalizers": { "items": { "type": [ "string", "null" ] }, "type": [ "array", "null" ], "description": "Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed." }, "resourceVersion": { "type": [ "string", "null" ], "description": "An opaque value that represents the internal version of this object that can be used by clients to determine when objects have changed. May be used for optimistic concurrency, change detection, and the watch operation on a resource or set of resources. Clients must treat these values as opaque and passed unmodified back to the server. They may only be valid for a particular resource or set of resources.\n\nPopulated by the system. Read-only. Value must be treated as opaque by clients and . More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#concurrency-control-and-consistency" }, "generateName": { "type": [ "string", "null" ], "description": "GenerateName is an optional prefix, used by the server, to generate a unique name ONLY IF the Name field has not been provided. If this field is used, the name returned to the client will be different than the name passed. This value will also be combined with a unique suffix. 
The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server.\n\nIf this field is specified and the generated name exists, the server will NOT return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout indicating a unique name could not be found in the time allotted, and the client should retry (optionally after the time indicated in the Retry-After header).\n\nApplied only if Name is not specified. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#idempotency" }, "creationTimestamp": { "type": [ "string", "null" ], "format": "date-time" }, "annotations": { "additionalProperties": { "type": [ "string", "null" ] }, "type": "object", "description": "Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations" }, "selfLink": { "type": [ "string", "null" ], "description": "SelfLink is a URL representing this object. Populated by the system. Read-only." }, "uid": { "type": [ "string", "null" ], "description": "UID is the unique in time and space value for this object. It is typically generated by the server on successful creation of a resource and is not allowed to change on PUT operations.\n\nPopulated by the system. Read-only. More info: http://kubernetes.io/docs/user-guide/identifiers#uids" } } } } }
{ "pile_set_name": "Github" }
<?php
/**
 * Smarty Method RegisterObject
 *
 * Smarty::registerObject() method
 *
 * @package Smarty
 * @subpackage PluginsInternal
 * @author Uwe Tews
 */
class Smarty_Internal_Method_RegisterObject
{
    /**
     * Valid for Smarty and template object
     *
     * @var int
     */
    public $objMap = 3;

    /**
     * Registers object to be used in templates
     *
     * Validates the requested method/property whitelists against the supplied
     * object, then stores it in the Smarty registry under $object_name.
     *
     * @api Smarty::registerObject()
     * @link http://www.smarty.net/docs/en/api.register.object.tpl
     *
     * @param \Smarty_Internal_TemplateBase|\Smarty_Internal_Template|\Smarty $obj
     * @param string $object_name                name the object is registered under
     * @param object $object                     the referenced PHP object to register
     * @param array  $allowed_methods_properties list of allowed methods (empty = all)
     * @param bool   $format                     smarty argument format, else traditional
     * @param array  $block_methods              list of block-methods
     *
     * @return \Smarty|\Smarty_Internal_Template
     * @throws \SmartyException
     */
    public function registerObject(Smarty_Internal_TemplateBase $obj, $object_name, $object,
                                   $allowed_methods_properties = array(), $format = true, $block_methods = array())
    {
        $smarty = $obj->_getSmartyObj();
        // Each whitelisted entry must resolve to a callable method or an
        // existing property on the object being registered.
        if (!empty($allowed_methods_properties)) {
            foreach ((array) $allowed_methods_properties as $method) {
                $isUsable = is_callable(array($object, $method)) || property_exists($object, $method);
                if (!$isUsable) {
                    throw new SmartyException("Undefined method or property '$method' in registered object");
                }
            }
        }
        // Block handlers must be callable methods.
        if (!empty($block_methods)) {
            foreach ((array) $block_methods as $method) {
                if (!is_callable(array($object, $method))) {
                    throw new SmartyException("Undefined method '$method' in registered object");
                }
            }
        }
        // Store the object together with its normalized access rules.
        $smarty->registered_objects[ $object_name ] =
            array($object, (array) $allowed_methods_properties, (boolean) $format, (array) $block_methods);
        return $obj;
    }
}
{ "pile_set_name": "Github" }
// Copyright 2019 The original author or authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package layout import ( "os" "path/filepath" ) // FromPath reads an OCI image layout at path and constructs a layout.Path. func FromPath(path string) (Path, error) { // TODO: check oci-layout exists _, err := os.Stat(filepath.Join(path, "index.json")) if err != nil { return "", err } return Path(path), nil }
{ "pile_set_name": "Github" }
/* Copyright 2015 Brock Reeve
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace Pickaxe.Sdk
{
    /// <summary>
    /// Base class for AST nodes that can carry a table alias and an optional
    /// inner-join clause. Both accessors search this node's Children
    /// collection (inherited from AstNode — declared elsewhere).
    /// </summary>
    public abstract class AliasBase : AstNode
    {
        /// <summary>The statement node this alias applies to.</summary>
        public abstract AstNode Statement { get; }

        /// <summary>
        /// The single TableAlias child of this node, or null when none exists.
        /// SingleOrDefault throws if more than one TableAlias child is present.
        /// </summary>
        public TableAlias Alias
        {
            get { return Children.Where(x => x.GetType() == typeof(TableAlias)).Cast<TableAlias>().SingleOrDefault(); }
        }

        /// <summary>
        /// The single InnerJoinStatement child of this node, or null when none
        /// exists. SingleOrDefault throws if more than one is present.
        /// </summary>
        public InnerJoinStatement Join
        {
            get { return Children.Where(x => x.GetType() == typeof(InnerJoinStatement)).Cast<InnerJoinStatement>().SingleOrDefault(); }
        }
    }
}
{ "pile_set_name": "Github" }
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!114 &11400000 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 0} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 693374c076b9a4144ac7666774b98408, type: 3} m_Name: Hotkey Item - Action Bar 4 Button 1 m_EditorClassIdentifier: input: {fileID: 11400000, guid: 899dff3e7662461489d8d3e369ab42b3, type: 2} id: Action Bar 4 Button 1 key: 49 modifier: 2
{ "pile_set_name": "Github" }
/* ClipBoard.java
 *
 * created: Mon Oct 26 1998
 *
 * This file is part of Artemis
 *
 * Copyright (C) 1998,1999,2000 Genome Research Limited
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 *
 * $Header: //tmp/pathsoft/artemis/uk/ac/sanger/artemis/ClipBoard.java,v 1.1 2004-06-09 09:44:14 tjc Exp $
 **/

package uk.ac.sanger.artemis;

/**
 * A simple clipboard for Diana which can interact with the system clipboard.
 * Any Object can be stored in this clipboard but only some will be useful to
 * other programs.
 *
 * NOTE(review): no system-clipboard integration is visible in this class
 * itself — it is a plain holder for the last clipped Selection; confirm the
 * interaction happens elsewhere.
 *
 * @author Kim Rutherford
 * @version $Id: ClipBoard.java,v 1.1 2004-06-09 09:44:14 tjc Exp $
 **/
public class ClipBoard
{
  /**
   * Create a new, empty ClipBoard object.
   **/
  public ClipBoard ()
  {
  }

  /**
   * Set the contents of the clipboard, replacing any previous clip.
   * @param clip The new clipboard contents.
   **/
  public void setClip (Selection clip)
  {
    this.clip = clip;
  }

  /**
   * Return the current contents of the clipboard, or null if nothing has
   * been clipped yet.
   **/
  public Selection getClip ()
  {
    return clip;
  }

  /**
   * Holds whatever was last clipped.  Package-visible; null until the first
   * setClip() call.
   **/
  Selection clip;
}
{ "pile_set_name": "Github" }
/* Vietnamese initialisation for the jQuery UI date picker plugin. */ /* Translated by Le Thanh Huy ([email protected]). */ jQuery(function($){ $.datepicker.regional['vi'] = { closeText: 'Đóng', prevText: '&#x3C;Trước', nextText: 'Tiếp&#x3E;', currentText: 'Hôm nay', monthNames: ['Tháng Một', 'Tháng Hai', 'Tháng Ba', 'Tháng Tư', 'Tháng Năm', 'Tháng Sáu', 'Tháng Bảy', 'Tháng Tám', 'Tháng Chín', 'Tháng Mười', 'Tháng Mười Một', 'Tháng Mười Hai'], monthNamesShort: ['Tháng 1', 'Tháng 2', 'Tháng 3', 'Tháng 4', 'Tháng 5', 'Tháng 6', 'Tháng 7', 'Tháng 8', 'Tháng 9', 'Tháng 10', 'Tháng 11', 'Tháng 12'], dayNames: ['Chủ Nhật', 'Thứ Hai', 'Thứ Ba', 'Thứ Tư', 'Thứ Năm', 'Thứ Sáu', 'Thứ Bảy'], dayNamesShort: ['CN', 'T2', 'T3', 'T4', 'T5', 'T6', 'T7'], dayNamesMin: ['CN', 'T2', 'T3', 'T4', 'T5', 'T6', 'T7'], weekHeader: 'Tu', dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false, showMonthAfterYear: false, yearSuffix: ''}; $.datepicker.setDefaults($.datepicker.regional['vi']); });
{ "pile_set_name": "Github" }
//===- llvm/Target/TargetSchedule.cpp - Sched Machine Model ---------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // This file implements a wrapper around MCSchedModel that allows the interface // to benefit from information currently only available in TargetInstrInfo. // //===----------------------------------------------------------------------===// #include "llvm/CodeGen/TargetSchedule.h" #include "llvm/CodeGen/MachineFunction.h" #include "llvm/CodeGen/MachineInstr.h" #include "llvm/CodeGen/MachineOperand.h" #include "llvm/CodeGen/TargetInstrInfo.h" #include "llvm/CodeGen/TargetRegisterInfo.h" #include "llvm/CodeGen/TargetSubtargetInfo.h" #include "llvm/MC/MCInstrDesc.h" #include "llvm/MC/MCInstrItineraries.h" #include "llvm/MC/MCSchedule.h" #include "llvm/Support/CommandLine.h" #include "llvm/Support/ErrorHandling.h" #include "llvm/Support/raw_ostream.h" #include <algorithm> #include <cassert> #include <cstdint> using namespace llvm; static cl::opt<bool> EnableSchedModel("schedmodel", cl::Hidden, cl::init(true), cl::desc("Use TargetSchedModel for latency lookup")); static cl::opt<bool> EnableSchedItins("scheditins", cl::Hidden, cl::init(true), cl::desc("Use InstrItineraryData for latency lookup")); bool TargetSchedModel::hasInstrSchedModel() const { return EnableSchedModel && SchedModel.hasInstrSchedModel(); } bool TargetSchedModel::hasInstrItineraries() const { return EnableSchedItins && !InstrItins.isEmpty(); } static unsigned gcd(unsigned Dividend, unsigned Divisor) { // Dividend and Divisor will be naturally swapped as needed. 
while (Divisor) { unsigned Rem = Dividend % Divisor; Dividend = Divisor; Divisor = Rem; }; return Dividend; } static unsigned lcm(unsigned A, unsigned B) { unsigned LCM = (uint64_t(A) * B) / gcd(A, B); assert((LCM >= A && LCM >= B) && "LCM overflow"); return LCM; } void TargetSchedModel::init(const TargetSubtargetInfo *TSInfo) { STI = TSInfo; SchedModel = TSInfo->getSchedModel(); TII = TSInfo->getInstrInfo(); STI->initInstrItins(InstrItins); unsigned NumRes = SchedModel.getNumProcResourceKinds(); ResourceFactors.resize(NumRes); ResourceLCM = SchedModel.IssueWidth; for (unsigned Idx = 0; Idx < NumRes; ++Idx) { unsigned NumUnits = SchedModel.getProcResource(Idx)->NumUnits; if (NumUnits > 0) ResourceLCM = lcm(ResourceLCM, NumUnits); } MicroOpFactor = ResourceLCM / SchedModel.IssueWidth; for (unsigned Idx = 0; Idx < NumRes; ++Idx) { unsigned NumUnits = SchedModel.getProcResource(Idx)->NumUnits; ResourceFactors[Idx] = NumUnits ? (ResourceLCM / NumUnits) : 0; } } /// Returns true only if instruction is specified as single issue. bool TargetSchedModel::mustBeginGroup(const MachineInstr *MI, const MCSchedClassDesc *SC) const { if (hasInstrSchedModel()) { if (!SC) SC = resolveSchedClass(MI); if (SC->isValid()) return SC->BeginGroup; } return false; } bool TargetSchedModel::mustEndGroup(const MachineInstr *MI, const MCSchedClassDesc *SC) const { if (hasInstrSchedModel()) { if (!SC) SC = resolveSchedClass(MI); if (SC->isValid()) return SC->EndGroup; } return false; } unsigned TargetSchedModel::getNumMicroOps(const MachineInstr *MI, const MCSchedClassDesc *SC) const { if (hasInstrItineraries()) { int UOps = InstrItins.getNumMicroOps(MI->getDesc().getSchedClass()); return (UOps >= 0) ? UOps : TII->getNumMicroOps(&InstrItins, *MI); } if (hasInstrSchedModel()) { if (!SC) SC = resolveSchedClass(MI); if (SC->isValid()) return SC->NumMicroOps; } return MI->isTransient() ? 
0 : 1; } // The machine model may explicitly specify an invalid latency, which // effectively means infinite latency. Since users of the TargetSchedule API // don't know how to handle this, we convert it to a very large latency that is // easy to distinguish when debugging the DAG but won't induce overflow. static unsigned capLatency(int Cycles) { return Cycles >= 0 ? Cycles : 1000; } /// Return the MCSchedClassDesc for this instruction. Some SchedClasses require /// evaluation of predicates that depend on instruction operands or flags. const MCSchedClassDesc *TargetSchedModel:: resolveSchedClass(const MachineInstr *MI) const { // Get the definition's scheduling class descriptor from this machine model. unsigned SchedClass = MI->getDesc().getSchedClass(); const MCSchedClassDesc *SCDesc = SchedModel.getSchedClassDesc(SchedClass); if (!SCDesc->isValid()) return SCDesc; #ifndef NDEBUG unsigned NIter = 0; #endif while (SCDesc->isVariant()) { assert(++NIter < 6 && "Variants are nested deeper than the magic number"); SchedClass = STI->resolveSchedClass(SchedClass, MI, this); SCDesc = SchedModel.getSchedClassDesc(SchedClass); } return SCDesc; } /// Find the def index of this operand. This index maps to the machine model and /// is independent of use operands. Def operands may be reordered with uses or /// merged with uses without affecting the def index (e.g. before/after /// regalloc). However, an instruction's def operands must never be reordered /// with respect to each other. static unsigned findDefIdx(const MachineInstr *MI, unsigned DefOperIdx) { unsigned DefIdx = 0; for (unsigned i = 0; i != DefOperIdx; ++i) { const MachineOperand &MO = MI->getOperand(i); if (MO.isReg() && MO.isDef()) ++DefIdx; } return DefIdx; } /// Find the use index of this operand. This is independent of the instruction's /// def operands. /// /// Note that uses are not determined by the operand's isUse property, which /// is simply the inverse of isDef. 
Here we consider any readsReg operand to be /// a "use". The machine model allows an operand to be both a Def and Use. static unsigned findUseIdx(const MachineInstr *MI, unsigned UseOperIdx) { unsigned UseIdx = 0; for (unsigned i = 0; i != UseOperIdx; ++i) { const MachineOperand &MO = MI->getOperand(i); if (MO.isReg() && MO.readsReg() && !MO.isDef()) ++UseIdx; } return UseIdx; } // Top-level API for clients that know the operand indices. unsigned TargetSchedModel::computeOperandLatency( const MachineInstr *DefMI, unsigned DefOperIdx, const MachineInstr *UseMI, unsigned UseOperIdx) const { if (!hasInstrSchedModel() && !hasInstrItineraries()) return TII->defaultDefLatency(SchedModel, *DefMI); if (hasInstrItineraries()) { int OperLatency = 0; if (UseMI) { OperLatency = TII->getOperandLatency(&InstrItins, *DefMI, DefOperIdx, *UseMI, UseOperIdx); } else { unsigned DefClass = DefMI->getDesc().getSchedClass(); OperLatency = InstrItins.getOperandCycle(DefClass, DefOperIdx); } if (OperLatency >= 0) return OperLatency; // No operand latency was found. unsigned InstrLatency = TII->getInstrLatency(&InstrItins, *DefMI); // Expected latency is the max of the stage latency and itinerary props. // Rather than directly querying InstrItins stage latency, we call a TII // hook to allow subtargets to specialize latency. This hook is only // applicable to the InstrItins model. InstrSchedModel should model all // special cases without TII hooks. InstrLatency = std::max(InstrLatency, TII->defaultDefLatency(SchedModel, *DefMI)); return InstrLatency; } // hasInstrSchedModel() const MCSchedClassDesc *SCDesc = resolveSchedClass(DefMI); unsigned DefIdx = findDefIdx(DefMI, DefOperIdx); if (DefIdx < SCDesc->NumWriteLatencyEntries) { // Lookup the definition's write latency in SubtargetInfo. 
const MCWriteLatencyEntry *WLEntry = STI->getWriteLatencyEntry(SCDesc, DefIdx); unsigned WriteID = WLEntry->WriteResourceID; unsigned Latency = capLatency(WLEntry->Cycles); if (!UseMI) return Latency; // Lookup the use's latency adjustment in SubtargetInfo. const MCSchedClassDesc *UseDesc = resolveSchedClass(UseMI); if (UseDesc->NumReadAdvanceEntries == 0) return Latency; unsigned UseIdx = findUseIdx(UseMI, UseOperIdx); int Advance = STI->getReadAdvanceCycles(UseDesc, UseIdx, WriteID); if (Advance > 0 && (unsigned)Advance > Latency) // unsigned wrap return 0; return Latency - Advance; } // If DefIdx does not exist in the model (e.g. implicit defs), then return // unit latency (defaultDefLatency may be too conservative). #ifndef NDEBUG if (SCDesc->isValid() && !DefMI->getOperand(DefOperIdx).isImplicit() && !DefMI->getDesc().OpInfo[DefOperIdx].isOptionalDef() && SchedModel.isComplete()) { errs() << "DefIdx " << DefIdx << " exceeds machine model writes for " << *DefMI << " (Try with MCSchedModel.CompleteModel set to false)"; llvm_unreachable("incomplete machine model"); } #endif // FIXME: Automatically giving all implicit defs defaultDefLatency is // undesirable. We should only do it for defs that are known to the MC // desc like flags. Truly implicit defs should get 1 cycle latency. return DefMI->isTransient() ? 
0 : TII->defaultDefLatency(SchedModel, *DefMI); } unsigned TargetSchedModel::computeInstrLatency(const MCSchedClassDesc &SCDesc) const { return capLatency(MCSchedModel::computeInstrLatency(*STI, SCDesc)); } unsigned TargetSchedModel::computeInstrLatency(unsigned Opcode) const { assert(hasInstrSchedModel() && "Only call this function with a SchedModel"); unsigned SCIdx = TII->get(Opcode).getSchedClass(); return capLatency(SchedModel.computeInstrLatency(*STI, SCIdx)); } unsigned TargetSchedModel::computeInstrLatency(const MCInst &Inst) const { if (hasInstrSchedModel()) return capLatency(SchedModel.computeInstrLatency(*STI, *TII, Inst)); return computeInstrLatency(Inst.getOpcode()); } unsigned TargetSchedModel::computeInstrLatency(const MachineInstr *MI, bool UseDefaultDefLatency) const { // For the itinerary model, fall back to the old subtarget hook. // Allow subtargets to compute Bundle latencies outside the machine model. if (hasInstrItineraries() || MI->isBundle() || (!hasInstrSchedModel() && !UseDefaultDefLatency)) return TII->getInstrLatency(&InstrItins, *MI); if (hasInstrSchedModel()) { const MCSchedClassDesc *SCDesc = resolveSchedClass(MI); if (SCDesc->isValid()) return computeInstrLatency(*SCDesc); } return TII->defaultDefLatency(SchedModel, *MI); } unsigned TargetSchedModel:: computeOutputLatency(const MachineInstr *DefMI, unsigned DefOperIdx, const MachineInstr *DepMI) const { if (!SchedModel.isOutOfOrder()) return 1; // Out-of-order processor can dispatch WAW dependencies in the same cycle. // Treat predication as a data dependency for out-of-order cpus. In-order // cpus do not need to treat predicated writes specially. // // TODO: The following hack exists because predication passes do not // correctly append imp-use operands, and readsReg() strangely returns false // for predicated defs. 
Register Reg = DefMI->getOperand(DefOperIdx).getReg(); const MachineFunction &MF = *DefMI->getMF(); const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo(); if (!DepMI->readsRegister(Reg, TRI) && TII->isPredicated(*DepMI)) return computeInstrLatency(DefMI); // If we have a per operand scheduling model, check if this def is writing // an unbuffered resource. If so, it treated like an in-order cpu. if (hasInstrSchedModel()) { const MCSchedClassDesc *SCDesc = resolveSchedClass(DefMI); if (SCDesc->isValid()) { for (const MCWriteProcResEntry *PRI = STI->getWriteProcResBegin(SCDesc), *PRE = STI->getWriteProcResEnd(SCDesc); PRI != PRE; ++PRI) { if (!SchedModel.getProcResource(PRI->ProcResourceIdx)->BufferSize) return 1; } } } return 0; } double TargetSchedModel::computeReciprocalThroughput(const MachineInstr *MI) const { if (hasInstrItineraries()) { unsigned SchedClass = MI->getDesc().getSchedClass(); return MCSchedModel::getReciprocalThroughput(SchedClass, *getInstrItineraries()); } if (hasInstrSchedModel()) return MCSchedModel::getReciprocalThroughput(*STI, *resolveSchedClass(MI)); return 0.0; } double TargetSchedModel::computeReciprocalThroughput(unsigned Opcode) const { unsigned SchedClass = TII->get(Opcode).getSchedClass(); if (hasInstrItineraries()) return MCSchedModel::getReciprocalThroughput(SchedClass, *getInstrItineraries()); if (hasInstrSchedModel()) { const MCSchedClassDesc &SCDesc = *SchedModel.getSchedClassDesc(SchedClass); if (SCDesc.isValid() && !SCDesc.isVariant()) return MCSchedModel::getReciprocalThroughput(*STI, SCDesc); } return 0.0; } double TargetSchedModel::computeReciprocalThroughput(const MCInst &MI) const { if (hasInstrSchedModel()) return SchedModel.getReciprocalThroughput(*STI, *TII, MI); return computeReciprocalThroughput(MI.getOpcode()); }
{ "pile_set_name": "Github" }
/** * Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm/` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * Used to convert characters to HTML entities: * * Though the `>` character is escaped for symmetry, characters like `>` and `/` * don't require escaping in HTML and have no special meaning unless they're part * of a tag or an unquoted attribute value. * http://mathiasbynens.be/notes/ambiguous-ampersands (under "semi-related fun fact") */ var htmlEscapes = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }; module.exports = htmlEscapes;
{ "pile_set_name": "Github" }
// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
// Use of this file is governed by the BSD 3-clause license that
// can be found in the LICENSE.txt file in the project root.

package antlr

import (
	"strconv"
	"strings"
)

// TokenSourceCharStreamPair couples the TokenSource that produced a token
// with the CharStream that source was reading from.
type TokenSourceCharStreamPair struct {
	tokenSource TokenSource
	charStream  CharStream
}

// A token has properties: text, type, line, character position in the line
// (so we can ignore tabs), token channel, index, and source from which
// we obtained this token.
type Token interface {
	GetSource() *TokenSourceCharStreamPair
	GetTokenType() int
	GetChannel() int
	GetStart() int
	GetStop() int
	GetLine() int
	GetColumn() int

	GetText() string
	SetText(s string)

	GetTokenIndex() int
	SetTokenIndex(v int)

	GetTokenSource() TokenSource
	GetInputStream() CharStream
}

// BaseToken holds the state common to every Token implementation.
type BaseToken struct {
	source     *TokenSourceCharStreamPair
	tokenType  int    // token type of the token
	channel    int    // The parser ignores everything not on DEFAULT_CHANNEL
	start      int    // optional return -1 if not implemented.
	stop       int    // optional return -1 if not implemented.
	tokenIndex int    // from 0..n-1 of the token object in the input stream
	line       int    // line=1..n of the 1st character
	column     int    // beginning of the line at which it occurs, 0..n-1
	text       string // text of the token.
	readOnly   bool
}

const (
	TokenInvalidType = 0

	// During lookahead operations, this "token" signifies we hit rule end ATN state
	// and did not follow it despite needing to.
	TokenEpsilon = -2

	TokenMinUserTokenType = 1

	TokenEOF = -1

	// All tokens go to the parser (unless Skip() is called in that rule)
	// on a particular "channel". The parser tunes to a particular channel
	// so that whitespace etc... can go to the parser on a "hidden" channel.
	TokenDefaultChannel = 0

	// Anything on different channel than DEFAULT_CHANNEL is not parsed
	// by parser.
	TokenHiddenChannel = 1
)

// GetChannel reports the channel this token was emitted on.
func (b *BaseToken) GetChannel() int {
	return b.channel
}

// GetStart returns the starting character index of the token (-1 if unset).
func (b *BaseToken) GetStart() int {
	return b.start
}

// GetStop returns the last character index of the token (-1 if unset).
func (b *BaseToken) GetStop() int {
	return b.stop
}

// GetLine returns the 1-based line on which the token's first character occurs.
func (b *BaseToken) GetLine() int {
	return b.line
}

// GetColumn returns the 0-based position within the line of the token's
// first character.
func (b *BaseToken) GetColumn() int {
	return b.column
}

// GetTokenType returns the token's type identifier.
func (b *BaseToken) GetTokenType() int {
	return b.tokenType
}

// GetSource returns the (token source, char stream) pair this token came from.
func (b *BaseToken) GetSource() *TokenSourceCharStreamPair {
	return b.source
}

// GetTokenIndex returns the token's index in the token stream (-1 if unset).
func (b *BaseToken) GetTokenIndex() int {
	return b.tokenIndex
}

// SetTokenIndex records the token's index in the token stream.
func (b *BaseToken) SetTokenIndex(v int) {
	b.tokenIndex = v
}

// GetTokenSource returns the TokenSource that produced this token.
func (b *BaseToken) GetTokenSource() TokenSource {
	return b.source.tokenSource
}

// GetInputStream returns the CharStream the token's source was reading from.
func (b *BaseToken) GetInputStream() CharStream {
	return b.source.charStream
}

// CommonToken is the default concrete Token implementation.
type CommonToken struct {
	*BaseToken
}

// NewCommonToken builds a CommonToken. When a token source is available the
// token inherits that source's current line and char position; otherwise the
// column defaults to -1. The token index starts at -1 (not yet in a stream).
func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start, stop int) *CommonToken {

	t := new(CommonToken)

	t.BaseToken = new(BaseToken)

	t.source = source
	t.tokenType = tokenType
	t.channel = channel
	t.start = start
	t.stop = stop
	t.tokenIndex = -1
	if t.source.tokenSource != nil {
		t.line = source.tokenSource.GetLine()
		t.column = source.tokenSource.GetCharPositionInLine()
	} else {
		t.column = -1
	}
	return t
}

// An empty {@link Pair} which is used as the default value of
// {@link //source} for tokens that do not have a source.

//CommonToken.EMPTY_SOURCE = [ nil, nil ]

// Constructs a New{@link CommonToken} as a copy of another {@link Token}.
//
// <p>
// If {@code oldToken} is also a {@link CommonToken} instance, the newly
// constructed token will share a reference to the {@link //text} field and
// the {@link Pair} stored in {@link //source}. Otherwise, {@link //text} will
// be assigned the result of calling {@link //GetText}, and {@link //source}
// will be constructed from the result of {@link Token//GetTokenSource} and
// {@link Token//GetInputStream}.</p>
//
// @param oldToken The token to copy.
//
func (c *CommonToken) clone() *CommonToken {
	t := NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
	t.tokenIndex = c.GetTokenIndex()
	t.line = c.GetLine()
	t.column = c.GetColumn()
	t.text = c.GetText()
	return t
}

// GetText returns the token's text. If the text field is non-empty it is
// returned as-is; otherwise the text is recovered from the input stream via
// the token's start/stop indexes, yielding "<EOF>" when the indexes fall
// past the end of the stream.
func (c *CommonToken) GetText() string {
	if c.text != "" {
		return c.text
	}
	input := c.GetInputStream()
	if input == nil {
		return ""
	}
	n := input.Size()
	if c.start < n && c.stop < n {
		return input.GetTextFromInterval(NewInterval(c.start, c.stop))
	}
	return "<EOF>"
}

// SetText overrides the token's text, shadowing whatever the input stream
// would yield.
func (c *CommonToken) SetText(text string) {
	c.text = text
}

// String renders the token in ANTLR's standard debug form:
// [@index,start:stop='text',<type>,channel=N,line:column]
// with newlines/tabs in the text escaped for single-line display.
func (c *CommonToken) String() string {
	txt := c.GetText()
	if txt != "" {
		txt = strings.Replace(txt, "\n", "\\n", -1)
		txt = strings.Replace(txt, "\r", "\\r", -1)
		txt = strings.Replace(txt, "\t", "\\t", -1)
	} else {
		txt = "<no text>"
	}

	var ch string
	// The default channel (0) is omitted from the rendering.
	if c.channel > 0 {
		ch = ",channel=" + strconv.Itoa(c.channel)
	} else {
		ch = ""
	}

	return "[@" + strconv.Itoa(c.tokenIndex) + "," + strconv.Itoa(c.start) + ":" + strconv.Itoa(c.stop) + "='" +
		txt + "',<" + strconv.Itoa(c.tokenType) + ">" +
		ch + "," + strconv.Itoa(c.line) + ":" + strconv.Itoa(c.column) + "]"
}
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

#ifndef _GUAC_PARSER_H
#define _GUAC_PARSER_H

/**
 * Provides functions and structures for parsing the Guacamole protocol.
 *
 * @file parser.h
 */

#include "parser-types.h"
#include "parser-constants.h"
#include "socket-types.h"

struct guac_parser {

    /**
     * The opcode of the instruction.
     */
    char* opcode;

    /**
     * The number of arguments passed to this instruction.
     */
    int argc;

    /**
     * Array of all arguments passed to this instruction.
     */
    char** argv;

    /**
     * The parse state of the instruction.
     */
    guac_parse_state state;

    /**
     * The length of the current element, if known.
     */
    int __element_length;

    /**
     * The number of elements currently parsed.
     */
    int __elementc;

    /**
     * All currently parsed elements.
     */
    char* __elementv[GUAC_INSTRUCTION_MAX_ELEMENTS];

    /**
     * Pointer to the first character of the current in-progress instruction
     * within the buffer.
     */
    char* __instructionbuf_unparsed_start;

    /**
     * Pointer to the first unused section of the instruction buffer.
     */
    char* __instructionbuf_unparsed_end;

    /**
     * The instruction buffer. This is essentially the input buffer,
     * provided as a convenience to be used to buffer instructions until
     * those instructions are complete and ready to be parsed.
     */
    char __instructionbuf[32768];

};

/**
 * Allocates a new parser.
 *
 * @return The newly allocated parser, or NULL if an error occurs during
 *         allocation, in which case guac_error will be set appropriately.
 */
guac_parser* guac_parser_alloc();

/**
 * Appends data from the given buffer to the given parser. The data will be
 * appended, if possible, to the in-progress instruction as a reference and
 * thus the buffer must remain valid throughout the life of the current
 * instruction. This function may modify the contents of the buffer when those
 * contents are part of an element within the instruction being read.
 *
 * @param parser The parser to append data to.
 * @param buffer A buffer containing data that should be appended to this
 *               parser.
 * @param length The number of bytes available for appending within the buffer.
 * @return The number of bytes appended to this parser, which may be
 *         zero if more data is needed.
 */
int guac_parser_append(guac_parser* parser, void* buffer, int length);

/**
 * Returns the number of unparsed bytes stored in the given parser's internal
 * buffers.
 *
 * @param parser The parser to return the length of.
 * @return The number of unparsed bytes stored in the given parser.
 */
int guac_parser_length(guac_parser* parser);

/**
 * Removes up to length bytes from internal buffer of unparsed bytes, storing
 * them in the given buffer.
 *
 * @param parser The parser to remove unparsed bytes from.
 * @param buffer The buffer to store the unparsed bytes within.
 * @param length The length of the given buffer.
 * @return The number of bytes stored in the given buffer.
 */
int guac_parser_shift(guac_parser* parser, void* buffer, int length);

/**
 * Frees all memory allocated to the given parser.
 *
 * @param parser The parser to free.
 */
void guac_parser_free(guac_parser* parser);

/**
 * Reads a single instruction from the given guac_socket connection. This
 * may result in additional data being read from the guac_socket, stored
 * internally within a buffer for future parsing. Future calls to
 * guac_parser_read() will read from the internal buffer before reading
 * from the guac_socket. Data from the internal buffer can be removed
 * and used elsewhere through guac_parser_shift().
 *
 * If an error occurs reading the instruction, non-zero is returned,
 * and guac_error is set appropriately.
 *
 * @param parser The guac_parser to read instruction data from.
 * @param socket The guac_socket connection to use.
 * @param usec_timeout The maximum number of microseconds to wait before
 *                     giving up.
 * @return Zero if an instruction was read within the time allowed, or
 *         non-zero if no instruction could be read. If the instruction
 *         could not be read completely because the timeout elapsed,
 *         guac_error will be set to GUAC_STATUS_INPUT_TIMEOUT
 *         and additional calls to guac_parser_read() will be required.
 */
int guac_parser_read(guac_parser* parser, guac_socket* socket, int usec_timeout);

/**
 * Reads a single instruction from the given guac_socket. This operates
 * identically to guac_parser_read(), except that an error is returned if
 * the expected opcode is not received.
 *
 * If an error occurs reading the instruction, non-zero is returned,
 * and guac_error is set appropriately.
 *
 * If the instruction read is not the expected instruction, non-zero is
 * returned, and guac_error is set to GUAC_STATUS_BAD_STATE.
 *
 * @param parser The guac_parser to read instruction data from.
 * @param socket The guac_socket connection to use.
 * @param usec_timeout The maximum number of microseconds to wait before
 *                     giving up.
 * @param opcode The opcode of the instruction to read.
 * @return Zero if an instruction with the given opcode was read, non-zero
 *         otherwise. If an instruction was read, but the instruction had a
 *         different opcode, non-zero is returned and guac_error is set to
 *         GUAC_STATUS_BAD_STATE.
 */
int guac_parser_expect(guac_parser* parser, guac_socket* socket, int usec_timeout, const char* opcode);

#endif
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN" "http://www.puppycrawl.com/dtds/configuration_1_3.dtd"><!-- Checkstyle-Configuration: Android Checkstyle Description: none --> <module name="Checker"> <!--<property name="severity" value="warning" />--> <module name="FileLength"> <property name="max" value="3000" /> </module> <module name="FileTabCharacter" /> <!-- Trailing spaces --> <module name="RegexpSingleline"> <property name="format" value="\s+$" /> <property name="message" value="Line has trailing spaces." /> </module> <!-- Space after 'for' and 'if' --> <module name="RegexpSingleline"> <property name="format" value="^\s*(for|if)[^ ]\(" /> <property name="message" value="Space needed before opening parenthesis." /> </module> <!-- For each spacing --> <module name="RegexpSingleline"> <property name="format" value="^\s*for \(.*?([^ ]:|:[^ ])" /> <property name="message" value="Space needed around ':' character." /> </module> <module name="NewlineAtEndOfFile"> <property name="severity" value="ignore" /> <metadata name="net.sf.eclipsecs.core.lastEnabledSeverity" value="inherit" /> </module> <!-- exclusions --> <module name="SuppressionCommentFilter"> <property name="offCommentFormat" value="CHECKSTYLE.OFF\: ([\w\|]+)" /> <property name="onCommentFormat" value="CHECKSTYLE.ON\: ([\w\|]+)" /> <property name="checkFormat" value="$1" /> </module> <module name="SuppressWarningsFilter" /> <module name="Translation" /> <module name="TreeWalker"> <property name="cacheFile" value="target/cachefile" /> <property name="tabWidth" value="4" /> <module name="ArrayTypeStyle" /> <module name="AvoidNestedBlocks" /> <module name="AvoidStarImport" /> <module name="ConstantName"> <property name="format" value="^(_)?[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$" /> </module> <module name="CovariantEquals" /> <module name="CyclomaticComplexity"> <property name="max" value="15" /> </module> <module name="DesignForExtension"> <property 
name="severity" value="ignore" /> <metadata name="net.sf.eclipsecs.core.lastEnabledSeverity" value="inherit" /> </module> <!--<module name="EmptyBlock" />--> <module name="EmptyBlock"> <property name="option" value="text" /> <property name="tokens" value="LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY, LITERAL_IF, LITERAL_FOR, LITERAL_TRY, LITERAL_WHILE, INSTANCE_INIT, STATIC_INIT" /> </module> <module name="EmptyForIteratorPad" /> <module name="EmptyStatement" /> <module name="EqualsHashCode" /> <module name="FileContentsHolder" /> <module name="FinalClass" /> <module name="FinalParameters"> <property name="severity" value="ignore" /> <metadata name="net.sf.eclipsecs.core.lastEnabledSeverity" value="inherit" /> </module> <module name="GenericWhitespace" /> <module name="HideUtilityClassConstructor" /> <module name="IllegalImport" /> <module name="IllegalInstantiation" /> <module name="InnerAssignment" /> <!--<module name="InterfaceIsType" />--> <!--<module name="InnerTypeLast" />--> <module name="LeftCurly" /> <module name="LineLength"> <property name="max" value="132" /> </module> <module name="LocalFinalVariableName" /> <module name="LocalVariableName" /> <!-- Checks that there are no "magic numbers", where a magic number is a numeric literal that is not defined as a constant. 
--> <module name="MagicNumber"> <property name="tokens" value="NUM_DOUBLE, NUM_FLOAT" /> <property name="ignoreNumbers" value="0, 0.5, -0.5, 1, -1, -2, 2" /> </module> <module name="MemberName" /> <module name="MethodLength"> <property name="tokens" value="METHOD_DEF" /> <property name="max" value="200" /> <property name="countEmpty" value="false" /> </module> <module name="MethodName" /> <module name="MethodParamPad" /> <module name="MissingSwitchDefault" /> <module name="ModifierOrder" /> <module name="NeedBraces"> <property name="tokens" value="LITERAL_DO, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE" /> </module> <module name="NoWhitespaceAfter"> <!--<property name="tokens" value="BNOT,DEC,DOT,INC,LNOT,UNARY_MINUS,UNARY_PLUS" />--> </module> <module name="NoWhitespaceBefore" /> <module name="OperatorWrap" /> <module name="PackageName" /> <module name="ParameterName" /> <module name="ParameterNumber"> <property name="max" value="12" /> </module> <module name="ParenPad" /> <module name="RightCurly" /> <module name="RedundantImport" /> <module name="RedundantModifier" /> <module name="SimplifyBooleanExpression" /> <module name="SimplifyBooleanReturn" /> <module name="StaticVariableName" /> <module name="StringLiteralEquality" /> <module name="SuperFinalize" /> <module name="SuppressWarningsHolder" /> <module name="TodoComment"> <property name="severity" value="ignore" /> <metadata name="net.sf.eclipsecs.core.lastEnabledSeverity" value="inherit" /> </module> <module name="TypecastParenPad" /> <module name="TypeName" /> <module name="UnusedImports"> <property name="processJavadoc" value="true" /> </module> <module name="UpperEll" /> <module name="WhitespaceAfter" /> <module name="WhitespaceAround"> <property name="allowEmptyMethods" value="true" /> </module> <!-- <module name="HiddenField"/> --> </module> </module>
{ "pile_set_name": "Github" }
package network // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. import ( "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "net/http" ) // VirtualNetworksClient is the the Microsoft Azure Network management API // provides a RESTful set of web services that interact with Microsoft Azure // Networks service to manage your network resources. The API has entities // that capture the relationship between an end user and the Microsoft Azure // Networks service. type VirtualNetworksClient struct { ManagementClient } // NewVirtualNetworksClient creates an instance of the VirtualNetworksClient // client. func NewVirtualNetworksClient(subscriptionID string) VirtualNetworksClient { return NewVirtualNetworksClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewVirtualNetworksClientWithBaseURI creates an instance of the // VirtualNetworksClient client. func NewVirtualNetworksClientWithBaseURI(baseURI string, subscriptionID string) VirtualNetworksClient { return VirtualNetworksClient{NewWithBaseURI(baseURI, subscriptionID)} } // CheckIPAddressAvailability checks whether a private Ip address is available // for use. // // resourceGroupName is the name of the resource group. 
virtualNetworkName is // the name of the virtual network. ipAddress is the private IP address to be // verified. func (client VirtualNetworksClient) CheckIPAddressAvailability(resourceGroupName string, virtualNetworkName string, ipAddress string) (result IPAddressAvailabilityResult, err error) { req, err := client.CheckIPAddressAvailabilityPreparer(resourceGroupName, virtualNetworkName, ipAddress) if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "CheckIPAddressAvailability", nil, "Failure preparing request") } resp, err := client.CheckIPAddressAvailabilitySender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "CheckIPAddressAvailability", resp, "Failure sending request") } result, err = client.CheckIPAddressAvailabilityResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "CheckIPAddressAvailability", resp, "Failure responding to request") } return } // CheckIPAddressAvailabilityPreparer prepares the CheckIPAddressAvailability request. 
func (client VirtualNetworksClient) CheckIPAddressAvailabilityPreparer(resourceGroupName string, virtualNetworkName string, ipAddress string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "virtualNetworkName": autorest.Encode("path", virtualNetworkName), } queryParameters := map[string]interface{}{ "api-version": client.APIVersion, } if len(ipAddress) > 0 { queryParameters["ipAddress"] = autorest.Encode("query", ipAddress) } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/CheckIPAddressAvailability", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare(&http.Request{}) } // CheckIPAddressAvailabilitySender sends the CheckIPAddressAvailability request. The method will close the // http.Response Body if it receives an error. func (client VirtualNetworksClient) CheckIPAddressAvailabilitySender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // CheckIPAddressAvailabilityResponder handles the response to the CheckIPAddressAvailability request. The method always // closes the http.Response Body. func (client VirtualNetworksClient) CheckIPAddressAvailabilityResponder(resp *http.Response) (result IPAddressAvailabilityResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // CreateOrUpdate the Put VirtualNetwork operation creates/updates a virtual // network in the specified resource group. This method may poll for // completion. 
Polling can be canceled by passing the cancel channel // argument. The channel will be used to cancel polling and any outstanding // HTTP requests. // // resourceGroupName is the name of the resource group. virtualNetworkName is // the name of the virtual network. parameters is parameters supplied to the // create/update Virtual Network operation func (client VirtualNetworksClient) CreateOrUpdate(resourceGroupName string, virtualNetworkName string, parameters VirtualNetwork, cancel <-chan struct{}) (result autorest.Response, err error) { req, err := client.CreateOrUpdatePreparer(resourceGroupName, virtualNetworkName, parameters, cancel) if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "CreateOrUpdate", nil, "Failure preparing request") } resp, err := client.CreateOrUpdateSender(req) if err != nil { result.Response = resp return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "CreateOrUpdate", resp, "Failure sending request") } result, err = client.CreateOrUpdateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "CreateOrUpdate", resp, "Failure responding to request") } return } // CreateOrUpdatePreparer prepares the CreateOrUpdate request. 
func (client VirtualNetworksClient) CreateOrUpdatePreparer(resourceGroupName string, virtualNetworkName string, parameters VirtualNetwork, cancel <-chan struct{}) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "virtualNetworkName": autorest.Encode("path", virtualNetworkName), } queryParameters := map[string]interface{}{ "api-version": client.APIVersion, } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPut(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}", pathParameters), autorest.WithJSON(parameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare(&http.Request{Cancel: cancel}) } // CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the // http.Response Body if it receives an error. func (client VirtualNetworksClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoPollForAsynchronous(client.PollingDelay)) } // CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always // closes the http.Response Body. func (client VirtualNetworksClient) CreateOrUpdateResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), autorest.ByClosing()) result.Response = resp return } // Delete the Delete VirtualNetwork operation deletes the specified virtual // network This method may poll for completion. Polling can be canceled by // passing the cancel channel argument. The channel will be used to cancel // polling and any outstanding HTTP requests. 
// // resourceGroupName is the name of the resource group. virtualNetworkName is // the name of the virtual network. func (client VirtualNetworksClient) Delete(resourceGroupName string, virtualNetworkName string, cancel <-chan struct{}) (result autorest.Response, err error) { req, err := client.DeletePreparer(resourceGroupName, virtualNetworkName, cancel) if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "Delete", nil, "Failure preparing request") } resp, err := client.DeleteSender(req) if err != nil { result.Response = resp return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "Delete", resp, "Failure sending request") } result, err = client.DeleteResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "Delete", resp, "Failure responding to request") } return } // DeletePreparer prepares the Delete request. func (client VirtualNetworksClient) DeletePreparer(resourceGroupName string, virtualNetworkName string, cancel <-chan struct{}) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "virtualNetworkName": autorest.Encode("path", virtualNetworkName), } queryParameters := map[string]interface{}{ "api-version": client.APIVersion, } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare(&http.Request{Cancel: cancel}) } // DeleteSender sends the Delete request. The method will close the // http.Response Body if it receives an error. 
func (client VirtualNetworksClient) DeleteSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoPollForAsynchronous(client.PollingDelay)) } // DeleteResponder handles the response to the Delete request. The method always // closes the http.Response Body. func (client VirtualNetworksClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusAccepted, http.StatusNoContent, http.StatusOK), autorest.ByClosing()) result.Response = resp return } // Get the Get VirtualNetwork operation retrieves information about the // specified virtual network. // // resourceGroupName is the name of the resource group. virtualNetworkName is // the name of the virtual network. expand is expand references resources. func (client VirtualNetworksClient) Get(resourceGroupName string, virtualNetworkName string, expand string) (result VirtualNetwork, err error) { req, err := client.GetPreparer(resourceGroupName, virtualNetworkName, expand) if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "Get", nil, "Failure preparing request") } resp, err := client.GetSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "Get", resp, "Failure sending request") } result, err = client.GetResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "Get", resp, "Failure responding to request") } return } // GetPreparer prepares the Get request. 
func (client VirtualNetworksClient) GetPreparer(resourceGroupName string, virtualNetworkName string, expand string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "virtualNetworkName": autorest.Encode("path", virtualNetworkName), } queryParameters := map[string]interface{}{ "api-version": client.APIVersion, } if len(expand) > 0 { queryParameters["$expand"] = autorest.Encode("query", expand) } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare(&http.Request{}) } // GetSender sends the Get request. The method will close the // http.Response Body if it receives an error. func (client VirtualNetworksClient) GetSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // GetResponder handles the response to the Get request. The method always // closes the http.Response Body. func (client VirtualNetworksClient) GetResponder(resp *http.Response) (result VirtualNetwork, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // List the list VirtualNetwork returns all Virtual Networks in a resource // group // // resourceGroupName is the name of the resource group. 
func (client VirtualNetworksClient) List(resourceGroupName string) (result VirtualNetworkListResult, err error) { req, err := client.ListPreparer(resourceGroupName) if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "List", nil, "Failure preparing request") } resp, err := client.ListSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "List", resp, "Failure sending request") } result, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "List", resp, "Failure responding to request") } return } // ListPreparer prepares the List request. func (client VirtualNetworksClient) ListPreparer(resourceGroupName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } queryParameters := map[string]interface{}{ "api-version": client.APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare(&http.Request{}) } // ListSender sends the List request. The method will close the // http.Response Body if it receives an error. func (client VirtualNetworksClient) ListSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req) } // ListResponder handles the response to the List request. The method always // closes the http.Response Body. 
func (client VirtualNetworksClient) ListResponder(resp *http.Response) (result VirtualNetworkListResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListNextResults retrieves the next set of results, if any. func (client VirtualNetworksClient) ListNextResults(lastResults VirtualNetworkListResult) (result VirtualNetworkListResult, err error) { req, err := lastResults.VirtualNetworkListResultPreparer() if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "List", nil, "Failure preparing next results request") } if req == nil { return } resp, err := client.ListSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "List", resp, "Failure sending next results request") } result, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "List", resp, "Failure responding to next results request") } return } // ListAll the list VirtualNetwork returns all Virtual Networks in a // subscription func (client VirtualNetworksClient) ListAll() (result VirtualNetworkListResult, err error) { req, err := client.ListAllPreparer() if err != nil { return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "ListAll", nil, "Failure preparing request") } resp, err := client.ListAllSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "ListAll", resp, "Failure sending request") } result, err = client.ListAllResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "ListAll", resp, "Failure responding to request") } return } 
// ListAllPreparer prepares the ListAll request.
func (client VirtualNetworksClient) ListAllPreparer() (*http.Request, error) {
	// Path parameters are URL-escaped via autorest.Encode; the subscription-wide
	// route has no resource-group segment.
	pathParameters := map[string]interface{}{
		"subscriptionId": autorest.Encode("path", client.SubscriptionID),
	}
	queryParameters := map[string]interface{}{
		"api-version": client.APIVersion,
	}
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualNetworks", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// ListAllSender sends the ListAll request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworksClient) ListAllSender(req *http.Request) (*http.Response, error) {
	// Delegates to the client's sender chain (retries/inspection configured on
	// the client).
	return autorest.SendWithSender(client, req)
}

// ListAllResponder handles the response to the ListAll request. The method always
// closes the http.Response Body.
func (client VirtualNetworksClient) ListAllResponder(resp *http.Response) (result VirtualNetworkListResult, err error) {
	// Accept only 200 OK, decode the JSON body into result, then close the body.
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	// Keep the raw response available to callers even on error.
	result.Response = autorest.Response{Response: resp}
	return
}

// ListAllNextResults retrieves the next set of results, if any.
func (client VirtualNetworksClient) ListAllNextResults(lastResults VirtualNetworkListResult) (result VirtualNetworkListResult, err error) {
	// Build the request for the next page from the previous page's result
	// (presumably from its next-link field — confirm in
	// VirtualNetworkListResultPreparer).
	req, err := lastResults.VirtualNetworkListResultPreparer()
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "ListAll", nil, "Failure preparing next results request")
	}
	// nil request with nil error signals that no further pages exist.
	if req == nil {
		return
	}
	resp, err := client.ListAllSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "ListAll", resp, "Failure sending next results request")
	}
	result, err = client.ListAllResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworksClient", "ListAll", resp, "Failure responding to next results request")
	}
	return
}
{ "pile_set_name": "Github" }
// Copyright (c) 2010-2013 AlphaSierraPapa for the SharpDevelop Team
// 
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
// 
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
// 
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

using System;
using NUnit.Framework;

namespace ICSharpCode.NRefactory.CSharp.Parser.Expression
{
	/// <summary>
	/// Parser tests for the <c>checked(...)</c> and <c>unchecked(...)</c>
	/// expression forms.
	/// </summary>
	[TestFixture]
	public class CheckedExpressionTests
	{
		[Test]
		public void CheckedExpressionTest()
		{
			// Parsing "checked(a)" must yield a CheckedExpression whose inner
			// expression is the identifier 'a'.
			var parsed = ParseUtilCSharp.ParseExpression<CheckedExpression>("checked(a)");
			Assert.IsTrue(parsed.Expression is IdentifierExpression);
		}
		
		[Test]
		public void UncheckedExpressionTest()
		{
			// Same contract for the unchecked(...) form.
			var parsed = ParseUtilCSharp.ParseExpression<UncheckedExpression>("unchecked(a)");
			Assert.IsTrue(parsed.Expression is IdentifierExpression);
		}
	}
}
{ "pile_set_name": "Github" }
<?php
/**
 * @file
 * Contains the user from URL argument default plugin.
 */

/**
 * Default argument plugin to extract a user via menu_get_object
 */
class views_plugin_argument_default_user extends views_plugin_argument_default {

  /**
   * Declares the plugin's options; 'user' toggles the node-author fallback.
   */
  function option_definition() {
    $options = parent::option_definition();
    $options['user'] = array('default' => '', 'bool' => TRUE, 'translatable' => FALSE);

    return $options;
  }

  /**
   * Settings form: a single checkbox controlling the node-author fallback.
   */
  function options_form(&$form, &$form_state) {
    $form['user'] = array(
      '#type' => 'checkbox',
      '#title' => t('Also look for a node and use the node author'),
      '#default_value' => $this->options['user'],
    );
  }

  /**
   * Migrates the legacy 'default_argument_user' argument option into this
   * plugin's own 'user' option when it has not been set yet.
   */
  function convert_options(&$options) {
    if (!isset($options['user']) && isset($this->argument->options['default_argument_user'])) {
      $options['user'] = $this->argument->options['default_argument_user'];
    }
  }

  /**
   * Resolves the default argument (a uid) from the current request.
   *
   * Checks are ordered from most to least specific; the first hit wins, so do
   * not reorder them.
   */
  function get_argument() {
    // 1. A user loaded from menu position 1-3 of the active router item.
    foreach (range(1, 3) as $i) {
      $user = menu_get_object('user', $i);
      if (!empty($user)) {
        return $user->uid;
      }
    }

    // 2. Same lookup for the 'user_uid_optional' loader variant.
    foreach (range(1, 3) as $i) {
      $user = menu_get_object('user_uid_optional', $i);
      if (!empty($user)) {
        return $user->uid;
      }
    }

    // 3. Optionally fall back to the author of a node in the router item.
    if (!empty($this->options['user'])) {
      foreach (range(1, 3) as $i) {
        $node = menu_get_object('node', $i);
        if (!empty($node)) {
          return $node->uid;
        }
      }
    }

    // 4. Raw path fallback for user/% pages that did not match above.
    if (arg(0) == 'user' && is_numeric(arg(1))) {
      return arg(1);
    }

    // 5. Raw path fallback for node/% pages, loading the node for its author.
    if (!empty($this->options['user'])) {
      if (arg(0) == 'node' && is_numeric(arg(1))) {
        $node = node_load(arg(1));
        if ($node) {
          return $node->uid;
        }
      }
    }

    // If the current page is a view that takes uid as an argument, return the uid.
    $view = views_get_page_view();
    if ($view && isset($view->argument['uid'])) {
      return $view->argument['uid']->argument;
    }
  }
}
{ "pile_set_name": "Github" }
// Copyright 2013 Dario Castañé. All rights reserved.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Based on src/pkg/reflect/deepequal.go from official
// golang's stdlib.

package mergo

import (
	"errors"
	"reflect"
)

// Errors reported by Mergo when it finds invalid arguments.
var (
	ErrNilArguments                = errors.New("src and dst must not be nil")
	ErrDifferentArgumentsTypes     = errors.New("src and dst must be of same type")
	ErrNotSupported                = errors.New("only structs and maps are supported")
	ErrExpectedMapAsDestination    = errors.New("dst was expected to be a map")
	ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct")
	// NOTE(review): "Agument" is a typo, but the identifier is exported and
	// renaming it would break the public API; keep as-is.
	ErrNonPointerAgument = errors.New("dst must be a pointer")
)

// During deepMerge, must keep track of checks that are
// in progress. The comparison algorithm assumes that all
// checks in progress are true when it reencounters them.
// Visited are stored in a map indexed by 17 * a1 + a2;
// (comment inherited from reflect.DeepEqual — the exact indexing scheme is
// defined where visit is used, which is not visible in this chunk).
type visit struct {
	ptr  uintptr      // address of the value being visited
	typ  reflect.Type // dynamic type at that address
	next *visit       // link to the previous visit on the stack
}

// From src/pkg/encoding/json/encode.go.
func isEmptyValue(v reflect.Value) bool { switch v.Kind() { case reflect.Array, reflect.Map, reflect.Slice, reflect.String: return v.Len() == 0 case reflect.Bool: return !v.Bool() case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return v.Int() == 0 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: return v.Uint() == 0 case reflect.Float32, reflect.Float64: return v.Float() == 0 case reflect.Interface, reflect.Ptr: if v.IsNil() { return true } return isEmptyValue(v.Elem()) case reflect.Func: return v.IsNil() case reflect.Invalid: return true } return false } func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) { if dst == nil || src == nil { err = ErrNilArguments return } vDst = reflect.ValueOf(dst).Elem() if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map { err = ErrNotSupported return } vSrc = reflect.ValueOf(src) // We check if vSrc is a pointer to dereference it. if vSrc.Kind() == reflect.Ptr { vSrc = vSrc.Elem() } return }
{ "pile_set_name": "Github" }
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Generated-style mojom JS bindings for mojo_base.mojom.FilePath: a struct
// with a single string field `path`. Edits here should mirror the generator's
// output shape.

'use strict';

(function() {
  var mojomId = 'mojo/public/mojom/base/file_path.mojom';
  // Guard against double-registration of this mojom module.
  if (mojo.internal.isMojomLoaded(mojomId)) {
    console.warn('The following mojom is loaded multiple times: ' + mojomId);
    return;
  }
  mojo.internal.markMojomLoaded(mojomId);
  var bindings = mojo;
  var associatedBindings = mojo;
  var codec = mojo.internal;
  var validator = mojo.internal;

  var exports = mojo.internal.exposeNamespace('mojoBase.mojom');

  // Struct wrapper; `values` may override the defaulted fields.
  function FilePath(values) {
    this.initDefaults_();
    this.initFields_(values);
  }

  FilePath.prototype.initDefaults_ = function() {
    this.path = null;
  };

  // Copies only fields that already exist on the struct (ignores extras).
  FilePath.prototype.initFields_ = function(fields) {
    for(var field in fields) {
        if (this.hasOwnProperty(field))
          this[field] = fields[field];
    }
  };

  // Fuzzing hook: build an instance with generated field values.
  FilePath.generate = function(generator_) {
    var generated = new FilePath;
    generated.path = generator_.generateString(false);
    return generated;
  };

  // Fuzzing hook: mutate fields in place, returning this.
  FilePath.prototype.mutate = function(mutator_) {
    if (mutator_.chooseMutateField()) {
      this.path = mutator_.mutateString(this.path, false);
    }
    return this;
  };

  // This struct carries no handles, so the handle plumbing is trivial.
  FilePath.prototype.getHandleDeps = function() {
    var handles = [];
    return handles;
  };

  FilePath.prototype.setHandles = function() {
    this.setHandlesInternal_(arguments, 0);
  };

  FilePath.prototype.setHandlesInternal_ = function(handles, idx) {
    return idx;
  };

  // Wire-format validation: struct header, version table, then each field.
  FilePath.validate = function(messageValidator, offset) {
    var err;
    err = messageValidator.validateStructHeader(offset, codec.kStructHeaderSize);
    if (err !== validator.validationError.NONE)
        return err;

    var kVersionSizes = [
      {version: 0, numBytes: 16}
    ];
    err = messageValidator.validateStructVersion(offset, kVersionSizes);
    if (err !== validator.validationError.NONE)
        return err;

    // validate FilePath.path (non-nullable string pointer at field offset 0)
    err = messageValidator.validateStringPointer(offset + codec.kStructHeaderSize + 0, false)
    if (err !== validator.validationError.NONE)
        return err;

    return validator.validationError.NONE;
  };

  // Header plus one 8-byte string pointer.
  FilePath.encodedSize = codec.kStructHeaderSize + 8;

  // Reads the struct header (size, version) then the path field.
  FilePath.decode = function(decoder) {
    var packed;
    var val = new FilePath();
    var numberOfBytes = decoder.readUint32();
    var version = decoder.readUint32();
    val.path = decoder.decodeStruct(codec.String);
    return val;
  };

  // Writes size, version 0, then the path field.
  FilePath.encode = function(encoder, val) {
    var packed;
    encoder.writeUint32(FilePath.encodedSize);
    encoder.writeUint32(0);
    encoder.encodeStruct(codec.String, val.path);
  };

  exports.FilePath = FilePath;
})();
{ "pile_set_name": "Github" }
// Demo page wiring every $Toast variant of the UI library; each handler is
// bound from the page's WXML, so the handler names are part of the interface.
const { $Toast } = require('../../dist/base/index');

Page({
    // Plain text toast (no icon).
    handleText () {
        $Toast({
            content: '这是文本提示'
        });
    },
    // Built-in status types: success / warning / error / loading.
    handleSuccess () {
        $Toast({
            content: '成功的提示',
            type: 'success'
        });
    },
    handleWarning () {
        $Toast({
            content: '警告的提示',
            type: 'warning'
        });
    },
    handleError () {
        $Toast({
            content: '错误的提示',
            type: 'error'
        });
    },
    handleLoading () {
        $Toast({
            content: '加载中',
            type: 'loading'
        });
    },
    // Toast with one of the library's built-in icons.
    handleIcon () {
        $Toast({
            content: '使用内置的图标',
            icon: 'praise'
        });
    },
    // Toast with a custom remote image instead of an icon.
    handleImage () {
        $Toast({
            content: '使用自定义图片',
            image: 'https://i.loli.net/2017/08/21/599a521472424.jpg'
        });
    },
    // duration: 0 disables auto-close, so the toast stays up until the manual
    // $Toast.hide() fires 5 s later; mask: false keeps the page tappable.
    handleMask () {
        $Toast({
            content: '5秒后自动关闭',
            icon: 'prompt',
            duration: 0,
            mask: false
        });
        setTimeout(() => {
            $Toast.hide();
        }, 5000);
    },
});
{ "pile_set_name": "Github" }
{ "activePlaceCount": 0, "birth": { "place": { "name": "Derby, United Kingdom", "placeName": "Derby", "placeType": "inhabited_place" }, "time": { "startYear": 1734 } }, "birthYear": 1734, "date": "1734\u20131797", "death": { "place": { "name": "Derby, United Kingdom", "placeName": "Derby", "placeType": "inhabited_place" }, "time": { "startYear": 1797 } }, "fc": "Joseph Wright of Derby", "gender": "Male", "id": 615, "mda": "Wright, Joseph, of Derby", "movements": [ { "era": { "id": 350, "name": "19th century" }, "id": 364, "name": "Romanticism" }, { "era": { "id": 290, "name": "18th century" }, "id": 349, "name": "Sublime" } ], "startLetter": "W", "totalWorks": 16, "url": "http://www.tate.org.uk/art/artists/joseph-wright-of-derby-615" }
{ "pile_set_name": "Github" }
/***************************************************************************** * * \file * * \brief CTRL_ACCESS interface for SD/MMC card. * * Copyright (c) 2009-2011 Atmel Corporation. All rights reserved. * * \asf_license_start * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. The name of Atmel may not be used to endorse or promote products derived * from this software without specific prior written permission. * * 4. This software may only be redistributed and used in connection with an Atmel * AVR product. * * THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE * EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * \asf_license_stop * ******************************************************************************/ //_____ I N C L U D E S ___________________________________________________ #include "conf_access.h" #if SD_MMC_SPI_MEM == ENABLE #include "conf_sd_mmc_spi.h" #include "sd_mmc_spi.h" #include "sd_mmc_spi_mem.h" //_____ M A C R O S ________________________________________________________ #define Sd_mmc_spi_access_signal_on() #define Sd_mmc_spi_access_signal_off() //_____ P R I V A T E D E C L A R A T I O N _____________________________ //_____ D E F I N I T I O N ________________________________________________ //extern xdata uint32_t sd_mmc_spi_mem_size; extern uint32_t SD_MMC_SPI_DISK_SIZE; extern volatile uint32_t sd_mmc_spi_last_address; extern bool sd_mmc_spi_init_done; uint8_t sd_mmc_spi_presence_status = SD_MMC_INSERTED; extern bool sd_mmc_spi_init_done; //_____ D E C L A R A T I O N ______________________________________________ void sd_mmc_spi_mem_init(void) { sd_mmc_spi_internal_init(); // Restart Init of SD/MMC card after previous first init } Ctrl_status sd_mmc_spi_test_unit_ready(void) { Sd_mmc_spi_access_signal_on(); switch (sd_mmc_spi_presence_status) { case SD_MMC_REMOVED: sd_mmc_spi_init_done = false; if (sd_mmc_spi_mem_check()) { sd_mmc_spi_presence_status = SD_MMC_INSERTED; Sd_mmc_spi_access_signal_off(); return CTRL_BUSY; } Sd_mmc_spi_access_signal_off(); return CTRL_NO_PRESENT; case SD_MMC_INSERTED: if (!sd_mmc_spi_mem_check()) { sd_mmc_spi_presence_status = SD_MMC_REMOVING; sd_mmc_spi_init_done = false; Sd_mmc_spi_access_signal_off(); return CTRL_BUSY; } Sd_mmc_spi_access_signal_off(); return CTRL_GOOD; case SD_MMC_REMOVING: sd_mmc_spi_presence_status = SD_MMC_REMOVED; Sd_mmc_spi_access_signal_off(); return CTRL_NO_PRESENT; default: sd_mmc_spi_presence_status = SD_MMC_REMOVED; Sd_mmc_spi_access_signal_off(); return CTRL_BUSY; } /* if (sd_mmc_spi_mem_check()) { if (!sd_mmc_spi_status_changed) { sd_mmc_spi_status_changed = true; return 
CTRL_BUSY; // BUSY token must be returned to indicate a status change ! } else return CTRL_GOOD; // the 2nd time the host will ask for unit_ready, we can answer GOOD if we have returned BUSY first ! } else { if (sd_mmc_spi_status_changed) { sd_mmc_spi_status_changed = false; return CTRL_BUSY; // BUSY token must be returned to indicate a status change ! } else return CTRL_NO_PRESENT; } */ } Ctrl_status sd_mmc_spi_read_capacity(uint32_t *nb_sector) { // sd_mmc_spi_check_presence(); // ommited because creates interferences with "sd_mmc_spi_test_unit_ready()" function Sd_mmc_spi_access_signal_on(); if (!sd_mmc_spi_init_done) { sd_mmc_spi_mem_init(); } if (sd_mmc_spi_init_done) { *nb_sector = sd_mmc_spi_last_block_address+1; Sd_mmc_spi_access_signal_off(); return CTRL_GOOD; } else { Sd_mmc_spi_access_signal_off(); return CTRL_NO_PRESENT; } } //! //! @brief This function returns the write protected status of the memory. //! //! Only used by memory removal with a HARDWARE SPECIFIC write protected detection //! ! The user must unplug the memory to change this write protected status, //! which cannot be for a SD_MMC. //! //! @return false -> the memory is not write-protected (always) //!/ bool sd_mmc_spi_wr_protect(void) { return false; } //! //! @brief This function tells if the memory has been removed or not. //! //! @return false -> The memory isn't removed //!/ bool sd_mmc_spi_removal(void) { return false; // return ((sd_mmc_spi_check_presence()) ? 
false : true); } //------------ STANDARD FUNCTIONS to read/write the memory -------------------- #if ACCESS_USB == true #include "conf_usb.h" #ifdef USB_DEVICE_VENDOR_ID // USB Device Stack V2 #include "udi_msc.h" #else // USB Device Stack V1 #include "usb_drv.h" #include "scsi_decoder.h" #endif Ctrl_status sd_mmc_spi_usb_read_10(uint32_t addr, uint16_t nb_sector) { if (!sd_mmc_spi_init_done) { sd_mmc_spi_mem_init(); } if (!sd_mmc_spi_init_done) return CTRL_NO_PRESENT; Sd_mmc_spi_access_signal_on(); if( !sd_mmc_spi_read_open(addr) ) goto sd_mmc_spi_usb_read_10_fail; if( !sd_mmc_spi_read_multiple_sector(nb_sector) ) goto sd_mmc_spi_usb_read_10_fail; if( !sd_mmc_spi_read_close() ) goto sd_mmc_spi_usb_read_10_fail; Sd_mmc_spi_access_signal_off(); return CTRL_GOOD; sd_mmc_spi_usb_read_10_fail: Sd_mmc_spi_access_signal_off(); return CTRL_FAIL; } void sd_mmc_spi_read_multiple_sector_callback(const void *psector) { #ifdef USB_DEVICE_VENDOR_ID // USB Device Stack V2 udi_msc_trans_block( true, (uint8_t*)psector, MMC_SECTOR_SIZE, NULL); #else // USB Device Stack V1 uint16_t data_to_transfer = MMC_SECTOR_SIZE; while (data_to_transfer) { while (!Is_usb_in_ready(g_scsi_ep_ms_in)); Usb_reset_endpoint_fifo_access(g_scsi_ep_ms_in); data_to_transfer = usb_write_ep_txpacket(g_scsi_ep_ms_in, psector, data_to_transfer, &psector); Usb_ack_in_ready_send(g_scsi_ep_ms_in); } #endif } Ctrl_status sd_mmc_spi_usb_write_10(uint32_t addr, uint16_t nb_sector) { bool status; if (!sd_mmc_spi_init_done) { sd_mmc_spi_mem_init(); } if (sd_mmc_spi_init_done) { Sd_mmc_spi_access_signal_on(); sd_mmc_spi_write_open(addr); status = sd_mmc_spi_write_multiple_sector(nb_sector); sd_mmc_spi_write_close(); Sd_mmc_spi_access_signal_off(); if (status) return CTRL_GOOD; else return CTRL_NO_PRESENT; } else return CTRL_NO_PRESENT; } void sd_mmc_spi_write_multiple_sector_callback(void *psector) { #ifdef USB_DEVICE_VENDOR_ID // USB Device Stack V2 udi_msc_trans_block( false, (uint8_t*)psector, MMC_SECTOR_SIZE, 
NULL); #else // USB Device Stack V1 uint16_t data_to_transfer = MMC_SECTOR_SIZE; while (data_to_transfer) { while (!Is_usb_out_received(g_scsi_ep_ms_out)); Usb_reset_endpoint_fifo_access(g_scsi_ep_ms_out); data_to_transfer = usb_read_ep_rxpacket(g_scsi_ep_ms_out, psector, data_to_transfer, &psector); Usb_ack_out_received_free(g_scsi_ep_ms_out); } #endif } #endif // ACCESS_USB == true //------------ Standard functions for read/write 1 sector to 1 sector ram buffer ----------------- #if ACCESS_MEM_TO_RAM == true Ctrl_status sd_mmc_spi_mem_2_ram(uint32_t addr, void *ram) { Sd_mmc_spi_access_signal_on(); sd_mmc_spi_check_presence(); if (!sd_mmc_spi_init_done) { sd_mmc_spi_mem_init(); } if (!sd_mmc_spi_init_done) return CTRL_NO_PRESENT; if( !sd_mmc_spi_read_open(addr) ) goto sd_mmc_spi_mem_2_ram_fail; if( !sd_mmc_spi_read_sector_to_ram(ram)) goto sd_mmc_spi_mem_2_ram_fail; if( !sd_mmc_spi_read_close() ) goto sd_mmc_spi_mem_2_ram_fail; Sd_mmc_spi_access_signal_off(); return CTRL_GOOD; sd_mmc_spi_mem_2_ram_fail: Sd_mmc_spi_access_signal_off(); return CTRL_FAIL; } //! This fonction initialises the memory for a write operation //! from ram buffer to SD/MMC (1 sector) //! //! DATA FLOW is: RAM => SD/MMC //! //! (sector = 512B) //! @param addr Sector address to write //! @param ram Ram buffer pointer //! //! @return Ctrl_status //! It is ready -> CTRL_GOOD //! An error occurs -> CTRL_FAIL //! Ctrl_status sd_mmc_spi_ram_2_mem(uint32_t addr, const void *ram) { Sd_mmc_spi_access_signal_on(); sd_mmc_spi_check_presence(); if (!sd_mmc_spi_init_done) { sd_mmc_spi_mem_init(); } if (sd_mmc_spi_init_done) { sd_mmc_spi_write_open(addr); if (!sd_mmc_spi_write_sector_from_ram(ram)) { sd_mmc_spi_write_close(); Sd_mmc_spi_access_signal_off(); return CTRL_NO_PRESENT; } sd_mmc_spi_write_close(); Sd_mmc_spi_access_signal_off(); return CTRL_GOOD; } Sd_mmc_spi_access_signal_off(); return CTRL_NO_PRESENT; } #endif // ACCESS_MEM_TO_RAM == true #endif // SD_MMC_SPI_MEM == ENABLE
{ "pile_set_name": "Github" }
package lila.api

import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
import play.api.libs.json.Json
import scala.concurrent.duration._
import scala.util.Try

import lila.hub.actorApi.Announce

// In-memory holder for the single site-wide announcement. State lives in a
// plain var, so this store is not thread-safe beyond what callers provide.
object AnnounceStore {

  private var current = none[Announce]

  // Returns the current announce, lazily expiring it once its end date has
  // passed (expiry happens on read, not on a timer).
  def get: Option[Announce] = {
    current foreach { c =>
      if (c.date.isBeforeNow) current = none
    }
    current
  }

  def set(announce: Option[Announce]) = {
    current = announce
  }

  // examples:
  // 5 minutes Lichess will restart
  // 20 seconds Cthulhu will awake
  // Parses "<length> <unit> <message…>": the first two tokens form a
  // scala.concurrent Duration (relative expiry from now), the rest is the
  // message. Any parse failure yields none. Returns the stored value via get.
  def set(str: String): Option[Announce] = {
    set(str.split(" ").toList match {
      case length :: unit :: rest =>
        Try {
          val msg = rest mkString " "
          val date = DateTime.now plusSeconds Duration(s"$length $unit").toSeconds.toInt
          val isoDate = ISODateTimeFormat.dateTime print date
          val json = Json.obj("msg" -> msg, "date" -> isoDate)
          Announce(msg, date, json)
        }.toOption
      case _ => none
    })
    get
  }

  // NOTE(review): builds a blank Announce (presumably broadcast to clear
  // clients) but does NOT reset `current` — confirm callers clear the stored
  // state separately.
  def cancel = Announce("", DateTime.now, Json.obj())
}
{ "pile_set_name": "Github" }
/*
 * Copyright (C) Igor Sysoev
 */


#include <ngx_config.h>
#include <ngx_core.h>
#include <ngx_event.h>


#if (NGX_TEST_BUILD_EPOLL)

/* epoll declarations */

#define EPOLLIN        0x001
#define EPOLLPRI       0x002
#define EPOLLOUT       0x004
#define EPOLLRDNORM    0x040
#define EPOLLRDBAND    0x080
#define EPOLLWRNORM    0x100
#define EPOLLWRBAND    0x200
#define EPOLLMSG       0x400
#define EPOLLERR       0x008
#define EPOLLHUP       0x010

#define EPOLLET        0x80000000
#define EPOLLONESHOT   0x40000000

#define EPOLL_CTL_ADD  1
#define EPOLL_CTL_DEL  2
#define EPOLL_CTL_MOD  3

typedef union epoll_data {
    void         *ptr;
    int           fd;
    uint32_t      u32;
    uint64_t      u64;
} epoll_data_t;

struct epoll_event {
    uint32_t      events;
    epoll_data_t  data;
};

/*
 * stub implementations: the test build only has to compile on platforms
 * without epoll, it is never expected to run
 */

int epoll_create(int size)
{
    return -1;
}

int epoll_ctl(int epfd, int op, int fd, struct epoll_event *event)
{
    return -1;
}

int epoll_wait(int epfd, struct epoll_event *events, int nevents, int timeout)
{
    return -1;
}

#if (NGX_HAVE_FILE_AIO)

/*
 * raw syscall numbers for the kernel AIO and eventfd interfaces;
 * NOTE(review): these look like 32-bit x86 Linux numbers — confirm for
 * the target architecture
 */

#define SYS_io_setup      245
#define SYS_io_destroy    246
#define SYS_io_getevents  247
#define SYS_eventfd       323

typedef u_int  aio_context_t;

struct io_event {
    uint64_t  data;  /* the data field from the iocb */
    uint64_t  obj;   /* what iocb this event came from */
    int64_t   res;   /* result code for this event */
    int64_t   res2;  /* secondary result */
};

int eventfd(u_int initval)
{
    return -1;
}

#endif
#endif


/* module configuration: number of epoll_event slots passed to epoll_wait() */

typedef struct {
    ngx_uint_t  events;
} ngx_epoll_conf_t;


static ngx_int_t ngx_epoll_init(ngx_cycle_t *cycle, ngx_msec_t timer);
static void ngx_epoll_done(ngx_cycle_t *cycle);
static ngx_int_t ngx_epoll_add_event(ngx_event_t *ev, ngx_int_t event,
    ngx_uint_t flags);
static ngx_int_t ngx_epoll_del_event(ngx_event_t *ev, ngx_int_t event,
    ngx_uint_t flags);
static ngx_int_t ngx_epoll_add_connection(ngx_connection_t *c);
static ngx_int_t ngx_epoll_del_connection(ngx_connection_t *c,
    ngx_uint_t flags);
static ngx_int_t ngx_epoll_process_events(ngx_cycle_t *cycle, ngx_msec_t timer,
    ngx_uint_t flags);

#if (NGX_HAVE_FILE_AIO)
static void ngx_epoll_eventfd_handler(ngx_event_t *ev);
#endif

static void *ngx_epoll_create_conf(ngx_cycle_t *cycle);
static char *ngx_epoll_init_conf(ngx_cycle_t *cycle, void *conf);

/* the epoll descriptor and the buffer that receives ready events */
static int                  ep = -1;
static struct epoll_event  *event_list;
static ngx_uint_t           nevents;

#if (NGX_HAVE_FILE_AIO)

/* eventfd descriptor and AIO context used for file AIO completion delivery */
int                         ngx_eventfd = -1;
aio_context_t               ngx_aio_ctx = 0;

static ngx_event_t          ngx_eventfd_event;
static ngx_connection_t     ngx_eventfd_conn;

#endif

static ngx_str_t      epoll_name = ngx_string("epoll");

static ngx_command_t  ngx_epoll_commands[] = {

    { ngx_string("epoll_events"),
      NGX_EVENT_CONF|NGX_CONF_TAKE1,
      ngx_conf_set_num_slot,
      0,
      offsetof(ngx_epoll_conf_t, events),
      NULL },

      ngx_null_command
};


ngx_event_module_t  ngx_epoll_module_ctx = {
    &epoll_name,
    ngx_epoll_create_conf,               /* create configuration */
    ngx_epoll_init_conf,                 /* init configuration */

    {
        ngx_epoll_add_event,             /* add an event */
        ngx_epoll_del_event,             /* delete an event */
        ngx_epoll_add_event,             /* enable an event */
        ngx_epoll_del_event,             /* disable an event */
        ngx_epoll_add_connection,        /* add a connection */
        ngx_epoll_del_connection,        /* delete a connection */
        NULL,                            /* process the changes */
        ngx_epoll_process_events,        /* process the events */
        ngx_epoll_init,                  /* init the events */
        ngx_epoll_done,                  /* done the events */
    }
};

ngx_module_t  ngx_epoll_module = {
    NGX_MODULE_V1,
    &ngx_epoll_module_ctx,               /* module context */
    ngx_epoll_commands,                  /* module directives */
    NGX_EVENT_MODULE,                    /* module type */
    NULL,                                /* init master */
    NULL,                                /* init module */
    NULL,                                /* init process */
    NULL,                                /* init thread */
    NULL,                                /* exit thread */
    NULL,                                /* exit process */
    NULL,                                /* exit master */
    NGX_MODULE_V1_PADDING
};


#if (NGX_HAVE_FILE_AIO)

/*
 * We call io_setup(), io_destroy(), io_submit(), and io_getevents() directly
 * as syscalls instead of libaio usage, because the library header file
 * supports eventfd() since 0.3.107 version only.
 *
 * Also we do not use eventfd() in glibc, because glibc supports it
 * since 2.8 version and glibc maps two syscalls eventfd() and eventfd2()
 * into single eventfd() function with different number of parameters.
 */

static long
io_setup(u_int nr_reqs, aio_context_t *ctx)
{
    return syscall(SYS_io_setup, nr_reqs, ctx);
}


static int
io_destroy(aio_context_t ctx)
{
    return syscall(SYS_io_destroy, ctx);
}


static long
io_getevents(aio_context_t ctx, long min_nr, long nr, struct io_event *events,
    struct timespec *tmo)
{
    return syscall(SYS_io_getevents, ctx, min_nr, nr, events, tmo);
}

#endif


/*
 * Creates the epoll descriptor on first use, registers the file-AIO eventfd
 * with it (when enabled), (re)allocates the event_list buffer if the
 * configured size grew, and installs this module's I/O and event actions.
 */
static ngx_int_t
ngx_epoll_init(ngx_cycle_t *cycle, ngx_msec_t timer)
{
    ngx_epoll_conf_t  *epcf;

    epcf = ngx_event_get_conf(cycle->conf_ctx, ngx_epoll_module);

    if (ep == -1) {
        /* the size hint is ignored by modern kernels but must be positive */
        ep = epoll_create(cycle->connection_n / 2);

        if (ep == -1) {
            ngx_log_error(NGX_LOG_EMERG, cycle->log, ngx_errno,
                          "epoll_create() failed");
            return NGX_ERROR;
        }

#if (NGX_HAVE_FILE_AIO)
        {
        int                 n;
        struct epoll_event  ee;

        ngx_eventfd = syscall(SYS_eventfd, 0);

        if (ngx_eventfd == -1) {
            ngx_log_error(NGX_LOG_EMERG, cycle->log, ngx_errno,
                          "eventfd() failed");
            return NGX_ERROR;
        }

        /* make the eventfd non-blocking; a failure here is non-fatal */
        n = 1;

        if (ioctl(ngx_eventfd, FIONBIO, &n) == -1) {
            ngx_log_error(NGX_LOG_EMERG, cycle->log, ngx_errno,
                          "ioctl(eventfd, FIONBIO) failed");
        }

        ngx_log_debug1(NGX_LOG_DEBUG_EVENT, cycle->log, 0,
                       "eventfd: %d", ngx_eventfd);

        /* io_setup() returns a negated errno value on failure */
        n = io_setup(1024, &ngx_aio_ctx);

        if (n != 0) {
            ngx_log_error(NGX_LOG_EMERG, cycle->log, -n, "io_setup() failed");
            return NGX_ERROR;
        }

        ngx_eventfd_event.data = &ngx_eventfd_conn;
        ngx_eventfd_event.handler = ngx_epoll_eventfd_handler;
        ngx_eventfd_event.log = cycle->log;
        ngx_eventfd_event.active = 1;
        ngx_eventfd_conn.fd = ngx_eventfd;
        ngx_eventfd_conn.read = &ngx_eventfd_event;
        ngx_eventfd_conn.log = cycle->log;

        ee.events = EPOLLIN|EPOLLET;
        ee.data.ptr = &ngx_eventfd_conn;

        if (epoll_ctl(ep, EPOLL_CTL_ADD, ngx_eventfd, &ee) == -1) {
            ngx_log_error(NGX_LOG_EMERG, cycle->log, ngx_errno,
                          "epoll_ctl(EPOLL_CTL_ADD, eventfd) failed");
            return NGX_ERROR;
        }
        }
#endif
    }

    /* grow (never shrink) the buffer handed to epoll_wait() */
    if (nevents < epcf->events) {
        if (event_list) {
            ngx_free(event_list);
        }

        event_list = ngx_alloc(sizeof(struct epoll_event) * epcf->events,
                               cycle->log);
        if (event_list == NULL) {
            return NGX_ERROR;
        }
    }

    nevents = epcf->events;

    ngx_io = ngx_os_io;

    ngx_event_actions = ngx_epoll_module_ctx.actions;

#if (NGX_HAVE_CLEAR_EVENT)
    ngx_event_flags = NGX_USE_CLEAR_EVENT
#else
    ngx_event_flags = NGX_USE_LEVEL_EVENT
#endif
                      |NGX_USE_GREEDY_EVENT
                      |NGX_USE_EPOLL_EVENT;

    return NGX_OK;
}


/*
 * Tears down the epoll descriptor, the AIO context (when enabled)
 * and the event buffer.
 */
static void
ngx_epoll_done(ngx_cycle_t *cycle)
{
    if (close(ep) == -1) {
        ngx_log_error(NGX_LOG_ALERT, cycle->log, ngx_errno,
                      "epoll close() failed");
    }

    ep = -1;

#if (NGX_HAVE_FILE_AIO)

    if (io_destroy(ngx_aio_ctx) != 0) {
        ngx_log_error(NGX_LOG_ALERT, cycle->log, ngx_errno,
                      "io_destroy() failed");
    }

    ngx_aio_ctx = 0;

#endif

    ngx_free(event_list);

    event_list = NULL;
    nevents = 0;
}


/*
 * Registers interest in a read or write event for the event's connection.
 * If the sibling event is already active, the existing registration is
 * modified (EPOLL_CTL_MOD) keeping the sibling's interest bit; otherwise
 * a new one is added (EPOLL_CTL_ADD).  The connection pointer is tagged
 * with the event's instance bit (its low bit) for stale-event detection.
 */
static ngx_int_t
ngx_epoll_add_event(ngx_event_t *ev, ngx_int_t event, ngx_uint_t flags)
{
    int                  op;
    uint32_t             events, prev;
    ngx_event_t         *e;
    ngx_connection_t    *c;
    struct epoll_event   ee;

    c = ev->data;

    events = (uint32_t) event;

    if (event == NGX_READ_EVENT) {
        e = c->write;
        prev = EPOLLOUT;
#if (NGX_READ_EVENT != EPOLLIN)
        events = EPOLLIN;
#endif

    } else {
        e = c->read;
        prev = EPOLLIN;
#if (NGX_WRITE_EVENT != EPOLLOUT)
        events = EPOLLOUT;
#endif
    }

    if (e->active) {
        op = EPOLL_CTL_MOD;
        events |= prev;

    } else {
        op = EPOLL_CTL_ADD;
    }

    ee.events = events | (uint32_t) flags;
    ee.data.ptr = (void *) ((uintptr_t) c | ev->instance);

    ngx_log_debug3(NGX_LOG_DEBUG_EVENT, ev->log, 0,
                   "epoll add event: fd:%d op:%d ev:%08XD",
                   c->fd, op, ee.events);

    if (epoll_ctl(ep, op, c->fd, &ee) == -1) {
        ngx_log_error(NGX_LOG_ALERT, ev->log, ngx_errno,
                      "epoll_ctl(%d, %d) failed", op, c->fd);
        return NGX_ERROR;
    }

    ev->active = 1;
#if 0
    ev->oneshot = (flags & NGX_ONESHOT_EVENT) ? 1 : 0;
#endif

    return NGX_OK;
}


/*
 * Removes interest in a read or write event.  If the sibling event is still
 * active the registration is downgraded with EPOLL_CTL_MOD; otherwise it is
 * removed entirely with EPOLL_CTL_DEL.
 */
static ngx_int_t
ngx_epoll_del_event(ngx_event_t *ev, ngx_int_t event, ngx_uint_t flags)
{
    int                  op;
    uint32_t             prev;
    ngx_event_t         *e;
    ngx_connection_t    *c;
    struct epoll_event   ee;

    /*
     * when the file descriptor is closed, the epoll automatically deletes
     * it from its queue, so we do not need to delete the event explicitly
     * before closing the file descriptor
     */

    if (flags & NGX_CLOSE_EVENT) {
        ev->active = 0;
        return NGX_OK;
    }

    c = ev->data;

    if (event == NGX_READ_EVENT) {
        e = c->write;
        prev = EPOLLOUT;

    } else {
        e = c->read;
        prev = EPOLLIN;
    }

    if (e->active) {
        op = EPOLL_CTL_MOD;
        ee.events = prev | (uint32_t) flags;
        ee.data.ptr = (void *) ((uintptr_t) c | ev->instance);

    } else {
        op = EPOLL_CTL_DEL;
        ee.events = 0;
        ee.data.ptr = NULL;
    }

    ngx_log_debug3(NGX_LOG_DEBUG_EVENT, ev->log, 0,
                   "epoll del event: fd:%d op:%d ev:%08XD",
                   c->fd, op, ee.events);

    if (epoll_ctl(ep, op, c->fd, &ee) == -1) {
        ngx_log_error(NGX_LOG_ALERT, ev->log, ngx_errno,
                      "epoll_ctl(%d, %d) failed", op, c->fd);
        return NGX_ERROR;
    }

    ev->active = 0;

    return NGX_OK;
}


/*
 * Registers a connection for both read and write events in edge-triggered
 * mode, tagging the pointer with the read event's instance bit.
 */
static ngx_int_t
ngx_epoll_add_connection(ngx_connection_t *c)
{
    struct epoll_event  ee;

    ee.events = EPOLLIN|EPOLLOUT|EPOLLET;
    ee.data.ptr = (void *) ((uintptr_t) c | c->read->instance);

    ngx_log_debug2(NGX_LOG_DEBUG_EVENT, c->log, 0,
                   "epoll add connection: fd:%d ev:%08XD", c->fd, ee.events);

    if (epoll_ctl(ep, EPOLL_CTL_ADD, c->fd, &ee) == -1) {
        ngx_log_error(NGX_LOG_ALERT, c->log, ngx_errno,
                      "epoll_ctl(EPOLL_CTL_ADD, %d) failed", c->fd);
        return NGX_ERROR;
    }

    c->read->active = 1;
    c->write->active = 1;

    return NGX_OK;
}


/*
 * Deregisters a connection from epoll and marks both of its events inactive.
 */
static ngx_int_t
ngx_epoll_del_connection(ngx_connection_t *c, ngx_uint_t flags)
{
    int                 op;
    struct epoll_event  ee;

    /*
     * when the file descriptor is closed the epoll automatically deletes
     * it from its queue so we do not need to delete the event explicitly
     * before closing the file descriptor
     */

    if (flags & NGX_CLOSE_EVENT) {
        c->read->active = 0;
        c->write->active = 0;
        return NGX_OK;
    }

    ngx_log_debug1(NGX_LOG_DEBUG_EVENT, c->log, 0,
                   "epoll del connection: fd:%d", c->fd);

    op = EPOLL_CTL_DEL;
    ee.events = 0;
    ee.data.ptr = NULL;

    if (epoll_ctl(ep, op, c->fd, &ee) == -1) {
        ngx_log_error(NGX_LOG_ALERT, c->log, ngx_errno,
                      "epoll_ctl(%d, %d) failed", op, c->fd);
        return NGX_ERROR;
    }

    c->read->active = 0;
    c->write->active = 0;

    return NGX_OK;
}


/*
 * Waits for ready events and dispatches them.  Stale events (detected via
 * the instance bit or a closed fd) are skipped; EPOLLERR/EPOLLHUP without
 * EPOLLIN/EPOLLOUT are promoted to both so that at least one active handler
 * gets a chance to observe the error.  Depending on the flags, handlers are
 * either invoked directly or queued on the posted-events lists.
 */
static ngx_int_t
ngx_epoll_process_events(ngx_cycle_t *cycle, ngx_msec_t timer, ngx_uint_t flags)
{
    int                events;
    uint32_t           revents;
    ngx_int_t          instance, i;
    ngx_uint_t         level;
    ngx_err_t          err;
    ngx_log_t         *log;
    ngx_event_t       *rev, *wev, **queue;
    ngx_connection_t  *c;

    /* NGX_TIMER_INFINITE == INFTIM */

    ngx_log_debug1(NGX_LOG_DEBUG_EVENT, cycle->log, 0,
                   "epoll timer: %M", timer);

    events = epoll_wait(ep, event_list, (int) nevents, timer);

    /* capture errno immediately, before any call can overwrite it */
    err = (events == -1) ? ngx_errno : 0;

    if (flags & NGX_UPDATE_TIME || ngx_event_timer_alarm) {
        ngx_time_update();
    }

    if (err) {
        if (err == NGX_EINTR) {

            /* an interrupt from the timer-resolution alarm is expected */
            if (ngx_event_timer_alarm) {
                ngx_event_timer_alarm = 0;
                return NGX_OK;
            }

            level = NGX_LOG_INFO;

        } else {
            level = NGX_LOG_ALERT;
        }

        ngx_log_error(level, cycle->log, err, "epoll_wait() failed");
        return NGX_ERROR;
    }

    if (events == 0) {
        if (timer != NGX_TIMER_INFINITE) {
            return NGX_OK;
        }

        /* a zero-event return without a timeout should be impossible */
        ngx_log_error(NGX_LOG_ALERT, cycle->log, 0,
                      "epoll_wait() returned no events without timeout");
        return NGX_ERROR;
    }

    ngx_mutex_lock(ngx_posted_events_mutex);

    log = cycle->log;

    for (i = 0; i < events; i++) {
        c = event_list[i].data.ptr;

        /* the low bit of the pointer carries the event instance tag */
        instance = (uintptr_t) c & 1;
        c = (ngx_connection_t *) ((uintptr_t) c & (uintptr_t) ~1);

        rev = c->read;

        if (c->fd == -1 || rev->instance != instance) {

            /*
             * the stale event from a file descriptor
             * that was just closed in this iteration
             */

            ngx_log_debug1(NGX_LOG_DEBUG_EVENT, cycle->log, 0,
                           "epoll: stale event %p", c);
            continue;
        }

#if (NGX_DEBUG0)
        log = c->log ? c->log : cycle->log;
#endif

        revents = event_list[i].events;

        ngx_log_debug3(NGX_LOG_DEBUG_EVENT, log, 0,
                       "epoll: fd:%d ev:%04XD d:%p",
                       c->fd, revents, event_list[i].data.ptr);

        if (revents & (EPOLLERR|EPOLLHUP)) {
            ngx_log_debug2(NGX_LOG_DEBUG_EVENT, log, 0,
                           "epoll_wait() error on fd:%d ev:%04XD",
                           c->fd, revents);
        }

#if 0
        if (revents & ~(EPOLLIN|EPOLLOUT|EPOLLERR|EPOLLHUP)) {
            ngx_log_error(NGX_LOG_ALERT, log, 0,
                          "strange epoll_wait() events fd:%d ev:%04XD",
                          c->fd, revents);
        }
#endif

        if ((revents & (EPOLLERR|EPOLLHUP))
            && (revents & (EPOLLIN|EPOLLOUT)) == 0)
        {
            /*
             * if the error events were returned without EPOLLIN or EPOLLOUT,
             * then add these flags to handle the events at least in one
             * active handler
             */

            revents |= EPOLLIN|EPOLLOUT;
        }

        if ((revents & EPOLLIN) && rev->active) {

            if ((flags & NGX_POST_THREAD_EVENTS) && !rev->accept) {
                rev->posted_ready = 1;

            } else {
                rev->ready = 1;
            }

            if (flags & NGX_POST_EVENTS) {
                /* accept events go to their own queue to be handled first */
                queue = (ngx_event_t **) (rev->accept ?
                               &ngx_posted_accept_events : &ngx_posted_events);

                ngx_locked_post_event(rev, queue);

            } else {
                rev->handler(rev);
            }
        }

        wev = c->write;

        if ((revents & EPOLLOUT) && wev->active) {

            if (flags & NGX_POST_THREAD_EVENTS) {
                wev->posted_ready = 1;

            } else {
                wev->ready = 1;
            }

            if (flags & NGX_POST_EVENTS) {
                ngx_locked_post_event(wev, &ngx_posted_events);

            } else {
                wev->handler(wev);
            }
        }
    }

    ngx_mutex_unlock(ngx_posted_events_mutex);

    return NGX_OK;
}


#if (NGX_HAVE_FILE_AIO)

/*
 * Handles eventfd readiness: reads the 64-bit completion counter, then
 * drains up to that many completed AIO events with io_getevents() and
 * posts each completed event on the posted-events queue.
 */
static void
ngx_epoll_eventfd_handler(ngx_event_t *ev)
{
    int               n;
    long              i, events;
    uint64_t          ready;
    ngx_err_t         err;
    ngx_event_t      *e;
    ngx_event_aio_t  *aio;
    struct io_event   event[64];
    struct timespec   ts;

    ngx_log_debug0(NGX_LOG_DEBUG_EVENT, ev->log, 0, "eventfd handler");

    /* an eventfd read always transfers exactly 8 bytes when it succeeds */
    n = read(ngx_eventfd, &ready, 8);

    err = ngx_errno;

    ngx_log_debug1(NGX_LOG_DEBUG_EVENT, ev->log, 0, "eventfd: %d", n);

    if (n != 8) {
        if (n == -1) {
            if (err == NGX_EAGAIN) {
                return;
            }

            ngx_log_error(NGX_LOG_ALERT, ev->log, err, "read(eventfd) failed");
            return;
        }

        ngx_log_error(NGX_LOG_ALERT, ev->log, 0,
                      "read(eventfd) returned only %d bytes", n);
        return;
    }

    /* zero timeout: poll for completions without blocking */
    ts.tv_sec = 0;
    ts.tv_nsec = 0;

    while (ready) {
        events = io_getevents(ngx_aio_ctx, 1, 64, event, &ts);

        ngx_log_debug1(NGX_LOG_DEBUG_EVENT, ev->log, 0,
                       "io_getevents: %l", events);

        if (events > 0) {
            ready -= events;

            for (i = 0; i < events; i++) {

                ngx_log_debug4(NGX_LOG_DEBUG_EVENT, ev->log, 0,
                               "io_event: %uXL %uXL %L %L",
                               event[i].data, event[i].obj,
                               event[i].res, event[i].res2);

                /* the iocb data field carries the ngx_event_t pointer */
                e = (ngx_event_t *) (uintptr_t) event[i].data;

                e->complete = 1;
                e->active = 0;
                e->ready = 1;

                aio = e->data;
                aio->res = event[i].res;

                ngx_post_event(e, &ngx_posted_events);
            }

            continue;
        }

        if (events == 0) {
            return;
        }

        /* events < 0 */

        ngx_log_error(NGX_LOG_ALERT, ev->log, -events,
                      "io_getevents() failed");
        return;
    }
}

#endif


/* Allocates the module configuration; events stays unset until init_conf. */
static void *
ngx_epoll_create_conf(ngx_cycle_t *cycle)
{
    ngx_epoll_conf_t  *epcf;

    epcf = ngx_palloc(cycle->pool, sizeof(ngx_epoll_conf_t));
    if (epcf == NULL) {
        return NULL;
    }

    epcf->events = NGX_CONF_UNSET;

    return epcf;
}


/* Applies the default epoll_events value when the directive was not given. */
static char *
ngx_epoll_init_conf(ngx_cycle_t *cycle, void *conf)
{
    ngx_epoll_conf_t *epcf = conf;

    ngx_conf_init_uint_value(epcf->events, 512);

    return NGX_CONF_OK;
}
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 971f44ba24a74294294daed00507d80e timeCreated: 1436868016 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
# Add project specific ProGuard rules here. # By default, the flags in this file are appended to flags specified # in /Users/bobomee/Desktop/develop/sdk/tools/proguard/proguard-android.txt # You can edit the include path and order by changing the proguardFiles # directive in build.gradle. # # For more details, see # http://developer.android.com/guide/developing/tools/proguard.html # Add any project specific keep options here: # If your project uses WebView with JS, uncomment the following # and specify the fully qualified class name to the JavaScript interface # class: #-keepclassmembers class fqcn.of.javascript.interface.for.webview { # public *; #}
{ "pile_set_name": "Github" }
//----------------------------------------
//
// Copyright © ying32. All Rights Reserved.
//
// Licensed under Apache License 2.0
//
//----------------------------------------

// +build windows

package win

// TTokenType corresponds to the Windows TOKEN_TYPE enumeration, which
// identifies whether an access token is a primary token or an
// impersonation token.
type TTokenType uint32

const (
	// TokenTPad is a zero-value placeholder so that TokenPrimary == 1,
	// matching the numbering of TOKEN_TYPE in winnt.h, which starts at 1.
	TokenTPad TTokenType = iota + 0
	// TokenPrimary indicates a primary access token.
	TokenPrimary
	// TokenImpersonation indicates an impersonation token.
	TokenImpersonation
)

// TTokenInformationClass corresponds to the Windows TOKEN_INFORMATION_CLASS
// enumeration, used with GetTokenInformation/SetTokenInformation to select
// which piece of token information is queried or set.  The enumeration
// starts at 1 (TokenUser), hence the "iota + 1" below.
type TTokenInformationClass uint32

const (
	TokenUser TTokenInformationClass = iota + 1
	TokenGroups
	TokenPrivileges
	TokenOwner
	TokenPrimaryGroup
	TokenDefaultDacl
	TokenSource
	TokenType
	TokenImpersonationLevel
	TokenStatistics
	TokenRestrictedSids
	TokenSessionId
	TokenGroupsAndPrivileges
	TokenSessionReference
	TokenSandBoxInert
	TokenAuditPolicy
	TokenOrigin
	TokenElevationType
	TokenLinkedToken
	TokenElevation
	TokenHasRestrictions
	TokenAccessInformation
	TokenVirtualizationAllowed
	TokenVirtualizationEnabled
	TokenIntegrityLevel
	TokenUIAccess
	TokenMandatoryPolicy
	TokenLogonSid
	// VMaxTokenInfoClass is the sentinel marking the maximum value of the
	// enumeration (MaxTokenInfoClass in winnt.h).
	VMaxTokenInfoClass
)
{ "pile_set_name": "Github" }
/* AntiAir: anti-air related calculations - variable naming convention: - fleetObj: instance of KC3Fleet - shipObj: instance of KC3Ship - mst: master data of either ship or gear - pred: predicates, a function that accepts a single parameter and returns a boolean value - predXXX: predicate combinations. "predXXX(pred1, pred2, ...)" combines pred1, pred2, ... in some specific way to produce a new predicate. - module contents: - shipProportionalShotdownRate(shipObj) returns a value (supposed to be 0 <= v <= 1) indicating the rate of planes being shot down. note that it might be possible for this value to exceed 1.0. - shipProportionalShotdown(shipObj, num) same as "shipProportionalShotdownRate", except that this one calculates the number of planes being shotdown with slot capacity is given by "num". - shipFixedShotdown(shipObj, fleetObj, formationModifier, [K]) returns an integer indicating how many planes will be shotdown. "formationModifier" takes one of: 1/1.2/1.6 depending on formation (see "getFormationModifiers" for detail). K (defaults to 1) is optional, depending on whether AACI is triggered and which kind of AACI is triggered. - shipFixedShotdownRange(shipObj, fleetObj, formationModifier) like "shipFixedShotdown" but this one returns a range by considering all possible AACIs "shipObj" can perform and use the largest modifier as upper bound. - shipFixedShotdownRangeWithAACI(shipObj, fleetObj, formationModifier) the same as "shipFixedShotdownRange" except returning the AACI ID of largest modifier. - shipMaxShotdownAllBonuses(shipObj) return the largest fixed and with modifier bonuses of all possible AACIs "shipObj" can perform. - shipPossibleAACIs(shipObj) / fleetPossibleAACIs(fleetObj) returns a list of possible AACI API Ids that ship / fleet could perform. - shipAllPossibleAACIs(mst) returns a list of possible AACI API Ids that type of ship could perform ignored equipments. 
- sortedPossibleAaciList(aaciIdList) return a list of AACI object sorted by shot down bonus descended. - AACITable[<AACI API>] returns a record of AACI info: - id: AACI API Id - fixed: fixed shotdown bonus - modifier: the "K" value to "shipFixedShotdown" when this AACI is triggered - icon: IDs of icons representing this kind of AACI - predicateShipMst: test whether "mst" can perform this kind of AACI ignoring equipments - predicateShipObj: test whether "shipObj" can perform this particular kind of AACI - other not explicitly listed contents are for debugging or internal use only. */ (function() { "use strict"; function categoryEq(n) { return function (mst) { return mst.api_type[2] /* category */ === n; }; } function iconEq(n) { return function (mst) { return mst.api_type[3] /* icon */ === n; }; } // a predicate combinator, "predAnyOf(f,g)(x)" is the same as "f(x) || g(x)" // test all predicates passed as argument in order, // return the first non-falsy value or "false" if all predicates have failed. function predAnyOf(/* list of predicates */) { var args = arguments; return function(x) { for (var fInd in args) { var result = args[fInd](x); if (result) return result; } return false; }; } function predAllOf(/* list of predicates */) { var args = arguments; return function(x) { var result = true; for (var fInd in args) { result = args[fInd](x); if (! result) return false; } return result; }; } function predNot( pred ) { return function(x) { return ! 
pred(x); }; } // all types of Radar (12 for small, 13 for large) function isRadar(mst) { return (categoryEq(12)(mst) || categoryEq(13)(mst)); } // AA Radar // Surface Radar are excluded by checking whether // the equipment gives AA stat (api_tyku) function isAARadar(mst) { return isRadar(mst) && mst.api_tyku >= 2; } // AAFD: check by category (36) var isAAFD = categoryEq(36); // High-angle mounts: check by icon (16) var isHighAngleMount = iconEq(16); // Type 3 Shell var isType3Shell = categoryEq(18); // Check by icon (15) var isMachineGun = iconEq(15); // Anti-air gun includes machine guns and rocket launchers, // but not sure why AA stat < 3 gun not counted (only 7.7mm MG for now) var isAAGun = predAllOf(isMachineGun, function(mst) { return mst.api_tyku >= 3; }); var isRedGun = predAnyOf( iconEq(1), iconEq(2), iconEq(3)); function is46cmTripleMount(mst) { // 46cm Kai not counted // http://ja.kancolle.wikia.com/wiki/%E3%82%B9%E3%83%AC%E3%83%83%E3%83%89:363#21 return mst.api_id === 6; //|| mst.api_id === 276; } var isYellowGun = iconEq(4); var isFighter = categoryEq(6); var isDiveBomber = categoryEq(7); var isSeaplaneRecon = categoryEq(10); var isLargeCaliberMainGun = categoryEq(3); function isBuiltinHighAngleMount(mst) { // use the condition also used in game for future unknown equipment return isHighAngleMount(mst) && mst.api_tyku >= 8; /* return [ 122, // aki-gun 130, // maya-gun 135, // 90mm single HA 172, // 5inch ].indexOf( mst.api_id ) !== -1; */ } function is10cmTwinHighAngleMountKaiAMG(mst) { // 10cm Twin High-angle Gun Mount Kai + Additional Machine return mst.api_id === 275; } function isCDMG(mst) { return isAAGun(mst) && mst.api_tyku >= 9; /* return [ 131, // 25mm triple (CD) 173, // Bofors 191, // QF 2-pounder ].indexOf( mst.api_id ) !== -1; */ } var isAAGunNotCD = predAllOf(isAAGun, predNot(isCDMG)); function is12cm30tubeRocketLauncherKai2(mst) { // 12cm 30-tube Rocket Launcher Kai Ni return mst.api_id === 274; } function isBritishRocketLauncher(mst) { 
// 16inch Mk.I Triple Gun Mount Kai + FCR Type 284 (UP Rocket Launchers embedded) // 20-tube 7inch UP Rocket Launchers return [300, 301].indexOf(mst.api_id) !== -1; } function is20tube7inchUPRocketLaunchers(mst) { // 20-tube 7inch UP Rocket Launchers return mst.api_id === 301; } function isBritishAAGun(mst) { // QF 2-pounder Octuple Pom-pom Gun Mount return [191].indexOf(mst.api_id) !== -1; } // GFCS Mk.37 var isGfcsRadar = masterIdEq(307); // 5inch Single Gun Mount Mk.30 Kai + GFCS Mk.37 var is5inchSingleMountKaiWithGfcs = masterIdEq(308); // 5inch Single Gun Mount Mk.30 Kai var is5inchSingleMountKai = masterIdEq(313); // 5inch Single Gun Mount Mk.30 or +Kai function is5inchSingleMountOrKai(mst) { return [284, 313].indexOf(mst.api_id) !== -1; } // 5inch Twin Dual-purpose Gun Mount (Concentrated Deployment) var is5inchTwinDualMountCD = masterIdEq(362); // GFCS Mk.37 + 5inch Twin Dual-purpose Gun Mount (Concentrated Deployment) var is5inchTwinDualMountCDWithGfcs = masterIdEq(363); // for equipments the coefficient is different for // calculating adjusted ship AA stat and fleet AA stat, // so let's use the following naming convention: // // - "getShipXXX" is for calculating adjusted AA stat for individual ships // - "getFleetXXX" for fleet AA // // verbs might change but the same convention should follow. // TODO: abyssal equipments into consideration? // it is possible for conditions to have overlap: // Akizuki-gun for example is both high angle mount and short caliber main gun. // to resolve this: // - the conditions are re-ordered so the highest applicable // modifier is always checked first. // - the wiki says main gun (red), so maybe an icon-based checker "isRedGun" // might be more appropriate. 
function getShipEquipmentModifier(mst) { if (isMachineGun(mst)) return 6; if (isHighAngleMount(mst) || isAAFD(mst)) return 4; if (isAARadar(mst)) return 3; // no default value for unverified equipment return 0; } function getFleetEquipmentModifier(mst) { if (isType3Shell(mst)) return 0.6; if (isAARadar(mst)) return 0.4; if (isHighAngleMount(mst) || isAAFD(mst)) return 0.35; if (is46cmTripleMount(mst)) return 0.25; if (predAnyOf(isRedGun, isYellowGun, isMachineGun, isFighter, isDiveBomber, isSeaplaneRecon)(mst)) return 0.2; // no default value for unverified equipment, might use 0.2 as default? return 0; } // updated data: http://wikiwiki.jp/kancolle/?%B9%D2%B6%F5%C0%EF#anti-aircraft // another implementation might give the latest verified data: // https://github.com/Nishisonic/anti_aircraft/blob/gh-pages/js/util.js function getShipImprovementModifier(mst) { if (isMachineGun(mst)) return 4; if (isBuiltinHighAngleMount(mst)) return 3; if (isHighAngleMount(mst)) return 2; if (isAAFD(mst)) return 2; if (isAARadar(mst)) return 0; // no default value for unverified equipment return 0; } function getFleetImprovementModifier(mst) { if (isBuiltinHighAngleMount(mst)) return 3; if (isHighAngleMount(mst)) return 2; if (isAAFD(mst)) return 2; if (isAARadar(mst)) return 1.5; if (isMachineGun(mst)) return 0; // no default value for unverified equipment return 0; } function calcEquipmentAADefense( mst, stars /* number 0..10 */, forFleet /* bool */) { var eTypMod = (forFleet ? getFleetEquipmentModifier : getShipEquipmentModifier)(mst); var eImproveMod = (forFleet ? 
getFleetImprovementModifier : getShipImprovementModifier)(mst); // according verification, AA bonus of specific equip on specific ship not counted var aaStat = mst.api_tyku; return eTypMod * aaStat + eImproveMod * Math.sqrt( stars ); } // returns a special floor function f(x) = q * floor( x / q ) // - q = 1 if shipObj equips nothing // - q = 2 otherwise function specialFloor(shipObj) { var q = 1; var allItems = allShipEquipments(shipObj); for (var itemInd in allItems) { var item = allItems[itemInd]; if (item.masterId !== 0) { q = 2; break; } } return function(x) { return q * Math.floor(x / q); }; } function allShipEquipments(shipObj) { return shipObj.equipment(true); } function shipEquipmentAntiAir(shipObj, forFleet) { var allItems = allShipEquipments(shipObj); return allItems.reduce( function(curAA, item) { return curAA + item.aaDefense(forFleet); }, 0); } function shipAdjustedAntiAir(shipObj) { //return shipObj.aa[1] + shipEquipmentAntiAir(shipObj, false); // here aa[1] is max naked stat on lv99, equaled to api_tyku[1], to get current level naked stat, // might use current naked stat: aa[0] - equipment stat. // according verification, AA bonus of specific equip on specific ship not counted, // it seems be better not to use aa[0] property, // might use `shipObj.estimateNakedStats("aa")` instead. 
return shipObj.estimateNakedStats("aa") + shipEquipmentAntiAir(shipObj, false); } function shipProportionalShotdownRate(shipObj, onCombinedFleetNum) { var floor = specialFloor(shipObj); var adjustedAA = shipAdjustedAntiAir(shipObj); var combinedModifier = getCombinedFleetModifier(onCombinedFleetNum); return floor(adjustedAA) / 400 * combinedModifier; } function shipProportionalShotdown(shipObj, num, onCombinedFleetNum) { return Math.floor( shipProportionalShotdownRate(shipObj, onCombinedFleetNum) * num ); } function getCombinedFleetModifier(onCombinedFleetNum, isLongDistanceAirRaid = false) { // https://github.com/Nishisonic/anti_aircraft/blob/gh-pages/js/util.js // http://ja.kancolle.wikia.com/wiki/%E3%82%B9%E3%83%AC%E3%83%83%E3%83%89:363#18 return onCombinedFleetNum > 0 ? // is on combined fleet? onCombinedFleetNum > 1 ? // is escort fleet? 0.6 * 0.8 : // otherwise combined main fleet (isLongDistanceAirRaid ? 0.9 * 0.8 : 1 * 0.8) : 1.0; } function getFormationModifiers(id) { return (id === 1 || id === 4 || id === 5) ? 1.0 // line ahead / echelon / line abreast : (id === 2) ? 1.2 // double line : (id === 3) ? 1.6 // diamond : (id === 6) ? 1.1 // vanguard : (id === 11 || id === 21) ? 1.1 // Combined anti-sub : (id === 12 || id === 14 || id === 22 || id === 24) ? 1.0 // Combined forward / battle : (id === 13 || id === 23) ? 
1.5 // Combined diamond : NaN; // NaN for indicating an invalid id } function fleetAdjustedAntiAir(fleetObj, formationModifier) { var allShipEquipmentAA = fleetObj.ship().reduce( function(curAA, ship) { return curAA + shipEquipmentAntiAir(ship, true); }, 0); return (2/1.3) * Math.floor( formationModifier * allShipEquipmentAA ); } function fleetCombinedAdjustedAntiAir(mainFleetObj, escortFleetObj, formationModifier) { var mainAllShipEquipmentAA = mainFleetObj.ship().reduce( function(curAA, ship) { return curAA + shipEquipmentAntiAir(ship, true); }, 0); var escortAllShipEquipmentAA = escortFleetObj.ship().reduce( function(curAA, ship) { return curAA + shipEquipmentAntiAir(ship, true); }, 0); return (2/1.3) * Math.floor( formationModifier * (mainAllShipEquipmentAA + escortAllShipEquipmentAA) ); } /** * @return {number} an integer indicating how many planes will be shotdown. * @param {Object} shipObj - instance of KC3Ship * @param {Object} fleetObj - instance(s) of KC3Fleet, * if combined fleet requested, should pass nested object: {main: mainFleetObj, escort: escortFleetObj}. * @param {number} formationModifier - formation modifier, see #getFormationModifiers * @param {number} K - AACI modifier, default to 1 * @param {number} onCombinedFleetNum - if ship on combined fleet, pass her fleet number (1 or 2), otherwise 0. */ function shipFixedShotdown(shipObj, fleetObj, formationModifier, K = 1, onCombinedFleetNum = 0) { var floor = specialFloor(shipObj); var adjustedAA = shipAdjustedAntiAir(shipObj); return Math.floor( (floor(adjustedAA) + Math.floor( Array.isArray(fleetObj) ? fleetCombinedAdjustedAntiAir(fleetObj.main, fleetObj.escort, formationModifier) : fleetAdjustedAntiAir(fleetObj, formationModifier) ) ) * K / 10 * getCombinedFleetModifier(onCombinedFleetNum) ); } // avoid modifying this structure directly, use "declareAACI" instead. var AACITable = {}; // typeIcons is a array including [ship icon, equip icon, ...] 
// predicateShipMst is a function f: f(mst) // predicateShipObj is a function f: f(shipObj) // returns a boolean to indicate whether the ship in question (with equipments) // is capable of performing such type of AACI function declareAACI( apiId, fixedBonus, modifier, typeIcons, predicateShipSlots, predicateWithEquips) { AACITable[apiId] = { id: apiId, fixed: fixedBonus, modifier: modifier, icons: typeIcons, predicateShipMst: predicateShipSlots, predicateShipObj: predicateWithEquips }; } function isNotSubmarine( mst ) { var stype = mst.api_stype; return [13 /* SS */, 14 /* SSV */].indexOf( stype ) === -1; } function isBattleship( mst ) { var stype = mst.api_stype; return [8 /* FBB */, 9 /* BB */, 10 /* BBV */].indexOf( stype ) !== -1; } function isAkizukiClass( mst ) { return mst.api_ctype === 54; /* return [ 421, 330, // Akizuki & Kai 422, 346, // Teruzuki & Kai 423, 357, // Hatsuzuki & Kai 532, 537, // Suzutsuki & Kai ].indexOf( mst.api_id ) !== -1; */ } function isIseClassKai( mst ) { return mst.api_ctype === 2 // if non-Kai excluded && mst.api_id !== 77 && mst.api_id !== 87; // ~~Ise Kai Ni included, but Hyuuga Kai Ni incapable for both kind 25 and 28~~ // https://twitter.com/MadonoHaru/status/1121902964120023040 // wtf, it was a bug before 2019-04-30 maint // https://twitter.com/KanColle_STAFF/status/1123197646561136642 //&& mst.api_id !== 554; } // Battleships capable for 12cm 30tube Rocket Launcher Kai 2 function isBattleShipKai( mst ) { return [ 82, // Ise Kai 553, // Ise K2 88, // Hyuuga Kai 554, // Hyuuga K2 148, // Musashi Kai 546, // Musashi K2 ].indexOf( mst.api_id ) !== -1; } // British-relevant ships can trigger AACI with 20-tube 7inch UP Rocket Launchers function isBritishShips( mst ) { return [ 67, // Queen Elizabeth Class 78, // Ark Royal Class 82, // Jervis Class 88, // Nelson Class ].indexOf( mst.api_ctype ) !== -1 || // Kongou Class Kai Ni, K2C [149, 150, 151, 152, 591, 592].indexOf( mst.api_id ) !== -1; } function masterIdEq( n ) { return 
function(mst) { return mst.api_id === n; }; } function ctypeIdEq( n ) { return function(mst) { return mst.api_ctype === n; }; } // Icons used to declare AACI type var surfaceShipIcon = 0, // Means no icon, low priority akizukiIcon = 421, battleShipIcon = 131, // Yamato, weigh anchor! battleShipKaiIcon = 148, // Musashi Kai represents musashiK2Icon = 546, iseIcon = 77, mayaK2Icon = 428, isuzuK2Icon = 141, kasumiK2BIcon = 470, satsukiK2Icon = 418, kinuK2Icon = 487, yuraK2Icon = 488, fumizukiK2Icon = 548, uit25Icon = 539, i504Icon = 530, tenryuuK2Icon = 477, tatsutaK2Icon = 478, isokazeBkIcon = 557, hamakazeBkIcon = 558, warspiteIcon = 439, gotlandKaiIcon = 579, johnstonIcon = 562, fletcherIcon = 596, atlantaIcon = 597, haMountIcon = 16, radarIcon = 11, aaFdIcon = 30, aaGunIcon = 15, lcMainGunIcon = 3, type3ShellIcon = 12, // Special combined icons for Build-in HA / CDMG / etc biHaMountIcon = "16+30", // HA plus AAFD cdmgIcon = "15+15", // AAGun double haMountNbifdIcon = "16-30", // HA without AAFD aaGunNotCdIcon = "15-15", // Non-CD AA Machine Gun aaGunK2RockeLaunIcon = "15+31", // 12cm 30tube Rocket Launcher Kai 2 haMountKaiAmg = "16+15", // 10cm Twin High-angle Mount Kai + Additional Machine Gun haMountKaiRadar = "16+11", // 5inch Single Gun Mount Mk.30 Kai + GFCS Mk.37 / GFCS Mk.37 + next one haMountCdIcon = "16+16"; // 5inch Twin Dual-purpose Gun Mount (Concentrated Deployment) var isMusashiK2 = masterIdEq( musashiK2Icon ); var isMayaK2 = masterIdEq( mayaK2Icon ); var isIsuzuK2 = masterIdEq( isuzuK2Icon ); var isKasumiK2B = masterIdEq( kasumiK2BIcon ); var isSatsukiK2 = masterIdEq( satsukiK2Icon ); var isKinuK2 = masterIdEq( kinuK2Icon ); var isYuraK2 = masterIdEq( yuraK2Icon ); var isFumizukiK2 = masterIdEq( fumizukiK2Icon ); var isUit25 = masterIdEq( uit25Icon ); var isI504 = masterIdEq( i504Icon ); var isTenryuuK2 = masterIdEq( tenryuuK2Icon ); var isTatsutaK2 = masterIdEq( tatsutaK2Icon ); var isIsokazeBk = masterIdEq( isokazeBkIcon ); var isHamakazeBk = 
masterIdEq( hamakazeBkIcon ); var isGotlandKai = masterIdEq( gotlandKaiIcon ); var isFletcherClass = ctypeIdEq( 91 ); var isAtlantaClass = ctypeIdEq( 99 ); var isYuubariK2 = masterIdEq( 622 ); // turns a "shipObj" into the list of her equipments // for its parameter function "pred" function withEquipmentMsts( pred ) { return function(shipObj) { var gears = allShipEquipments(shipObj) .filter( function(g) { return g.masterId !== 0; } ) .map( function(g) { return g.master(); }); return pred(gears); }; } // "hasAtLeast(pred)(n)(xs)" is the same as: // xs.filter(pred).length >= n function hasAtLeast(pred, n) { return function(xs) { return xs.filter(pred).length >= n; }; } // "hasSome(pred)(xs)" is the same as: // xs.some(pred) function hasSome(pred) { return function(xs) { return xs.some(pred); }; } // check if slot num of ship (excluding ex-slot) equals or greater // note: 8cm HA mount variants and AA machine guns can be equipped in ex-slot function slotNumAtLeast(n) { return function(mst) { var slotnum = mst.api_slot_num; return slotnum >= n; }; } // All non-sub surface ships // according KC vita codes, no ship type is used in predictions, (only ctype for Akizuki kinds, id for Maya K2 kinds) // might be able to trigger as long as ship can equip corresponding equipment. 
// but kind 5,7,8,9 (contains HA mount) seems never trigger on Akizuki-class, // reason might be: https://gist.github.com/Nishisonic/62cead1f57a323c737019d6b630fa4a5 declareAACI( 5, 4, 1.5, [surfaceShipIcon, biHaMountIcon, biHaMountIcon, radarIcon], predAllOf(isNotSubmarine, predNot(isAkizukiClass), slotNumAtLeast(3)), withEquipmentMsts( predAllOf( hasAtLeast(isBuiltinHighAngleMount, 2), hasSome( isAARadar )) ) ); declareAACI( 8, 4, 1.4, [surfaceShipIcon, biHaMountIcon, radarIcon], predAllOf(isNotSubmarine, predNot(isAkizukiClass), slotNumAtLeast(2)), withEquipmentMsts( predAllOf( hasSome( isBuiltinHighAngleMount ), hasSome( isAARadar )) ) ); declareAACI( 7, 3, 1.35, [surfaceShipIcon, haMountIcon, aaFdIcon, radarIcon], // 8cm HA variants can be equipped on ex-slot for some ships, min slots can be 2 // but for now, these ships are all not 2-slot DD predAllOf(isNotSubmarine, predNot(isAkizukiClass), slotNumAtLeast(3)), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAFD ), hasSome( isAARadar )) ) ); declareAACI( 9, 2, 1.3, [surfaceShipIcon, haMountIcon, aaFdIcon], predAllOf(isNotSubmarine, predNot(isAkizukiClass), slotNumAtLeast(1)), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAFD )) ) ); declareAACI( 12, 3, 1.25, [surfaceShipIcon, cdmgIcon, aaGunIcon, radarIcon], predAllOf(isNotSubmarine, slotNumAtLeast(2)), withEquipmentMsts( predAllOf( hasSome( isCDMG ), /* CDMGs are AAGuns, so we need at least 2 AA guns including the CDMG one we have just counted */ hasAtLeast(isAAGun, 2), hasSome( isAARadar )) ) ); // battleship special AACIs declareAACI( 4, 6, 1.5, [battleShipIcon, lcMainGunIcon, type3ShellIcon, aaFdIcon, radarIcon], predAllOf(isBattleship, slotNumAtLeast(4)), withEquipmentMsts( predAllOf( hasSome( isLargeCaliberMainGun ), hasSome( isType3Shell ), hasSome( isAAFD ), hasSome( isAARadar )) ) ); declareAACI( 6, 4, 1.45, [battleShipIcon, lcMainGunIcon, type3ShellIcon, aaFdIcon], predAllOf(isBattleship, 
slotNumAtLeast(3)), withEquipmentMsts( predAllOf( hasSome( isLargeCaliberMainGun ), hasSome( isType3Shell ), hasSome( isAAFD )) ) ); // Ise-class Kai only AACIs declareAACI( 25, 7, 1.55, [iseIcon, aaGunK2RockeLaunIcon, radarIcon, type3ShellIcon], predAllOf(isIseClassKai, slotNumAtLeast(2)), withEquipmentMsts( predAllOf( hasSome( is12cm30tubeRocketLauncherKai2 ), hasSome( isType3Shell ), hasSome( isAARadar )) ) ); // Musashi K2 declareAACI( 26, 6, 1.4, [musashiK2Icon, haMountKaiAmg, radarIcon], predAllOf(isMusashiK2), withEquipmentMsts( predAllOf( hasSome( is10cmTwinHighAngleMountKaiAMG ), hasSome( isAARadar )) ) ); // api_kind 27 still unknown // Ise-class Kai + Musashi Kai declareAACI( 28, 4, 1.4, [battleShipKaiIcon, aaGunK2RockeLaunIcon, radarIcon], predAllOf(isBattleShipKai), withEquipmentMsts( predAllOf( hasSome( is12cm30tubeRocketLauncherKai2 ), hasSome( isAARadar )) ) ); // Akizuki-class AACIs declareAACI( 1, 7, 1.7, [akizukiIcon, haMountIcon, haMountIcon, radarIcon], predAllOf(isAkizukiClass, slotNumAtLeast(3)), withEquipmentMsts( predAllOf( hasAtLeast( isHighAngleMount, 2 ), hasSome( isRadar )) ) ); declareAACI( 2, 6, 1.7, [akizukiIcon, haMountIcon, radarIcon], predAllOf(isAkizukiClass, slotNumAtLeast(2)), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isRadar )) ) ); declareAACI( 3, 4, 1.6, [akizukiIcon, haMountIcon, haMountIcon], predAllOf(isAkizukiClass, slotNumAtLeast(2)), withEquipmentMsts( hasAtLeast( isHighAngleMount, 2 ) ) ); // Maya K2 declareAACI( 10, 8, 1.65, [mayaK2Icon, haMountIcon, cdmgIcon, radarIcon], // Omitted slot num for specified ship, same below predAllOf(isMayaK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isCDMG ), hasSome( isAARadar )) ) ); declareAACI( 11, 6, 1.5, [mayaK2Icon, haMountIcon, cdmgIcon], predAllOf(isMayaK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isCDMG )) ) ); // api_kind 13 deprecated by devs // might be non-MayaK2 biHaMount+CDMG+AirRadar +4 
x1.35 // Isuzu K2 declareAACI( 14, 4, 1.45, [isuzuK2Icon, haMountIcon, aaGunIcon, radarIcon], predAllOf(isIsuzuK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAGun ), hasSome( isAARadar )) ) ); declareAACI( 15, 3, 1.3, [isuzuK2Icon, haMountIcon, aaGunIcon], predAllOf(isIsuzuK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAGun )) ) ); // Kasumi K2B, Yuubari K2 declareAACI( 16, 4, 1.4, [kasumiK2BIcon, haMountIcon, aaGunIcon, radarIcon], predAnyOf(isKasumiK2B, isYuubariK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAGun ), hasSome( isAARadar )) ) ); declareAACI( 17, 2, 1.25, [kasumiK2BIcon, haMountIcon, aaGunIcon], predAllOf(isKasumiK2B), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAGun )) ) ); // Satsuki K2 declareAACI( 18, 2, 1.2, [satsukiK2Icon, cdmgIcon], predAllOf(isSatsukiK2), withEquipmentMsts( hasSome( isCDMG ) ) ); // Kinu K2 declareAACI( 19, 5, 1.45, [kinuK2Icon, haMountNbifdIcon, cdmgIcon], predAllOf(isKinuK2), withEquipmentMsts( predAllOf( /* any HA with builtin AAFD will not work */ predNot( hasSome( isBuiltinHighAngleMount )), hasSome( isHighAngleMount ), hasSome( isCDMG )) ) ); declareAACI( 20, 3, 1.25, [kinuK2Icon, cdmgIcon], predAllOf(isKinuK2), withEquipmentMsts( hasSome( isCDMG ) ) ); // Yura K2 declareAACI( 21, 5, 1.45, [yuraK2Icon, haMountIcon, radarIcon], predAllOf(isYuraK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAARadar )) ) ); // Fumizuki K2 declareAACI( 22, 2, 1.2, [fumizukiK2Icon, cdmgIcon], predAllOf(isFumizukiK2), withEquipmentMsts( hasSome( isCDMG ) ) ); // UIT-25 / I-504 declareAACI( 23, 1, 1.05, [uit25Icon, aaGunNotCdIcon], predAnyOf(isUit25, isI504), withEquipmentMsts( hasSome( isAAGunNotCD ) ) ); // Tenryuu K2 / Tatsuta K2 declareAACI( 24, 3, 1.25, [tatsutaK2Icon, haMountIcon, aaGunNotCdIcon], predAnyOf(isTenryuuK2, isTatsutaK2), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), 
hasSome( isAAGunNotCD )) ) ); // Isokaze B Kai / Hamakaze B Kai declareAACI( 29, 5, 1.55, [isokazeBkIcon, haMountIcon, radarIcon], predAnyOf(isIsokazeBk, isHamakazeBk), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAARadar )) ) ); // Tenryuu K2, Gotland Kai declareAACI( 30, 3, 1.3, [tenryuuK2Icon, haMountIcon, haMountIcon, haMountIcon], predAnyOf(isTenryuuK2, isGotlandKai), withEquipmentMsts( predAllOf( hasAtLeast( isHighAngleMount, 3 )) ) ); declareAACI( 31, 2, 1.25, [tenryuuK2Icon, haMountIcon, haMountIcon], predAllOf(isTenryuuK2), withEquipmentMsts( predAllOf( hasAtLeast( isHighAngleMount, 2 )) ) ); // British-relevant ships // Known for now: Nelson, Warspite, Ark Royal, Jervis, all Kongou-class K2 // (QF2 + FCR) OR (QF2 + 7UP) OR (7UP + 7UP) declareAACI( 32, 3, 1.2, [warspiteIcon, aaGunK2RockeLaunIcon, cdmgIcon], predAnyOf(isBritishShips), withEquipmentMsts( predAnyOf( predAllOf( hasSome( isBritishRocketLauncher ), hasSome( isBritishAAGun )), predAllOf( hasAtLeast( is20tube7inchUPRocketLaunchers, 2 )) ) ) ); // Gotland Kai declareAACI( 33, 3, 1.35, [gotlandKaiIcon, haMountIcon, aaGunIcon], predAllOf(isGotlandKai), withEquipmentMsts( predAllOf( hasSome( isHighAngleMount ), hasSome( isAAGun )) ) ); // Fletcher-class all forms (Fletcher, Johnston) declareAACI( 34, 7, 1.6, [fletcherIcon, haMountKaiRadar, haMountKaiRadar], predAllOf(isFletcherClass), withEquipmentMsts( predAllOf( hasAtLeast( is5inchSingleMountKaiWithGfcs, 2 )) ) ); declareAACI( 35, 6, 1.55, [fletcherIcon, haMountKaiRadar, haMountIcon], predAllOf(isFletcherClass), withEquipmentMsts( predAnyOf( hasAtLeast( is5inchSingleMountKaiWithGfcs, 2 ), predAllOf( hasSome( is5inchSingleMountOrKai ), hasSome( is5inchSingleMountKaiWithGfcs )) ) ) ); declareAACI( 36, 6, 1.55, [fletcherIcon, haMountIcon, haMountIcon, radarIcon], // there are enough slots for Kai only predAllOf(isFletcherClass, slotNumAtLeast(3)), withEquipmentMsts( predAllOf( predAnyOf( hasAtLeast( is5inchSingleMountOrKai, 2 
), hasAtLeast( is5inchSingleMountKaiWithGfcs, 2 ), predAllOf( hasSome( is5inchSingleMountOrKai ), hasSome( is5inchSingleMountKaiWithGfcs )) ), hasSome( isGfcsRadar )) ) ); declareAACI( 37, 4, 1.45, [fletcherIcon, haMountIcon, haMountIcon], predAllOf(isFletcherClass), withEquipmentMsts( predAnyOf( hasAtLeast( is5inchSingleMountKai, 2 ), hasAtLeast( is5inchSingleMountKaiWithGfcs, 2 ), predAllOf( hasSome( is5inchSingleMountKai ), hasSome( is5inchSingleMountKaiWithGfcs )) ) ) ); // Atlanta-class declareAACI( 39, 10, 1.7, [atlantaIcon, haMountKaiRadar, haMountCdIcon], predAllOf(isAtlantaClass), withEquipmentMsts( predAllOf( hasSome( is5inchTwinDualMountCDWithGfcs ), hasSome( is5inchTwinDualMountCD )) ) ); declareAACI( 40, 10, 1.7, [atlantaIcon, haMountCdIcon, haMountCdIcon, radarIcon], predAllOf(isAtlantaClass), withEquipmentMsts( predAllOf( predAnyOf( hasAtLeast( is5inchTwinDualMountCD, 2 ), predAllOf( hasSome( is5inchTwinDualMountCD ), hasSome( is5inchTwinDualMountCDWithGfcs )) ), hasSome( isGfcsRadar )) ) ); declareAACI( 41, 9, 1.65, [atlantaIcon, haMountCdIcon, haMountCdIcon], predAllOf(isAtlantaClass), withEquipmentMsts( predAnyOf( hasAtLeast( is5inchTwinDualMountCD, 2 ), predAllOf( hasSome( is5inchTwinDualMountCD ), hasSome( is5inchTwinDualMountCDWithGfcs )) ) ) ); // return a list of possible AACI APIs based on ship and her equipments // - returns a list of **strings**, not numbers // (since object keys has to be strings, and AACITable[key] accepts keys // of both number and string anyway) // - because of the game mechanism, some AACI API Ids returned might be overlapped // and never triggered, "possibleAACIs" is **not** responsible for removing never-triggered // AACI from resulting list. 
function shipPossibleAACIs(shipObj) {
    var result = [];
    Object.keys(AACITable).forEach(function(apiId) {
        var entry = AACITable[apiId];
        if (entry.predicateShipMst(shipObj.master())
            && entry.predicateShipObj(shipObj)) {
            result.push(apiId);
        }
    });
    return result;
}

// return a list of all possible AACI based on master ship only, equipments ignored
function shipAllPossibleAACIs(shipMst) {
    var matched = [];
    Object.keys(AACITable).forEach(function(apiId) {
        if (AACITable[apiId].predicateShipMst(shipMst)) {
            matched.push(apiId);
        }
    });
    return matched;
}

// return a deduplicated list of possible AACI APIs based on all ships in fleet
function fleetPossibleAACIs(fleetObj) {
    var aaciSet = {};
    fleetObj.ship(function(rId, ind, shipObj) {
        shipPossibleAACIs(shipObj).forEach(function(apiId) {
            // object keys act as a set, so duplicates across ships collapse
            aaciSet[apiId] = true;
        });
    });
    return Object.keys(aaciSet);
}

// return: a list of AACI objects sorted by effect desc,
// as the most effective AACI gets priority to be triggered.
// in-game, priority is based on kinds of conditions (in `if...return` flavor),
// research about AACI priority for a ship:
// * https://docs.google.com/document/d/1XBrQgQsA_pM3fXsDDC7e1N5Xpr2p59kmvQbnY2UH0Ko
// * https://gist.github.com/Nishisonic/62cead1f57a323c737019d6b630fa4a5
// * http://nishisonic.xsrv.jp/archives/809
// here we still use the simple way of ordering by 'effect' since new AACI kinds are not covered by investigations.
// note: priority is different from trigger chance rate, since the random number roll is done just once,
// a lower priority AACI is still possible to be triggered if its chance value is greater.
// on the opposite, both lower priority and lesser chance means never triggered.
// param: AACI IDs from possibleAACIs functions // param: a optional callback function to customize ordering function sortedPossibleAaciList(aaciIds, sortCallback) { var aaciList = []; if(!!aaciIds && Array.isArray(aaciIds)) { $.each( aaciIds, function(i, apiId) { if(!!AACITable[apiId]) aaciList.push( AACITable[apiId] ); }); var defaultOrder = function(a, b) { // Order by fixed desc, modifier desc, icons[0] desc return b.fixed - a.fixed || b.modifier - a.modifier || b.icons[0] - a.icons[0]; }; aaciList = aaciList.sort(sortCallback || defaultOrder); } return aaciList; } function sortedFleetPossibleAaciList(triggeredShipAaciIds) { return sortedPossibleAaciList(triggeredShipAaciIds, function(a, b) { // Order by (API) id desc return b.id - a.id; }); } function shipFixedShotdownRange(shipObj, fleetObj, formationModifier, onCombinedFleetNum) { var possibleAACIModifiers = fleetPossibleAACIs(fleetObj).map( function( apiId ) { return AACITable[apiId].modifier; }); // default value 1 is always available, making call to Math.max always non-empty possibleAACIModifiers.push( 1 ); var mod = Math.max.apply( null, possibleAACIModifiers ); return [ shipFixedShotdown(shipObj, fleetObj, formationModifier, 1, onCombinedFleetNum), shipFixedShotdown(shipObj, fleetObj, formationModifier, mod, onCombinedFleetNum), mod ]; } function shipFixedShotdownRangeWithAACI(shipObj, fleetObj, formationModifier, onCombinedFleetNum) { var possibleAaciList = sortedPossibleAaciList(fleetPossibleAACIs(fleetObj), function(a, b){ // Order by modifier desc, fixed desc, icons[0] desc return b.modifier - a.modifier || b.fixed - a.fixed || b.icons[0] - a.icons[0]; }); var aaciId = possibleAaciList.length > 0 ? possibleAaciList[0].id : 0; var mod = possibleAaciList.length > 0 ? 
possibleAaciList[0].modifier : 1; return [ shipFixedShotdown(shipObj, fleetObj, formationModifier, 1, onCombinedFleetNum), shipFixedShotdown(shipObj, fleetObj, formationModifier, mod, onCombinedFleetNum), aaciId ]; } function shipMaxShotdownFixedBonus(shipObj) { var possibleBonuses = shipPossibleAACIs(shipObj).map( function( apiId ) { return AACITable[apiId].fixed; }); // default value 0 is always available, making call to Math.max always non-empty possibleBonuses.push( 0 ); return Math.max.apply( null, possibleBonuses ); } function shipMaxShotdownAllBonuses(shipObj) { var possibleAaciList = sortedPossibleAaciList(shipPossibleAACIs(shipObj)); return possibleAaciList.length > 0 ? [possibleAaciList[0].id, possibleAaciList[0].fixed, possibleAaciList[0].modifier] : [0, 0, 1]; } // exporting module window.AntiAir = { getFleetEquipmentModifier: getFleetEquipmentModifier, getShipEquipmentModifier: getShipEquipmentModifier, getFleetImprovementModifier: getFleetImprovementModifier, getShipImprovementModifier: getShipImprovementModifier, calcEquipmentAADefense: calcEquipmentAADefense, shipEquipmentAntiAir: shipEquipmentAntiAir, shipAdjustedAntiAir: shipAdjustedAntiAir, specialFloor: specialFloor, shipProportionalShotdown: shipProportionalShotdown, shipProportionalShotdownRate: shipProportionalShotdownRate, getFormationModifiers: getFormationModifiers, fleetAdjustedAntiAir: fleetAdjustedAntiAir, fleetCombinedAdjustedAntiAir: fleetCombinedAdjustedAntiAir, shipFixedShotdown: shipFixedShotdown, shipFixedShotdownRange: shipFixedShotdownRange, shipFixedShotdownRangeWithAACI: shipFixedShotdownRangeWithAACI, shipMaxShotdownFixedBonus: shipMaxShotdownFixedBonus, shipMaxShotdownAllBonuses: shipMaxShotdownAllBonuses, AACITable: AACITable, shipPossibleAACIs: shipPossibleAACIs, shipAllPossibleAACIs: shipAllPossibleAACIs, fleetPossibleAACIs: fleetPossibleAACIs, sortedPossibleAaciList: sortedPossibleAaciList }; })();
{ "pile_set_name": "Github" }
# encoding: utf-8 # # Copyright 2009-2020 Greg Neagle. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ munkihash.py Created by Greg Neagle on 2016-12-14. Munki's hash functions """ from __future__ import absolute_import, print_function import hashlib import os def gethash(filename, hash_function): """ Calculates the hashvalue of the given file with the given hash_function. Args: filename: The file name to calculate the hash value of. hash_function: The hash function object to use, which was instantiated before calling this function, e.g. hashlib.md5(). Returns: The hashvalue of the given file as hex string. """ if not os.path.isfile(filename): return 'NOT A FILE' try: fileref = open(filename, 'rb') while True: chunk = fileref.read(2**16) if not chunk: break hash_function.update(chunk) fileref.close() return hash_function.hexdigest() except (OSError, IOError): return 'HASH_ERROR' def getmd5hash(filename): """ Returns hex of MD5 checksum of a file """ hash_function = hashlib.md5() return gethash(filename, hash_function) def getsha256hash(filename): """ Returns the SHA-256 hash value of a file as a hex string. """ hash_function = hashlib.sha256() return gethash(filename, hash_function) if __name__ == '__main__': print('This is a library of support tools for the Munki Suite.')
{ "pile_set_name": "Github" }
function out = get_scale_sample(im, pos, base_target_sz, scaleFactors, scale_window, scale_model_sz)

% out = get_scale_sample(im, pos, base_target_sz, scaleFactors, scale_window, scale_model_sz)
%
% Extracts a sample for the scale filter at the current
% location and scale.
%
% im              - input image (HxWxC)
% pos             - target centre as [row, col]
% base_target_sz  - target size at scale 1 as [height, width]
% scaleFactors    - vector of scale multipliers, one per sample column
% scale_window    - per-scale weight applied to each feature column
% scale_model_sz  - fixed size every patch is resized to before
%                   feature extraction, so all columns have equal length
%
% Returns a single-precision matrix with one feature column per scale.

nScales = length(scaleFactors);

for s = 1:nScales
    % patch size at this scale, centred on pos
    patch_sz = floor(base_target_sz * scaleFactors(s));

    % pixel coordinate ranges of the patch (pos is [row, col])
    xs = floor(pos(2)) + (1:patch_sz(2)) - floor(patch_sz(2)/2);
    ys = floor(pos(1)) + (1:patch_sz(1)) - floor(patch_sz(1)/2);

    % check for out-of-bounds coordinates, and set them to the values at
    % the borders (replicate-border padding via index clamping)
    xs(xs < 1) = 1;
    ys(ys < 1) = 1;
    xs(xs > size(im,2)) = size(im,2);
    ys(ys > size(im,1)) = size(im,1);

    % extract image
    im_patch = im(ys, xs, :);

    % resize image to model size so every scale yields the same
    % feature dimensionality
    im_patch_resized = imResample(im_patch, scale_model_sz);

    % extract scale features
    % NOTE(review): fhog with bin size 4 appears to come from Piotr's
    % toolbox and the result is truncated to the first 31 channels here;
    % confirm the channel layout against the fhog implementation in use.
    temp_hog = fhog(single(im_patch_resized), 4);
    temp = temp_hog(:,:,1:31);

    if s == 1
        % allocate the output once the per-scale feature length is known
        out = zeros(numel(temp), nScales, 'single');
    end

    % window: weight this scale's flattened features
    out(:,s) = temp(:) * scale_window(s);
end
{ "pile_set_name": "Github" }
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

const ModuleDependency = require("./ModuleDependency");
const ModuleDependencyTemplateAsRequireId = require("./ModuleDependencyTemplateAsRequireId");

// Dependency recorded for a single item requested through an AMD
// `require([...])` call; rendered via the require-id template below.
class AMDRequireItemDependency extends ModuleDependency {
	/**
	 * @param {string} request the module request string of the dependency
	 * @param {*} range source range of the expression — presumably the
	 *     [start, end] offsets used by the dependency templates; confirm
	 *     against ModuleDependency's users
	 */
	constructor(request, range) {
		super(request);
		this.range = range;
	}

	// Dependency category identifier used by webpack's reporting/handling.
	get type() {
		return "amd require";
	}
}

AMDRequireItemDependency.Template = ModuleDependencyTemplateAsRequireId;

module.exports = AMDRequireItemDependency;
{ "pile_set_name": "Github" }
<div class="header"> <a href="#" class="nav">List</a> <h2>Backbone Cellar</h2> </div> <div class="content details"> <h1><%= name %></h1> <span><%= year %></span><br/> <span><%= grapes %></span><br/> <span><%= region %></span>, <span><%= country %></span> <br/><br/> <p> <img src="pics/<%= picture %>"/> <span><%= description %></span> </p> </div>
{ "pile_set_name": "Github" }
<?php

declare(strict_types=1);

namespace Sabre\DAV;

/**
 * Collection class.
 *
 * Helper base class for collection nodes: implements name-based lookup
 * (getChild / childExists) on top of getChildren(), and rejects file and
 * directory creation with a permission-denied error by default.
 *
 * @copyright Copyright (C) fruux GmbH (https://fruux.com/)
 * @author Evert Pot (http://evertpot.com/)
 * @license http://sabre.io/license/ Modified BSD License
 */
abstract class Collection extends Node implements ICollection
{
    /**
     * Returns a child object, by its name.
     *
     * Works by scanning the full getChildren() result and comparing names,
     * so subclasses should override this when a direct lookup is cheaper.
     *
     * Must throw Sabre\DAV\Exception\NotFound when no such node exists.
     *
     * @param string $name
     *
     * @throws Exception\NotFound
     *
     * @return INode
     */
    public function getChild($name)
    {
        foreach ($this->getChildren() as $node) {
            if ($name === $node->getName()) {
                return $node;
            }
        }

        throw new Exception\NotFound('File not found: '.$name);
    }

    /**
     * Checks if a child-node exists.
     *
     * Implemented in terms of getChild(); overriding it with a direct
     * existence check is usually an easy optimization.
     *
     * @param string $name
     *
     * @return bool
     */
    public function childExists($name)
    {
        try {
            $this->getChild($name);
        } catch (Exception\NotFound $e) {
            return false;
        }

        return true;
    }

    /**
     * Creates a new file in the directory. Denied by default.
     *
     * Data will either be supplied as a stream resource, or in certain cases
     * as a string; implementations may need to support both. On success an
     * implementation may return the new file's ETag (surrounded by
     * double-quotes, the quotes being part of the string); omit the ETag if
     * it cannot be determined accurately or the stored bytes differ from
     * what was submitted.
     *
     * @param string          $name Name of the file
     * @param resource|string $data Initial payload
     *
     * @throws Exception\Forbidden always, in this default implementation
     *
     * @return string|null
     */
    public function createFile($name, $data = null)
    {
        throw new Exception\Forbidden('Permission denied to create file (filename '.$name.')');
    }

    /**
     * Creates a new subdirectory. Denied by default.
     *
     * @param string $name
     *
     * @throws Exception\Forbidden
     */
    public function createDirectory($name)
    {
        throw new Exception\Forbidden('Permission denied to create directory');
    }
}
{ "pile_set_name": "Github" }
.TH std::deque<T,Allocator>::pop_front 3 "2019.08.27" "http://cppreference.com" "C++ Standard Library"
.SH NAME
std::deque<T,Allocator>::pop_front \- std::deque<T,Allocator>::pop_front

.SH Synopsis

   void pop_front();

   Removes the first element of the container. If there are no elements in the
   container, the behavior is undefined.

   Iterators and references to the erased element are invalidated. It
   is unspecified whether the past-the-end iterator is invalidated if   \fI(until C++11)\fP
   the element is the last element in the container. Other references
   and iterators are not affected.
   Iterators and references to the erased element are invalidated. If
   the element is the last element in the container, the past-the-end   \fI(since C++11)\fP
   iterator is also invalidated. Other references and iterators are
   not affected.

.SH Parameters

   \fI(none)\fP

.SH Return value

   \fI(none)\fP

.SH Complexity

   Constant.

.SH Exceptions

   Does not throw.

.SH See also

   pop_back   removes the last element
              \fI(public member function)\fP
   push_front inserts an element to the beginning
              \fI(public member function)\fP
{ "pile_set_name": "Github" }
from numpy import linspace, zeros


def diff(f, a, b, n):
    """Sample f on [a, b] with n intervals and approximate its derivative.

    Args:
        f: callable of one float argument.
        a, b: interval endpoints.
        n: number of sub-intervals (n+1 sample points).

    Returns:
        (y, z) where y[i] = f(x[i]) and z[i] is the forward-difference
        estimate of f'(x[i]); the last point uses a backward difference.
    """
    x = linspace(a, b, n+1)
    y = zeros(len(x))
    z = zeros(len(x))
    h = (b - a)/float(n)
    for i in range(len(x)):
        # bug fix: original called undefined name func(x[i]); the
        # parameter is f
        y[i] = f(x[i])
    for i in range(len(x)-1):
        z[i] = (y[i+1] - y[i])/h
    # backward difference at the right endpoint
    z[n] = (y[n] - y[n-1])/h
    return y, z


if __name__ == '__main__':
    # Guarding the script part makes the module importable without
    # touching sys.argv.
    from scitools.std import plot, StringFunction
    import sys
    f_formula = sys.argv[1]
    # NOTE(review): eval on command-line arguments executes arbitrary
    # expressions — acceptable for a teaching script, not for untrusted input.
    a = eval(sys.argv[2])
    b = eval(sys.argv[3])
    n = int(sys.argv[4])
    f = StringFunction(f_formula)
    y, z = diff(f, a, b, n)
    # bug fix: x was local to diff() and undefined here; recompute the grid
    x = linspace(a, b, n+1)
    plot(x, y, 'r-', x, z, 'b-', legend=('function', 'derivative'))
{ "pile_set_name": "Github" }
// Installs the polyfill for the stage-1 "Math extensions" proposal's
// Math.DEG_PER_RAD, then re-exports the constant's value.
require('../../modules/es7.math.deg-per-rad');
// π/180 — the number of radians in one degree.
module.exports = Math.PI / 180;
{ "pile_set_name": "Github" }
/**
 * \file blowfish.h
 *
 * \brief Blowfish block cipher
 */
/*
 *  Copyright (C) 2006-2015, ARM Limited, All Rights Reserved
 *  SPDX-License-Identifier: Apache-2.0
 *
 *  Licensed under the Apache License, Version 2.0 (the "License"); you may
 *  not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 *  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *  This file is part of mbed TLS (https://tls.mbed.org)
 */
#ifndef MBEDTLS_BLOWFISH_H
#define MBEDTLS_BLOWFISH_H

#if !defined(MBEDTLS_CONFIG_FILE)
#include "config.h"
#else
#include MBEDTLS_CONFIG_FILE
#endif

#include <stddef.h>
#include <stdint.h>

/* Mode selectors for the crypt functions below. */
#define MBEDTLS_BLOWFISH_ENCRYPT     1
#define MBEDTLS_BLOWFISH_DECRYPT     0

/* Blowfish accepts variable-length keys in this range. */
#define MBEDTLS_BLOWFISH_MAX_KEY_BITS     448
#define MBEDTLS_BLOWFISH_MIN_KEY_BITS     32
#define MBEDTLS_BLOWFISH_ROUNDS      16         /**< Rounds to use. When increasing this value, make sure to extend the initialisation vectors */
#define MBEDTLS_BLOWFISH_BLOCKSIZE   8          /* Blowfish uses 64 bit blocks */

#define MBEDTLS_ERR_BLOWFISH_INVALID_KEY_LENGTH                -0x0016 /**< Invalid key length. */
#define MBEDTLS_ERR_BLOWFISH_HW_ACCEL_FAILED                   -0x0017 /**< Blowfish hardware accelerator failed. */
#define MBEDTLS_ERR_BLOWFISH_INVALID_INPUT_LENGTH              -0x0018 /**< Invalid data input length. */

#if !defined(MBEDTLS_BLOWFISH_ALT)
// Regular implementation
//

#ifdef __cplusplus
extern "C" {
#endif

/**
 * \brief          Blowfish context structure
 */
typedef struct
{
    uint32_t P[MBEDTLS_BLOWFISH_ROUNDS + 2];    /*!<  Blowfish round keys    */
    uint32_t S[4][256];                         /*!<  key dependent S-boxes  */
}
mbedtls_blowfish_context;

/**
 * \brief          Initialize Blowfish context
 *
 * \param ctx      Blowfish context to be initialized
 */
void mbedtls_blowfish_init( mbedtls_blowfish_context *ctx );

/**
 * \brief          Clear Blowfish context
 *
 * \param ctx      Blowfish context to be cleared
 */
void mbedtls_blowfish_free( mbedtls_blowfish_context *ctx );

/**
 * \brief          Blowfish key schedule
 *
 * \param ctx      Blowfish context to be initialized
 * \param key      encryption key
 * \param keybits  must be between 32 and 448 bits
 *
 * \return         0 if successful, or MBEDTLS_ERR_BLOWFISH_INVALID_KEY_LENGTH
 */
int mbedtls_blowfish_setkey( mbedtls_blowfish_context *ctx, const unsigned char *key,
                     unsigned int keybits );

/**
 * \brief          Blowfish-ECB block encryption/decryption
 *
 * \param ctx      Blowfish context
 * \param mode     MBEDTLS_BLOWFISH_ENCRYPT or MBEDTLS_BLOWFISH_DECRYPT
 * \param input    8-byte input block
 * \param output   8-byte output block
 *
 * \return         0 if successful
 */
int mbedtls_blowfish_crypt_ecb( mbedtls_blowfish_context *ctx,
                        int mode,
                        const unsigned char input[MBEDTLS_BLOWFISH_BLOCKSIZE],
                        unsigned char output[MBEDTLS_BLOWFISH_BLOCKSIZE] );

#if defined(MBEDTLS_CIPHER_MODE_CBC)
/**
 * \brief          Blowfish-CBC buffer encryption/decryption
 *                 Length should be a multiple of the block
 *                 size (8 bytes)
 *
 * \note           Upon exit, the content of the IV is updated so that you can
 *                 call the function same function again on the following
 *                 block(s) of data and get the same result as if it was
 *                 encrypted in one call. This allows a "streaming" usage.
 *                 If on the other hand you need to retain the contents of the
 *                 IV, you should either save it manually or use the cipher
 *                 module instead.
 *
 * \param ctx      Blowfish context
 * \param mode     MBEDTLS_BLOWFISH_ENCRYPT or MBEDTLS_BLOWFISH_DECRYPT
 * \param length   length of the input data
 * \param iv       initialization vector (updated after use)
 * \param input    buffer holding the input data
 * \param output   buffer holding the output data
 *
 * \return         0 if successful, or
 *                 MBEDTLS_ERR_BLOWFISH_INVALID_INPUT_LENGTH
 */
int mbedtls_blowfish_crypt_cbc( mbedtls_blowfish_context *ctx,
                        int mode,
                        size_t length,
                        unsigned char iv[MBEDTLS_BLOWFISH_BLOCKSIZE],
                        const unsigned char *input,
                        unsigned char *output );
#endif /* MBEDTLS_CIPHER_MODE_CBC */

#if defined(MBEDTLS_CIPHER_MODE_CFB)
/**
 * \brief          Blowfish CFB buffer encryption/decryption.
 *
 * \note           Upon exit, the content of the IV is updated so that you can
 *                 call the function same function again on the following
 *                 block(s) of data and get the same result as if it was
 *                 encrypted in one call. This allows a "streaming" usage.
 *                 If on the other hand you need to retain the contents of the
 *                 IV, you should either save it manually or use the cipher
 *                 module instead.
 *
 * \param ctx      Blowfish context
 * \param mode     MBEDTLS_BLOWFISH_ENCRYPT or MBEDTLS_BLOWFISH_DECRYPT
 * \param length   length of the input data
 * \param iv_off   offset in IV (updated after use)
 * \param iv       initialization vector (updated after use)
 * \param input    buffer holding the input data
 * \param output   buffer holding the output data
 *
 * \return         0 if successful
 */
int mbedtls_blowfish_crypt_cfb64( mbedtls_blowfish_context *ctx,
                          int mode,
                          size_t length,
                          size_t *iv_off,
                          unsigned char iv[MBEDTLS_BLOWFISH_BLOCKSIZE],
                          const unsigned char *input,
                          unsigned char *output );
#endif /* MBEDTLS_CIPHER_MODE_CFB */

#if defined(MBEDTLS_CIPHER_MODE_CTR)
/**
 * \brief               Blowfish-CTR buffer encryption/decryption
 *
 *                      Warning: You have to keep the maximum use of your
 *                      counter in mind!
 *
 * \param ctx           Blowfish context
 * \param length        The length of the data
 * \param nc_off        The offset in the current stream_block (for resuming
 *                      within current cipher stream). The offset pointer to
 *                      should be 0 at the start of a stream.
 * \param nonce_counter The 64-bit nonce and counter.
 * \param stream_block  The saved stream-block for resuming. Is overwritten
 *                      by the function.
 * \param input         The input data stream
 * \param output        The output data stream
 *
 * \return              0 if successful
 */
int mbedtls_blowfish_crypt_ctr( mbedtls_blowfish_context *ctx,
                        size_t length,
                        size_t *nc_off,
                        unsigned char nonce_counter[MBEDTLS_BLOWFISH_BLOCKSIZE],
                        unsigned char stream_block[MBEDTLS_BLOWFISH_BLOCKSIZE],
                        const unsigned char *input,
                        unsigned char *output );
#endif /* MBEDTLS_CIPHER_MODE_CTR */

#ifdef __cplusplus
}
#endif

#else  /* MBEDTLS_BLOWFISH_ALT */
/* An alternative (e.g. hardware-backed) implementation supplies the same
   API; only the context definition differs. */
#include "blowfish_alt.h"
#endif /* MBEDTLS_BLOWFISH_ALT */

#endif /* blowfish.h */
{ "pile_set_name": "Github" }
/* mbed Microcontroller Library
 * Copyright (c) 2006-2012 ARM Limited
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef RTOS_H
#define RTOS_H

/* Umbrella header: pulls in all RTOS C++ wrapper classes so applications
 * only need to include "rtos.h". */
#include "Thread.h"
#include "Mutex.h"
#include "RtosTimer.h"
#include "Semaphore.h"
#include "Mail.h"
#include "MemoryPool.h"
#include "Queue.h"

// NOTE(review): a using-directive in a public header injects the rtos
// namespace into every translation unit that includes it; kept as-is for
// backward compatibility with existing mbed application code.
using namespace rtos;

#endif
{ "pile_set_name": "Github" }
package libtrust import ( "bytes" "encoding/json" "testing" ) func generateECTestKeys(t *testing.T) []PrivateKey { p256Key, err := GenerateECP256PrivateKey() if err != nil { t.Fatal(err) } p384Key, err := GenerateECP384PrivateKey() if err != nil { t.Fatal(err) } p521Key, err := GenerateECP521PrivateKey() if err != nil { t.Fatal(err) } return []PrivateKey{p256Key, p384Key, p521Key} } func TestECKeys(t *testing.T) { ecKeys := generateECTestKeys(t) for _, ecKey := range ecKeys { if ecKey.KeyType() != "EC" { t.Fatalf("key type must be %q, instead got %q", "EC", ecKey.KeyType()) } } } func TestECSignVerify(t *testing.T) { ecKeys := generateECTestKeys(t) message := "Hello, World!" data := bytes.NewReader([]byte(message)) sigAlgs := []*signatureAlgorithm{es256, es384, es512} for i, ecKey := range ecKeys { sigAlg := sigAlgs[i] t.Logf("%s signature of %q with kid: %s\n", sigAlg.HeaderParam(), message, ecKey.KeyID()) data.Seek(0, 0) // Reset the byte reader // Sign sig, alg, err := ecKey.Sign(data, sigAlg.HashID()) if err != nil { t.Fatal(err) } data.Seek(0, 0) // Reset the byte reader // Verify err = ecKey.Verify(data, alg, sig) if err != nil { t.Fatal(err) } } } func TestMarshalUnmarshalECKeys(t *testing.T) { ecKeys := generateECTestKeys(t) data := bytes.NewReader([]byte("This is a test. I repeat: this is only a test.")) sigAlgs := []*signatureAlgorithm{es256, es384, es512} for i, ecKey := range ecKeys { sigAlg := sigAlgs[i] privateJWKJSON, err := json.MarshalIndent(ecKey, "", " ") if err != nil { t.Fatal(err) } publicJWKJSON, err := json.MarshalIndent(ecKey.PublicKey(), "", " ") if err != nil { t.Fatal(err) } t.Logf("JWK Private Key: %s", string(privateJWKJSON)) t.Logf("JWK Public Key: %s", string(publicJWKJSON)) privKey2, err := UnmarshalPrivateKeyJWK(privateJWKJSON) if err != nil { t.Fatal(err) } pubKey2, err := UnmarshalPublicKeyJWK(publicJWKJSON) if err != nil { t.Fatal(err) } // Ensure we can sign/verify a message with the unmarshalled keys. 
data.Seek(0, 0) // Reset the byte reader signature, alg, err := privKey2.Sign(data, sigAlg.HashID()) if err != nil { t.Fatal(err) } data.Seek(0, 0) // Reset the byte reader err = pubKey2.Verify(data, alg, signature) if err != nil { t.Fatal(err) } } } func TestFromCryptoECKeys(t *testing.T) { ecKeys := generateECTestKeys(t) for _, ecKey := range ecKeys { cryptoPrivateKey := ecKey.CryptoPrivateKey() cryptoPublicKey := ecKey.CryptoPublicKey() pubKey, err := FromCryptoPublicKey(cryptoPublicKey) if err != nil { t.Fatal(err) } if pubKey.KeyID() != ecKey.KeyID() { t.Fatal("public key key ID mismatch") } privKey, err := FromCryptoPrivateKey(cryptoPrivateKey) if err != nil { t.Fatal(err) } if privKey.KeyID() != ecKey.KeyID() { t.Fatal("public key key ID mismatch") } } } func TestExtendedFields(t *testing.T) { key, err := GenerateECP256PrivateKey() if err != nil { t.Fatal(err) } key.AddExtendedField("test", "foobar") val := key.GetExtendedField("test") gotVal, ok := val.(string) if !ok { t.Fatalf("value is not a string") } else if gotVal != val { t.Fatalf("value %q is not equal to %q", gotVal, val) } }
{ "pile_set_name": "Github" }
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package lazyregexp is a thin wrapper over regexp, allowing the use of global
// regexp variables without forcing them to be compiled at init.
package lazyregexp

import (
	"os"
	"regexp"
	"strings"
	"sync"
)

// Regexp is a wrapper around regexp.Regexp, where the underlying regexp will be
// compiled the first time it is needed.
type Regexp struct {
	str  string     // pattern source; cleared once compiled
	once sync.Once  // guards the one-time compilation
	rx   *regexp.Regexp
}

// re returns the compiled regexp, compiling it on first use.
func (r *Regexp) re() *regexp.Regexp {
	r.once.Do(r.build)
	return r.rx
}

// build compiles the pattern and drops the source string so it can be
// garbage-collected. Panics (via MustCompile) if the pattern is invalid.
func (r *Regexp) build() {
	r.rx = regexp.MustCompile(r.str)
	r.str = ""
}

// FindSubmatch is the lazy analogue of (*regexp.Regexp).FindSubmatch.
func (r *Regexp) FindSubmatch(s []byte) [][]byte {
	return r.re().FindSubmatch(s)
}

// FindStringSubmatch is the lazy analogue of (*regexp.Regexp).FindStringSubmatch.
func (r *Regexp) FindStringSubmatch(s string) []string {
	return r.re().FindStringSubmatch(s)
}

// FindStringSubmatchIndex is the lazy analogue of (*regexp.Regexp).FindStringSubmatchIndex.
func (r *Regexp) FindStringSubmatchIndex(s string) []int {
	return r.re().FindStringSubmatchIndex(s)
}

// ReplaceAllString is the lazy analogue of (*regexp.Regexp).ReplaceAllString.
func (r *Regexp) ReplaceAllString(src, repl string) string {
	return r.re().ReplaceAllString(src, repl)
}

// FindString is the lazy analogue of (*regexp.Regexp).FindString.
func (r *Regexp) FindString(s string) string {
	return r.re().FindString(s)
}

// FindAllString is the lazy analogue of (*regexp.Regexp).FindAllString.
func (r *Regexp) FindAllString(s string, n int) []string {
	return r.re().FindAllString(s, n)
}

// MatchString is the lazy analogue of (*regexp.Regexp).MatchString.
func (r *Regexp) MatchString(s string) bool {
	return r.re().MatchString(s)
}

// SubexpNames is the lazy analogue of (*regexp.Regexp).SubexpNames.
func (r *Regexp) SubexpNames() []string {
	return r.re().SubexpNames()
}

// inTest reports whether the binary looks like a "go test" binary, in which
// case regexps are compiled eagerly so invalid patterns fail tests early.
var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test")

// New creates a new lazy regexp, delaying the compiling work until it is first
// needed. If the code is being run as part of tests, the regexp compiling will
// happen immediately.
func New(str string) *Regexp {
	lr := &Regexp{str: str}
	if inTest {
		// In tests, always compile the regexps early.
		lr.re()
	}
	return lr
}
{ "pile_set_name": "Github" }
-- * Metronome IM *
--
-- This file is part of the Metronome XMPP server and is released under the
-- ISC License, please see the LICENSE file in this source package for more
-- information about copyright and licensing.

local ipairs, pairs, tostring = ipairs, pairs, tostring;

local st = require "util.stanza";
local datamanager = require "util.datamanager";
local storagemanager = require "core.storagemanager";
local jid_split = require "util.jid".split;
local sha1 = require "util.hashes".sha1;
local b64_decode = require "util.encodings".base64.decode;
local t_remove = table.remove;
local metronome = metronome;

local vcard_store = storagemanager.open(module.host, "vcard");
local hash_store = storagemanager.open(module.host, "vcard_hash");

-- PEP node namespaces used for XEP-0084 User Avatar conversion.
local data_xmlns, metadata_xmlns = "urn:xmpp:avatar:data", "urn:xmpp:avatar:metadata";

local vcard_max = module:get_option_number("vcard_max_size");

module:add_feature("vcard-temp");

-- Returns the stored vCard for a local user (or false if none), for other
-- modules synchronizing vCard data via the global "vcard-synchronize" event.
local function handle_synchronize(event)
	local node, host = event.node, event.host;
	if host ~= module.host then return; end

	local vCard = st.deserialize(datamanager.load(node, host, "vcard"));

	if vCard then
		return vCard;
	else
		return false;
	end
end

-- Handles vcard-temp IQ get/set. On set, also converts an embedded PHOTO
-- into XEP-0084 User Avatar data/metadata PEP items.
local function handle_vcard(event)
	local session, stanza = event.origin, event.stanza;
	local to = stanza.attr.to;
	if stanza.attr.type == "get" then
		local vCard;
		if to then
			local node, host = jid_split(to);
			vCard = st.deserialize(vcard_store:get(node)); -- load vCard for user or server
		else
			vCard = st.deserialize(vcard_store:get(session.username)); -- load user's own vCard
		end
		if vCard then
			session.send(st.reply(stanza):add_child(vCard));
		else
			session.send(st.error_reply(stanza, "cancel", "item-not-found"));
		end
	else
		if not to then
			local vCard = stanza.tags[1];

			if vcard_max and tostring(vCard):len() > vcard_max then
				return session.send(st.error_reply(stanza, "modify", "policy-violation", "The vCard data exceeded the max allowed size!"));
			end

			-- Reject vCards carrying more than one PHOTO element.
			local count = 0;
			for _, data_element in ipairs(vCard) do
				if data_element.name == "PHOTO" then count = count + 1; end
			end
			if count > 1 then
				return session.send(st.error_reply(stanza, "modify", "policy-violation", "vCards with multiple PHOTO elements are not supported"));
			end

			local ok, err = vcard_store:set(session.username, st.preserialize(vCard));
			if ok then
				session.send(st.reply(stanza));
				metronome.events.fire_event("vcard-updated", { node = session.username, host = session.host, vcard = vCard });
				local photo = vCard:child_with_name("PHOTO");
				if not photo then return true; end
				-- Fixed: previous code read the undefined "origin" variable here,
				-- crashing whenever the client omitted the "from" attribute.
				local from = stanza.attr.from or session.full_jid;
				local pep_service = module:fire_event("pep-get-service", session.username, true, from);
				if pep_service then -- sync avatar
					local data, type = photo:child_with_name("BINVAL"), photo:child_with_name("TYPE");
					if data and type then
						module:log("debug", "Converting vCard-based Avatar to User Avatar...");
						data, type = data:get_text(), type:get_text();
						local bytes, id = data:len(), sha1(b64_decode(data), true);
						module:get_bare_session(session.username).avatar_hash = id;
						ok, err = hash_store:set(session.username, { hash = id });
						if not ok then
							module:log("warn", "Failed to save %s's avatar hash: %s", session.username.."@"..session.host, err);
						end
						local data_item = st.stanza("item", { id = id })
							:tag("data", { xmlns = data_xmlns }):text(data):up():up();
						local metadata_item = st.stanza("item", { id = id })
							:tag("metadata", { xmlns = metadata_xmlns })
								:tag("info", { bytes = bytes, id = id, type = type }):up():up():up();
						if not pep_service.nodes[data_xmlns] then
							pep_service:create(data_xmlns, from, { max_items = 1 });
							module:fire_event("pep-autosubscribe-recipients", pep_service, data_xmlns);
						end
						if not pep_service.nodes[metadata_xmlns] then
							pep_service:create(metadata_xmlns, from, { max_items = 1 });
							-- Fixed: autosubscribe was fired with data_xmlns for the
							-- metadata node, so metadata subscribers were never added.
							module:fire_event("pep-autosubscribe-recipients", pep_service, metadata_xmlns);
						end
						pep_service:publish(data_xmlns, from, id, data_item);
						pep_service:publish(metadata_xmlns, from, id, metadata_item);
					else
						module:log("warn", "Failed to perform avatar conversion, PHOTO element is not valid");
					end
				end
			else
				session.send(st.error_reply(stanza, "wait", "internal-server-error", err));
			end
		else
			session.send(st.error_reply(stanza, "auth", "forbidden"));
		end
	end
	return true;
end

-- Holds avatar data published on the data node until the matching metadata
-- item arrives; weak values so abandoned entries can be collected.
local waiting_metadata = setmetatable({}, { __mode = "v" });

-- Mirrors XEP-0084 User Avatar publishes back into the stored vCard PHOTO.
local function handle_user_avatar(event)
	local node, item, from = event.node, event.item, event.from or event.origin.full_jid;
	if node == metadata_xmlns then
		local meta = item:get_child("metadata", node);
		local info = meta and meta:child_with_name("info");
		if info then
			local data = waiting_metadata[info.attr.id];
			if not data then return; end
			waiting_metadata[info.attr.id] = nil;
			local type = info.attr.type;
			local user, host = jid_split(from);

			local vCard = st.deserialize(datamanager.load(user, host, "vcard"));
			if vCard then
				-- Strip any existing PHOTO before inserting the new one.
				for n, tag in ipairs(vCard.tags) do
					if tag.name == "PHOTO" then t_remove(vCard.tags, n); t_remove(vCard, n); end
				end
				vCard:tag("PHOTO")
					:tag("TYPE"):text(type):up()
					:tag("BINVAL"):text(data):up():up();
			else
				vCard = st.stanza("vCard", { xmlns = "vcard-temp" })
					:tag("PHOTO")
						:tag("TYPE"):text(type):up()
						:tag("BINVAL"):text(data):up():up();
			end

			module:log("debug", "Converting User Avatar to vCard-based Avatar...");
			local ok, err = vcard_store:set(user, st.preserialize(vCard));
			if not ok then
				module:log("warn", "Failed to save %s's vCard: %s", user.."@"..host, err);
			end
			module:get_bare_session(event.origin.username).avatar_hash = info.attr.id;
			ok, err = hash_store:set(user, { hash = info.attr.id });
			if not ok then
				module:log("warn", "Failed to save %s's avatar hash: %s", user.."@"..host, err);
			end
		end
	elseif node == data_xmlns then
		local data = item:get_child_text("data", node);
		-- Key the pending payload by its own SHA-1 so the metadata publish
		-- (which carries the same id) can pick it up.
		if data then waiting_metadata[sha1(b64_decode(data), true)] = data; end
	end
end

-- Injects (or corrects) the XEP-0153 <x xmlns='vcard-temp:x:update'/> photo
-- hash into outgoing available presence from local clients.
local function handle_presence_inject(event)
	local session, stanza = event.origin, event.stanza;
	if session.type == "c2s" and not stanza.attr.type then
		local has_avatar = module:get_bare_session(session.username).avatar_hash;
		if has_avatar == nil then
			module:log("debug", "Caching Avatar hash of %s@%s...", session.username, session.host);
			local vc = hash_store:get(session.username);
			if vc then
				module:get_bare_session(session.username).avatar_hash = vc.hash;
				has_avatar = vc.hash;
			else
				-- Cache the negative result so we don't hit storage again.
				module:get_bare_session(session.username).avatar_hash = false;
				return;
			end
		elseif has_avatar == false then
			return;
		end

		local vcard_update = stanza:get_child("x", "vcard-temp:x:update");
		local photo = vcard_update and vcard_update:child_with_name("photo");
		if photo and photo:get_text() ~= "" then
			-- Replace whatever hash the client sent with the server's value.
			photo[1] = nil;
			photo:text(has_avatar);
		elseif not photo or not vcard_update then
			if not vcard_update then
				stanza:tag("x", { xmlns = "vcard-temp:x:update" })
					:tag("photo"):text(has_avatar):up():up();
			elseif not photo then
				vcard_update:tag("photo"):text(has_avatar):up();
			end
		end
	end
end

module:hook("account-disco-info", function(event)
	event.stanza:tag("feature", { var = "urn:xmpp:pep-vcard-conversion:0" }):up();
end, 45);

module:hook_global("vcard-synchronize", handle_synchronize);
module:hook("iq/bare/vcard-temp:vCard", handle_vcard);
module:hook("iq/host/vcard-temp:vCard", handle_vcard);
module:hook("pre-presence/bare", handle_presence_inject, 50);
module:hook("pre-presence/full", handle_presence_inject, 50);
module:hook("pre-presence/host", handle_presence_inject, 50);
module:hook("pep-node-publish", handle_user_avatar);

module.unload = function(reload)
	if not reload then
		-- Drop cached avatar hashes on permanent unload only.
		for jid, session in module:get_bare_sessions() do
			session.avatar_hash = nil;
		end
	end
end
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */

/* STLport wrapper for the native <setjmp.h>: sets up the STLport prolog /
 * epilog bracketing, forwards to the platform header, and verifies the
 * native header was actually reached. */
#ifndef _STLP_SETJMP_H

/* Track header nesting: only the outermost STLport header runs the
 * prolog/epilog pair; re-entrant inclusions skip it. */
#if !defined (_STLP_OUTERMOST_HEADER_ID)
#  define _STLP_OUTERMOST_HEADER_ID 0x256
#  include <stl/_cprolog.h>
#elif (_STLP_OUTERMOST_HEADER_ID == 0x256) && !defined (_STLP_DONT_POP_HEADER_ID)
#  define _STLP_DONT_POP_HEADER_ID
#  define _STLP_SETJMP_H
#endif

#if defined(_STLP_WCE_EVC3)
struct _exception;
#endif

/* If "setjmp" has been #defined the native header would misbehave, so only
 * include it when the name is untouched. */
#if !defined (setjmp)
#  define _STLP_NATIVE_SETJMP_H_INCLUDED
#  if defined (_STLP_HAS_INCLUDE_NEXT)
#    include_next <setjmp.h>
#  else
#    include _STLP_NATIVE_C_HEADER(setjmp.h)
#  endif
#endif

#if !defined (_STLP_NATIVE_SETJMP_H_INCLUDED)
/* See errno.h file for a description of this problem. */
#  error setjmp has been defined before inclusion of setjmp.h header.
#endif

/* Pop the nesting state and run the epilog if we were the outermost header. */
#if (_STLP_OUTERMOST_HEADER_ID == 0x256)
#  if ! defined (_STLP_DONT_POP_HEADER_ID)
#    include <stl/_epilog.h>
#    undef _STLP_OUTERMOST_HEADER_ID
#  else
#    undef _STLP_DONT_POP_HEADER_ID
#  endif
#endif

#endif /* _STLP_SETJMP_H */
{ "pile_set_name": "Github" }
/* global Metro, Utils, METRO_WEEK_START */

/* Metro UI date helpers: augments Date.prototype with week-number
 * calculation, locale-aware strftime-style formatting and simple
 * add-interval mutators. */
(function(Metro) {
    'use strict';

    // Returns the week number of this date. dowOffset selects which weekday
    // starts the week (0 = Sunday); defaults to METRO_WEEK_START.
    Date.prototype.getWeek = function (dowOffset) {
        var nYear, nday, newYear, day, daynum, weeknum;

        dowOffset = !Utils.isValue(dowOffset) ? METRO_WEEK_START : typeof dowOffset === 'number' ? parseInt(dowOffset) : 0;

        newYear = new Date(this.getFullYear(),0,1);
        day = newYear.getDay() - dowOffset; // weekday of Jan 1 relative to week start
        day = (day >= 0 ? day : day + 7);
        // Day-of-year, corrected for DST offset differences since Jan 1.
        daynum = Math.floor((this.getTime() - newYear.getTime() - (this.getTimezoneOffset()-newYear.getTimezoneOffset())*60000)/86400000) + 1;

        if(day < 4) {
            weeknum = Math.floor((daynum+day-1)/7) + 1;
            if(weeknum > 52) {
                // Week 53 only exists if next Jan 1 falls late in its week.
                nYear = new Date(this.getFullYear() + 1,0,1);
                nday = nYear.getDay() - dowOffset;
                nday = nday >= 0 ? nday : nday + 7;
                weeknum = nday < 4 ? 1 : 53;
            }
        }
        else {
            weeknum = Math.floor((daynum+day-1)/7);
        }
        return weeknum;
    };

    // NOTE(review): overrides the deprecated native getYear() to return a
    // two-digit year string instead of years-since-1900.
    Date.prototype.getYear = function(){
        return this.getFullYear().toString().substr(-2);
    };

    // strftime-style formatter; %-tokens are substituted using the Metro
    // locale's day/month names (falling back to en-US).
    Date.prototype.format = function(format, locale){

        if (locale === undefined) {
            locale = "en-US";
        }

        var cal = (Metro.locales !== undefined && Metro.locales[locale] !== undefined ? Metro.locales[locale] : Metro.locales["en-US"])['calendar'];

        var date = this;
        var nDay = date.getDay(),
            nDate = date.getDate(),
            nMonth = date.getMonth(),
            nYear = date.getFullYear(),
            nHour = date.getHours(),
            aDays = cal['days'],
            aMonths = cal['months'],
            // Cumulative day counts at the start of each month (non-leap).
            aDayCount = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334],
            isLeapYear = function() {
                return (nYear%4===0 && nYear%100!==0) || nYear%400===0;
            },
            // ISO weeks are anchored on the Thursday of the current week.
            getThursday = function() {
                var target = new Date(date);
                target.setDate(nDate - ((nDay+6)%7) + 3);
                return target;
            },
            zeroPad = function(nNum, nPad) {
                return ('' + (Math.pow(10, nPad) + nNum)).slice(1);
            };

        return format.replace(/(%[a-z])/gi, function(sMatch) {
            return {
                '%a': aDays[nDay].slice(0,3),
                '%A': aDays[nDay],
                '%b': aMonths[nMonth].slice(0,3),
                '%B': aMonths[nMonth],
                '%c': date.toUTCString(),
                '%C': Math.floor(nYear/100),
                '%d': zeroPad(nDate, 2),
                'dd': zeroPad(nDate, 2),
                '%e': nDate,
                '%F': date.toISOString().slice(0,10),
                '%G': getThursday().getFullYear(),
                '%g': ('' + getThursday().getFullYear()).slice(2),
                '%H': zeroPad(nHour, 2),
                // 'HH': zeroPad(nHour, 2),
                '%I': zeroPad((nHour+11)%12 + 1, 2),
                '%j': zeroPad(aDayCount[nMonth] + nDate + ((nMonth>1 && isLeapYear()) ? 1 : 0), 3),
                '%k': '' + nHour,
                '%l': (nHour+11)%12 + 1,
                '%m': zeroPad(nMonth + 1, 2),
                // 'mm': zeroPad(nMonth + 1, 2),
                '%M': zeroPad(date.getMinutes(), 2),
                // 'MM': zeroPad(date.getMinutes(), 2),
                '%p': (nHour<12) ? 'AM' : 'PM',
                '%P': (nHour<12) ? 'am' : 'pm',
                '%s': Math.round(date.getTime()/1000),
                // 'ss': Math.round(date.getTime()/1000),
                '%S': zeroPad(date.getSeconds(), 2),
                // 'SS': zeroPad(date.getSeconds(), 2),
                '%u': nDay || 7,
                '%V': (function() {
                        // ISO-8601 week number, computed from the week's Thursday.
                        var target = getThursday(),
                            n1stThu = target.valueOf();
                        target.setMonth(0, 1);
                        var nJan1 = target.getDay();

                        if (nJan1!==4) {
                            target.setMonth(0, 1 + ((4-nJan1)+7)%7);
                        }

                        return zeroPad(1 + Math.ceil((n1stThu-target)/604800000), 2);
                    })(),
                '%w': '' + nDay,
                '%x': date.toLocaleDateString(),
                '%X': date.toLocaleTimeString(),
                '%y': ('' + nYear).slice(2),
                // 'yy': ('' + nYear).slice(2),
                '%Y': nYear,
                // 'YYYY': nYear,
                '%z': date.toTimeString().replace(/.+GMT([+-]\d+).+/, '$1'),
                '%Z': date.toTimeString().replace(/.+\((.+?)\)$/, '$1')
            }[sMatch] || sMatch;
        });
    };

    // In-place mutators; each returns `this` for chaining.
    Date.prototype.addHours = function(n) {
        this.setTime(this.getTime() + (n*60*60*1000));
        return this;
    };

    Date.prototype.addDays = function(n) {
        this.setDate(this.getDate() + (n));
        return this;
    };

    Date.prototype.addMonths = function(n) {
        this.setMonth(this.getMonth() + (n));
        return this;
    };

    Date.prototype.addYears = function(n) {
        this.setFullYear(this.getFullYear() + (n));
        return this;
    };

}(Metro));
{ "pile_set_name": "Github" }
<script language="javascript" type="text/javascript">
/*<![CDATA[*/
if(typeof(console)=='object')
{
	console.group("Log aplikacji");
<?php
// Yii log-route view: emits one console.log/warn/error call per collected
// log entry, choosing the console method from the entry's CLogger level.
// $data is an array of [message, level, category, timestamp] tuples.
foreach($data as $index=>$log)
{
	// Millisecond-precision timestamp built from the float timestamp.
	$time=date('H:i:s.',$log[3]).(int)(($log[3]-(int)$log[3])*1000);
	if($log[1]===CLogger::LEVEL_WARNING)
		$func='warn';
	else if($log[1]===CLogger::LEVEL_ERROR)
		$func='error';
	else
		$func='log';
	// Escape so the message is safe inside a JS string literal.
	$content=CJavaScript::quote("[$time][$log[1]][$log[2]] $log[0]");
	echo "\tconsole.{$func}(\"{$content}\");\n";
}
?>
	console.groupEnd();
}
/*]]>*/
</script>
{ "pile_set_name": "Github" }
'''
Graphics compiler
=================

Before rendering an :class:`~kivy.graphics.instructions.InstructionGroup`, we
compile the group in order to reduce the number of instructions executed at
rendering time.

Reducing the context instructions
---------------------------------

Imagine that you have a scheme like this::

    Color(1, 1, 1)
    Rectangle(source='button.png', pos=(0, 0), size=(20, 20))
    Color(1, 1, 1)
    Rectangle(source='button.png', pos=(10, 10), size=(20, 20))
    Color(1, 1, 1)
    Rectangle(source='button.png', pos=(10, 20), size=(20, 20))

The real instructions seen by the graphics canvas would be::

    Color: change 'color' context to 1, 1, 1
    BindTexture: change 'texture0' to `button.png texture`
    Rectangle: push vertices (x1, y1...) to vbo & draw
    Color: change 'color' context to 1, 1, 1
    BindTexture: change 'texture0' to `button.png texture`
    Rectangle: push vertices (x1, y1...) to vbo & draw
    Color: change 'color' context to 1, 1, 1
    BindTexture: change 'texture0' to `button.png texture`
    Rectangle: push vertices (x1, y1...) to vbo & draw

Only the first :class:`~kivy.graphics.context_instructions.Color` and
:class:`~kivy.graphics.context_instructions.BindTexture` are useful and really
change the context.  We can reduce them to::

    Color: change 'color' context to 1, 1, 1
    BindTexture: change 'texture0' to `button.png texture`
    Rectangle: push vertices (x1, y1...) to vbo & draw
    Rectangle: push vertices (x1, y1...) to vbo & draw
    Rectangle: push vertices (x1, y1...) to vbo & draw

This is what the compiler does in the first place, by flagging all the unused
instruction with GI_IGNORE flag. As soon as a Color content changes, the whole
InstructionGroup will be recompiled and a previously unused Color might be
used for the next compilation.

Note to any Kivy contributor / internal developer:

- All context instructions are checked to see if they change anything in the
  cache.
- We must ensure that a context instruction is needed for our current Canvas.
- We must ensure that we don't depend of any other canvas.
- We must reset our cache if one of our children is another instruction group
  because we don't know whether it might do weird things or not.

'''

include 'opcodes.pxi'

from kivy.graphics.instructions cimport Instruction, RenderContext, ContextInstruction
from kivy.graphics.context_instructions cimport BindTexture


cdef class GraphicsCompiler:
    # Flags redundant context instructions in `group` with GI_IGNORE so the
    # renderer can skip them, and returns the (mutated) group.
    cdef InstructionGroup compile(self, InstructionGroup group):
        cdef int count = 0          # number of instructions flagged as ignorable
        cdef Instruction c
        cdef ContextInstruction ci
        cdef RenderContext rc = None, oldrc = None
        cdef dict cs_by_rc = {}     # per-render-context list of cached state names
        cdef list cs

        # Very simple compiler. We will apply all the element in the group.
        # If the render context is not changed between 2 call, we'll think that
        # the instruction could be ignored during the next frame. So flag as
        # GI_IGNORE.
        # Also, flag ourself as GL_NO_APPLY_ONCE, to prevent to reapply all the
        # instructions when the compiler is leaving.

        for c in group.children:
            # Select only the instructions who modify the context
            if c.flags & GI_CONTEXT_MOD:
                # convert as a ContextInstruction
                ci = c

                # get the context, and flag as done
                oldrc = rc
                rc = ci.get_context()

                # flag the old one as need update, if it's a new one
                if rc is not oldrc and oldrc is not None:
                    oldrc.flag_update(0)

                # it's a new render context, track changes.
                rc.flag_update_done()

                # apply the instruction
                ci.apply()

                # whatever happen, flag as needed (ie not ignore this one.)
                ci.flags &= ~GI_IGNORE

                # before flag as ignore, we must ensure that all the states
                # inside this context instruction are not needed at all.
                # if a state has never been in the cache yet, we can't ignore
                # it.
                if rc not in cs_by_rc:
                    cs = cs_by_rc[rc] = []
                else:
                    cs = cs_by_rc[rc]
                needed = 0
                if isinstance(ci, BindTexture):
                    # In the texture case, BindTexture doesn't use
                    # context_state to transfer changes to the render context,
                    # but calls rendercontext.set_texture() directly. So we
                    # have no choice but to apply() it, and record it in the
                    # cache under the 'texture0' state name.
                    if 'texture0' not in cs:
                        cs.append('texture0')
                        needed = 1
                else:
                    for state in ci.context_state:
                        if state in cs:
                            continue
                        needed = 1
                        cs.append(state)

                # unflag the instruction only if it's not needed
                # and if the render context have not been changed
                if needed == 0 and not (rc.flags & GI_NEEDS_UPDATE):
                    ci.flags |= GI_IGNORE
                    count += 1
            else:
                if isinstance(c, InstructionGroup):
                    # This child may contain arbitrary new instructions that
                    # could invalidate our assumptions, so reset the cache.
                    cs_by_rc = {}
                c.apply()

        if rc:
            rc.flag_update(0)

        group.flags |= GI_NO_APPLY_ONCE

        return group
{ "pile_set_name": "Github" }
#include <stdio.h> #include <malloc.h> #include <stdlib.h> #include <string.h> #include <iostream> #include <vector> #include <chrono> #include "logger.h" #include "cmdlineparser.h" #include "xclbin_helper.h" #include "opencv2/opencv.hpp" #include "coefficients.h" #include "filter2d.h" using namespace sda; using namespace sda::utils; static void IplImage2Raw(IplImage* img, uchar* y, int stride_y, uchar* u, int stride_u, uchar* v, int stride_v); static void Raw2IplImage(uchar* y, int stride_y, uchar* u, int stride_u, uchar* v, int stride_v, IplImage* img); // ------------------------------------------------------------------------------------------- // An event callback function that prints the operations performed by the OpenCL runtime. // ------------------------------------------------------------------------------------------- void event_cb(cl_event event, cl_int cmd_status, void *id) { if (getenv("XCL_EMULATION_MODE") != NULL) { std::cout << " kernel finished processing request " << *(int *)id << std::endl; } } // ------------------------------------------------------------------------------------------- // Struct returned by BlurDispatcher() and used to keep track of the request sent to the kernel // The sync() method waits for completion of the request. 
After it returns, results are ready // ------------------------------------------------------------------------------------------- struct Filter2DRequest { cl_event mEvent[3]; int mId; Filter2DRequest(int id) { mId = id; } void sync() { // Wait until the outputs have been read back clWaitForEvents(1, &mEvent[2]); clReleaseEvent(mEvent[0]); clReleaseEvent(mEvent[1]); clReleaseEvent(mEvent[2]); } }; // ------------------------------------------------------------------------------------------- // Class used to dispatch requests to the kernel // The BlurDispatcher() method schedules the necessary operations (write, kernel, read) and // returns a BlurRequest* struct which can be used to track the completion of the request. // The dispatcher has its own OOO command queue allowing multiple requests to be scheduled // and executed independently by the OpenCL runtime. // ------------------------------------------------------------------------------------------- class Filter2DDispatcher { public: Filter2DDispatcher( cl_device_id &Device, cl_context &Context, cl_program &Program ) { mKernel = clCreateKernel(Program, "Filter2DKernel", &mErr); mQueue = clCreateCommandQueue(Context, Device, CL_QUEUE_PROFILING_ENABLE|CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE, &mErr); mContext = Context; mCounter = 0; } Filter2DRequest* operator() ( short *coeffs, unsigned char *src, unsigned int width, unsigned int height, unsigned int stride, unsigned char *dst ) { Filter2DRequest* req = new Filter2DRequest(mCounter++); unsigned nbytes = (stride*height); // Create input buffers for coefficients (host to device) mSrcExt[0].flags = XCL_MEM_DDR_BANK1; mSrcExt[0].param = 0; mSrcExt[0].obj = coeffs; mSrcBuf[0] = clCreateBuffer(mContext, CL_MEM_EXT_PTR_XILINX | CL_MEM_USE_HOST_PTR | CL_MEM_READ_ONLY, (FILTER2D_KERNEL_V_SIZE*FILTER2D_KERNEL_V_SIZE)*sizeof(short), &mSrcExt[0], &mErr); // Create input buffer for src (host to device) mSrcExt[1].flags = XCL_MEM_DDR_BANK1; mSrcExt[1].param = 0; mSrcExt[1].obj 
= src; mSrcBuf[1] = clCreateBuffer(mContext, CL_MEM_EXT_PTR_XILINX | CL_MEM_USE_HOST_PTR | CL_MEM_READ_ONLY, nbytes, &mSrcExt[1], &mErr); // Create output buffer for dst (device to host) mDstExt[0].flags = XCL_MEM_DDR_BANK1; mDstExt[0].param = 0; mDstExt[0].obj = dst; mDstBuf[0] = clCreateBuffer(mContext, CL_MEM_EXT_PTR_XILINX | CL_MEM_USE_HOST_PTR | CL_MEM_WRITE_ONLY, nbytes, &mDstExt[0], &mErr); // Schedule the writing of the inputs clEnqueueMigrateMemObjects(mQueue, 1, mSrcBuf, 0, 0, nullptr, &req->mEvent[0]); // Set the kernel arguments clSetKernelArg(mKernel, 0, sizeof(cl_mem), &mSrcBuf[0]); clSetKernelArg(mKernel, 1, sizeof(cl_mem), &mSrcBuf[1]); clSetKernelArg(mKernel, 2, sizeof(unsigned int), &width); clSetKernelArg(mKernel, 3, sizeof(unsigned int), &height); clSetKernelArg(mKernel, 4, sizeof(unsigned int), &stride); clSetKernelArg(mKernel, 5, sizeof(cl_mem), &mDstBuf[0]); // Schedule the execution of the kernel clEnqueueTask(mQueue, mKernel, 1, &req->mEvent[0], &req->mEvent[1]); // Schedule the reading of the outputs clEnqueueMigrateMemObjects(mQueue, 1, mDstBuf, CL_MIGRATE_MEM_OBJECT_HOST, 1, &req->mEvent[1], &req->mEvent[2]); // Register call back to notify of kernel completion clSetEventCallback(req->mEvent[1], CL_COMPLETE, event_cb, &req->mId); return req; }; ~Filter2DDispatcher() { clReleaseCommandQueue(mQueue); clReleaseKernel(mKernel); }; private: cl_kernel mKernel; cl_command_queue mQueue; cl_context mContext; cl_mem_ext_ptr_t mSrcExt[2]; cl_mem_ext_ptr_t mDstExt[1]; cl_mem mSrcBuf[2]; cl_mem mDstBuf[1]; cl_int mErr; int mCounter; }; int main(int argc, char** argv) { std::cout << std::endl; std::cout << "Xilinx 2D Filter Example Application\n"; // --------------------------------------------------------------------------------- // Parse command line // --------------------------------------------------------------------------------- CmdLineParser parser; parser.addSwitch("--nruns", "-n", "Number of times to image is processed", "1"); 
parser.addSwitch("--fpga", "-x", "FPGA binary (xclbin) file to use", "xclbin/fpga.hw.xilinx_aws-vu9p-f1_4ddr-xpr-2pr_4_0.awsxclbin"); parser.addSwitch("--input", "-i", "Input image file"); parser.addSwitch("--filter", "-f", "Filter type (0-3)", "0"); //parse all command line options parser.parse(argc, argv); string inputImage = parser.value("input"); string fpgaBinary = parser.value("fpga"); int numRuns = parser.value_to_int("nruns"); int coeffs = parser.value_to_int("filter"); if (inputImage.size() == 0) { std::cout << std::endl; std::cout << "ERROR: input image file must be specified using -i command line switch" << std::endl; exit(1); } if ((coeffs<0) || (coeffs>3)) { std::cout << std::endl; std::cout << "ERROR: Supported filter type values are [0:3]" << std::endl; exit(1); } std::cout << std::endl; std::cout << "FPGA binary : " << fpgaBinary << std::endl; std::cout << "Input image : " << inputImage << std::endl; std::cout << "Number of runs : " << numRuns << std::endl; std::cout << "Filter type : " << coeffs << std::endl; std::cout << std::endl; // --------------------------------------------------------------------------------- // Load XCLBIN file, create OpenCL context, device and program // --------------------------------------------------------------------------------- std::cout << "Programming FPGA" << std::endl; cl_context context; cl_program program; cl_device_id device; load_xclbin_file(fpgaBinary.c_str(), context, device, program); // --------------------------------------------------------------------------------- // Read input image and format inputs // --------------------------------------------------------------------------------- // Create filenames for input and ouput images std::string srcFileName = inputImage; std::string dstFileName = inputImage.substr(0, inputImage.size()-4)+"_out.bmp"; // Read Input image IplImage *src, *dst; src = cvLoadImage(srcFileName.c_str()); //format is BGR if(!src) { std::cout << "ERROR: Loading image " << 
srcFileName << " failed" << std::endl; exit(1); } unsigned width = src->width; unsigned height = src->height; unsigned stride = width; unsigned nbytes = (stride*height); // 4k aligned buffers for efficient data transfer to the kernel std::vector<uchar, aligned_allocator<uchar>> y_src(nbytes); std::vector<uchar, aligned_allocator<uchar>> u_src(nbytes); std::vector<uchar, aligned_allocator<uchar>> v_src(nbytes); std::vector<uchar, aligned_allocator<uchar>> y_dst(nbytes); std::vector<uchar, aligned_allocator<uchar>> u_dst(nbytes); std::vector<uchar, aligned_allocator<uchar>> v_dst(nbytes); std::vector<short, aligned_allocator<short>> coeff(FILTER2D_KERNEL_V_SIZE*FILTER2D_KERNEL_V_SIZE); // Create destination image dst = cvCreateImage(cvSize(width, height), src->depth, src->nChannels); // Convert CV Image to AXI video data IplImage2Raw(src, y_src.data(), stride, u_src.data(), stride, v_src.data(), stride); // Copy coefficients to 4k aligned vector memcpy(coeff.data() , &filterCoeffs[coeffs][0][0], coeff.size()*sizeof(short) ); // --------------------------------------------------------------------------------- // Make requests to kernel(s) // --------------------------------------------------------------------------------- // Note: change the number of kernels in the device, or reorder the sync() methods // to see the impact on performance and how requests are scheduled. 
// --------------------------------------------------------------------------------- std::cout << std::endl; std::cout << "Running FPGA version" << std::endl; // Create a dispatcher of requests to the Blur kernel(s) Filter2DDispatcher Filter(device, context, program); auto fpga_begin = std::chrono::high_resolution_clock::now(); Filter2DRequest* request[numRuns*3]; for(int xx=0; xx<numRuns; xx++) { // Make independent requests to Blur Y, U and V planes // Requests will run sequentially if there is a single kernel // Requests will run in parallel is there are two or more kernels request[xx*3+0] = Filter(coeff.data(), y_src.data(), width, height, stride, y_dst.data()); request[xx*3+1] = Filter(coeff.data(), u_src.data(), width, height, stride, u_dst.data()); request[xx*3+2] = Filter(coeff.data(), v_src.data(), width, height, stride, v_dst.data()); // Wait for completion of the outstanding requests request[xx*3+0]->sync(); request[xx*3+1]->sync(); request[xx*3+2]->sync(); } auto fpga_end = std::chrono::high_resolution_clock::now(); // --------------------------------------------------------------------------------- // Format output and write image out // --------------------------------------------------------------------------------- // Convert processed image back to CV Image Raw2IplImage(y_dst.data(), stride, u_dst.data(), stride, v_dst.data(), stride, dst); // Convert image to cvMat and write it to disk cvConvert( dst, cvCreateMat(height, width, CV_32FC3 ) ); cvSaveImage(dstFileName.c_str(), dst); // --------------------------------------------------------------------------------- // Compute reference results and compare // --------------------------------------------------------------------------------- std::cout << std::endl; std::cout << "Running Software version" << std::endl; // Create output buffers for reference results std::vector<uchar, aligned_allocator<uchar>> y_ref(nbytes); std::vector<uchar, aligned_allocator<uchar>> u_ref(nbytes); std::vector<uchar, 
aligned_allocator<uchar>> v_ref(nbytes); auto cpu_begin = std::chrono::high_resolution_clock::now(); #pragma omp parallel for for(int xx=0; xx<numRuns; xx++) { // Compute reference results Filter2D(filterCoeffs[coeffs], y_src.data(), width, height, stride, y_ref.data()); Filter2D(filterCoeffs[coeffs], u_src.data(), width, height, stride, u_ref.data()); Filter2D(filterCoeffs[coeffs], v_src.data(), width, height, stride, v_ref.data()); } auto cpu_end = std::chrono::high_resolution_clock::now(); std::string refFileName = inputImage.substr(0, inputImage.size()-4)+"_ref.bmp"; Raw2IplImage(y_ref.data(), stride, u_ref.data(), stride, v_ref.data(), stride, dst); cvConvert( dst, cvCreateMat(height, width, CV_32FC3 ) ); cvSaveImage(refFileName.c_str(), dst); // Compare results bool diff = false; for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { if ( y_dst[y*stride+x] != y_ref[y*stride+x] ) diff = true; if ( u_dst[y*stride+x] != u_ref[y*stride+x] ) diff = true; if ( v_dst[y*stride+x] != v_ref[y*stride+x] ) diff = true; } } std::cout << std::endl; std::cout << "*******************************************************" << std::endl; if(diff) { std::cout << "MATCH FAILED: Output has mismatches with reference" << std::endl; } else { std::cout << "MATCH PASS: Output matches reference" << std::endl; } std::cout << "*******************************************************" << std::endl; // Report performance (if not running in emulation mode) if (getenv("XCL_EMULATION_MODE") == NULL) { std::chrono::duration<double> fpga_duration = fpga_end - fpga_begin; std::cout << "FPGA Time: " << fpga_duration.count() << " s" << std::endl; std::cout << "FPGA Throughput: " << (double) numRuns*3*nbytes / fpga_duration.count() / (1024.0*1024.0) << " MB/s" << std::endl; std::chrono::duration<double> cpu_duration = cpu_end - cpu_begin; std::cout << "CPU Time: " << cpu_duration.count() << " s" << std::endl; std::cout << "CPU Throughput: " << (double) numRuns*3*nbytes / 
cpu_duration.count() / (1024.0*1024.0) << " MB/s" << std::endl; std::cout << "FPGA Speedup: " << cpu_duration.count() / fpga_duration.count() << " x" << std::endl; } // Release allocated memory cvReleaseImage(&src); cvReleaseImage(&dst); clReleaseProgram(program); clReleaseContext(context); clReleaseDevice(device); return (diff?1:0); } static void IplImage2Raw(IplImage* img, uchar* y_buf, int stride_y, uchar* u_buf, int stride_u, uchar* v_buf, int stride_v) { // Assumes RGB or YUV 4:4:4 for (int y = 0; y < img->height; y++) { for (int x = 0; x < img->width; x++) { CvScalar cv_pix = cvGet2D(img, y, x); y_buf[y*stride_y+x] = (uchar)cv_pix.val[0]; u_buf[y*stride_u+x] = (uchar)cv_pix.val[1]; v_buf[y*stride_v+x] = (uchar)cv_pix.val[2]; } } } static void Raw2IplImage(uchar* y_buf, int stride_y, uchar* u_buf, int stride_u, uchar* v_buf, int stride_v, IplImage* img ) { // Assumes RGB or YUV 4:4:4 for (int y = 0; y < img->height; y++) { for (int x = 0; x < img->width; x++) { CvScalar cv_pix; cv_pix.val[0] = y_buf[y*stride_y+x]; cv_pix.val[1] = u_buf[y*stride_u+x]; cv_pix.val[2] = v_buf[y*stride_v+x]; cvSet2D(img, y, x, cv_pix); } } }
{ "pile_set_name": "Github" }
{ "notes": "", "support": { "Android Browser": "y 4.4", "Baidu Browser": "y 7.12", "Blackberry Browser": "n 10", "Chrome": "y 29", "Chrome for Android": "y 84", "Edge": "y 12", "Firefox": "y 16", "Firefox for Android": "y 68", "IE": "a 9", "IE Mobile": "a 10", "KaiOS Browser": "y 2.5", "Opera": "y 12.1", "Opera Mini": "a all", "Opera Mobile": "y 12.1", "QQ Browser": "y 10.4", "Safari": "n TP", "Samsung Internet": "y 4", "UC Browser for Android": "y 12.12", "iOS Safari": "n 14.0" }, "url": "https://www.w3.org/TR/css3-mediaqueries/#resolution" }
{ "pile_set_name": "Github" }
# Add project specific ProGuard rules here. # By default, the flags in this file are appended to flags specified # in /home/homer/programming-environment/sdk/tools/proguard/proguard-android.txt # You can edit the include path and order by changing the proguardFiles # directive in build.gradle. # # For more details, see # http://developer.android.com/guide/developing/tools/proguard.html # Add any project specific keep options here: # If your project uses WebView with JS, uncomment the following # and specify the fully qualified class name to the JavaScript interface # class: #-keepclassmembers class fqcn.of.javascript.interface.for.webview { # public *; #}
{ "pile_set_name": "Github" }
package d.d.meshenger; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.res.Configuration; import android.net.Uri; import android.support.v4.content.LocalBroadcastManager; import android.support.v4.util.ObjectsCompat; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.app.AppCompatDelegate; import android.text.method.LinkMovementMethod; import android.util.Log; import android.view.View; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.RadioButton; import android.widget.RadioGroup; import android.widget.TextView; import org.w3c.dom.Text; import java.util.HashMap; import java.util.Locale; public class SettingsActivity extends MeshengerActivity { String nick; SharedPreferences prefs; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_settings); setTitle(getResources().getString(R.string.menu_settings)); prefs = getSharedPreferences(getPackageName(), MODE_PRIVATE); nick = prefs.getString("username", "undefined"); findViewById(R.id.changeNickLayout).setOnClickListener((v) -> changeNick()); CheckBox ignoreCB = findViewById(R.id.checkBoxIgnoreUnsaved); ignoreCB.setChecked(prefs.getBoolean("ignoreUnsaved", false)); ignoreCB.setOnCheckedChangeListener((compoundButton, b) -> { prefs.edit().putBoolean("ignoreUnsaved", b).apply(); syncSettings("ignoreUnsaved", b); }); CheckBox nightMode = findViewById(R.id.checkBoxNightMode); nightMode.setChecked(AppCompatDelegate.getDefaultNightMode() == AppCompatDelegate.MODE_NIGHT_YES); nightMode.setOnCheckedChangeListener((compoundButton, b) -> { AppCompatDelegate.setDefaultNightMode(compoundButton.isChecked() ? 
AppCompatDelegate.MODE_NIGHT_YES : AppCompatDelegate.MODE_NIGHT_NO); // TODO sync settings //syncSettings("ignoreUnsaved", b); }); } private void getLocale(){ Configuration config = getResources().getConfiguration(); Locale locale = config.locale; ((TextView) findViewById(R.id.localeTv)).setText(locale.getDisplayLanguage()); Locale[] locales = new Locale[]{Locale.ENGLISH, Locale.GERMAN}; findViewById(R.id.changeLocaleLayout).setOnClickListener((v) -> { RadioGroup group = new RadioGroup(this); AlertDialog.Builder builder = new AlertDialog.Builder(this); for(int i = 0; i < locales.length; i++){ Locale l = locales[i]; RadioButton button = new RadioButton(this); button.setId(i); button.setText(l.getDisplayLanguage()); if(l.getISO3Language().equals(locale.getISO3Language())) button.setChecked(true); group.addView(button); } builder.setView(group); AlertDialog dialog = builder.show(); group.setOnCheckedChangeListener((a, position) -> { Log.d("Settings", "changed locale to " + locales[position].getLanguage()); Configuration config1 = new Configuration(); config1.locale = locales[position]; getResources().updateConfiguration(config1, getResources().getDisplayMetrics()); finish(); startActivity(new Intent(getApplicationContext(), this.getClass())); dialog.dismiss(); }); }); } private void changeNick(){ EditText et = new EditText(this); et.setText(nick); et.setSelection(nick.length()); new AlertDialog.Builder(this) .setTitle(getResources().getString(R.string.settings_change_nick)) .setView(et) .setPositiveButton("ok", (dialogInterface, i) -> { nick = et.getText().toString(); prefs.edit().putString("username", nick).apply(); syncSettings("username", nick); initViews(); }) .setNegativeButton(getResources().getText(R.string.cancel), null) .show(); } private void syncSettings(String what, boolean content){ Intent intent = new Intent("settings_changed"); intent.putExtra("subject", what); intent.putExtra(what, content); 
LocalBroadcastManager.getInstance(this).sendBroadcast(intent); } private void syncSettings(String what, String content){ Intent intent = new Intent("settings_changed"); intent.putExtra("subject", what); intent.putExtra(what, content); LocalBroadcastManager.getInstance(this).sendBroadcast(intent); } @Override protected void onResume() { super.onResume(); initViews(); } private void initViews(){ ((TextView) findViewById(R.id.nickTv)).setText(nick); getLocale(); } }
{ "pile_set_name": "Github" }
package edu.harvard.iq.dataverse.confirmemail;

/**
 * Immutable-style response returned when initiating an e-mail confirmation.
 * Carries whether a user was found plus, optionally, the confirmation URL
 * and the associated {@code ConfirmEmailData}.
 *
 * @author bsilverstein
 */
public class ConfirmEmailInitResponse {

    // True when a matching user was located for the request.
    private boolean userFound;
    // Link to confirm the address; may be null — TODO confirm it is only set together with confirmEmailData.
    private String confirmUrl;
    // Confirmation state/token record; may be null.
    private ConfirmEmailData confirmEmailData;

    public ConfirmEmailInitResponse(boolean userFound) {
        this.userFound = userFound;
    }

    public ConfirmEmailInitResponse(boolean userFound, ConfirmEmailData confirmEmailData, String confirmUrl) {
        // Chain to the simple constructor, then fill in the optional fields.
        this(userFound);
        this.confirmEmailData = confirmEmailData;
        this.confirmUrl = confirmUrl;
    }

    public boolean isUserFound() {
        return userFound;
    }

    public String getConfirmUrl() {
        return confirmUrl;
    }

    public ConfirmEmailData getConfirmEmailData() {
        return confirmEmailData;
    }
}
{ "pile_set_name": "Github" }
# frozen_string_literal: true

require 'rails_helper'
require "#{Rails.root}/lib/data_cycle/constant_update"

describe ConstantUpdate do
  describe 'on initialization' do
    before do
      # A course flagged as needing an update, with a Salesforce id set.
      create(:course, start: '2015-03-20', end: '2015-03-31',
             needs_update: true, flags: { salesforce_id: 'a0f1a9063a1Wyad' })
    end

    it 'calls lots of update routines' do
      # Constructing ConstantUpdate is expected to drive all of these
      # importers, monitors and alert managers.
      expect(AssignmentUpdater).to receive(:update_assignment_article_ids_and_titles)
      expect(PlagiabotImporter).to receive(:find_recent_plagiarism)
      expect(StudentGreetingChecker).to receive(:check_all_ungreeted_students)
      expect(ArticlesForDeletionMonitor).to receive(:create_alerts_for_course_articles)
      expect(DiscretionarySanctionsMonitor).to receive(:create_alerts_for_course_articles)
      expect(HighQualityArticleMonitor).to receive(:create_alerts_for_course_articles)
      expect(DYKNominationMonitor).to receive(:create_alerts_for_course_articles)
      expect(GANominationMonitor).to receive(:create_alerts_for_course_articles)
      expect(BlockedUserMonitor).to receive(:create_alerts_for_recently_blocked_users)
      expect_any_instance_of(CourseAlertManager).to receive(:create_no_students_alerts)
      expect_any_instance_of(CourseAlertManager).to receive(:create_untrained_students_alerts)
      expect_any_instance_of(CourseAlertManager).to receive(:create_productive_course_alerts)
      expect_any_instance_of(CourseAlertManager).to receive(:create_active_course_alerts)
      expect_any_instance_of(CourseAlertManager).to receive(:create_deleted_uploads_alerts)
      expect_any_instance_of(CourseAlertManager)
        .to receive(:create_continued_course_activity_alerts)
      expect_any_instance_of(SurveyResponseAlertManager).to receive(:create_alerts)
      expect(UpdateLogger).to receive(:update_settings_record)
      expect(Raven).to receive(:capture_message).and_call_original

      update = described_class.new
      # The update keeps an internal log; verify it recorded the AfD step.
      sentry_logs = update.instance_variable_get(:@sentry_logs)
      expect(sentry_logs.grep(/Generating AfD alerts/).any?).to eq(true)
    end

    it 'reports logs to sentry even when it errors out' do
      allow(Raven).to receive(:capture_message)
      # Force one of the update steps to blow up.
      allow(PlagiabotImporter).to receive(:find_recent_plagiarism)
        .and_raise(StandardError)
      expect { described_class.new }.to raise_error(StandardError)
      # The failure must still be reported to Sentry.
      expect(Raven).to have_received(:capture_message).with('Constant update failed.', anything)
    end
  end
end
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using UnityEngine;

namespace DTS_Addon
{
    /// <summary>
    /// In-game debug window (loaded in every scene) that browses KSP's
    /// GameDatabase config tree. Entries can be filtered by name, type,
    /// value and url, and value fields are shown in editable text boxes.
    /// When <c>isOut</c> is true the handlers also append each row to
    /// GameData/DTS_zh/Debug.txt.
    /// </summary>
    [KSPAddon(KSPAddon.Startup.EveryScene, false)]
    public class ConfigTest : MonoBehaviour
    {
        Vector2 hc;                                  // scroll position of the result list
        int h = 0;                                   // current row index; each row is drawn at y = h * 20
        Rect window = new Rect(100, 100, 600, 600);  // window bounds

        // Filter text fields (empty string matches everything via Contains).
        string find = "";       // name filter
        string tp = "";         // type filter
        string findvalue = "";  // "value2" filter
        string fvalue = "";     // "value1" filter
        string urlvalue = "";   // url filter
        string maxLevel = "3";  // maximum node recursion depth (parsed with Convert.ToInt32)

        void OnGUI()
        {
            // Only show the window while the global toggle is on.
            if (SuperTools.ConfigTest == false) return;
            window = GUI.Window(104, window, CNodeWindow, "资源");
        }

        static bool testMody = false;  // guard for the (commented-out) one-shot modification test below
        bool isOut = false;            // when true, rows are also appended to the debug dump file

        /// <summary>
        /// Window callback: draws the filter controls, then walks every
        /// UrlConfig in the GameDatabase and renders the matching entries
        /// inside a scroll view.
        /// </summary>
        void CNodeWindow(int id)
        {
            GUI.DragWindow(new Rect(0, 0, 580, 30));

            // Depth limit input.
            GUI.Label(new Rect(5, 20, 100, 20), "最大深度");
            maxLevel = GUI.TextField(new Rect(105, 20, 100, 25), maxLevel);

            // Filter row: name / type / value1 / value2 / url.
            GUI.Label(new Rect(5, 60, 100, 20), "检索");
            GUI.Label(new Rect(105, 40, 100, 20), "name");
            GUI.Label(new Rect(205, 40, 100, 20), "type");
            GUI.Label(new Rect(305, 40, 100, 20), "value1");
            GUI.Label(new Rect(405, 40, 100, 20), "value2");
            GUI.Label(new Rect(505, 40, 100, 20), "url");
            find = GUI.TextField(new Rect(105, 60, 100, 25), find);
            tp = GUI.TextField(new Rect(205, 60, 100, 25), tp);
            fvalue = GUI.TextField(new Rect(305, 60, 100, 25), fvalue);
            findvalue = GUI.TextField(new Rect(405, 60, 100, 25), findvalue);
            urlvalue = GUI.TextField(new Rect(505, 60, 100, 25), urlvalue);

            // Reset the row counter and start the scrollable result area.
            h = 0;
            hc = GUI.BeginScrollView(new Rect(0, 90, 600, 600), hc, new Rect(0, 0, 590, 40000));

            // Root node summary.
            //sht("GameDatabase.Instance.root");
            sht("name", GameDatabase.Instance.root.name, 0);
            //sht("path", GameDatabase.Instance.root.path);
            sht("url", GameDatabase.Instance.root.url, 0);
            sht("type", GameDatabase.Instance.root.type.ToString(), 0);
            GUI.Label(new Rect(0, h * 20, 200, 20), " ");
            h++;

            //sht("GameDatabase.Instance.root.AllConfigs");
            foreach (UrlDir.UrlConfig config in GameDatabase.Instance.root.AllConfigs)
            {
                // Apply the substring filters; skip non-matching configs.
                if (config.name.Contains(find) == false) continue;
                if (config.type.Contains(tp) == false) continue;
                if (config.url.Contains(urlvalue) == false) continue;

                sht("name", config.name, 0);
                sht("type", config.type, 0);//AGENT,PART,PROP,RESOURCE_DEFINITION,EXPERIMENT_DEFINITION,STORY_DEF,INTERNAL,
                sht("url", config.url, 0);
                sht("parent.name", config.parent.name, 0);
                sht("parent.url", config.parent.url, 0);
                sht("parent.fileType", config.parent.fileType.ToString(), 0);

                //sht("GameDatabase.Instance.root.AllConfigs.config");
                shlabel("values.Count", config.config.values.Count.ToString(), 0);

                // One-shot in-place modification experiment, kept disabled.
                //if (config.url == "Squad/Parts/Command/advSasModule/part/advSasModule" && testMody == false)
                //{
                //    testMody = true;
                //    config.config.AddValue("--description", "这个系统使用一组以'相当'高速旋转的圆盘来产生控制飞船必需的扭矩.还包含");
                //}

                foreach (ConfigNode.Value value in config.config.values)
                {
                    shValue(value, 0);
                }
                shlabel("nodes.Count", config.config.nodes.Count.ToString(), 0);
                // Recurse into child nodes starting at depth 1.
                showNode(config.config.nodes, 1);
                shh1();
            }
            GUI.EndScrollView();
            // Output (if any) is written once per frame; disable it again.
            isOut = false;
        }

        /// <summary>
        /// Recursively renders a node list, indenting by 30px per depth
        /// level and stopping below <c>maxLevel</c>.
        /// </summary>
        public void showNode(ConfigNode.ConfigNodeList nodes, int level)
        {
            foreach (ConfigNode node in nodes)
            {
                GUI.Label(new Rect(0 + level * 30, h * 20, 200, 20), "--------------------------");
                h++;
                sht("node.id", node.id, level);
                sht("node.name", node.name, level);
                shlabel("values.Count", node.values.Count.ToString(), level);
                // Depth limit: do not descend into values/children past maxLevel.
                if (level > System.Convert.ToInt32(maxLevel)) return;
                foreach (ConfigNode.Value value in node.values)
                {
                    shValue(value, level);
                }
                shlabel("nodes.Count", node.nodes.Count.ToString(), level);
                showNode(node.nodes, level + 1);
            }
        }

        // Renders a plain header row (skipped while a value2 filter is active).
        void sht(string name)
        {
            if (findvalue != "") return;
            GUI.Label(new Rect(0, h * 20, 590, 20), name);
            h++;
            if (isOut == true) File.AppendAllText("GameData/DTS_zh/Debug.txt", "".PadRight(0) + name + "\r\n");
        }

        // Renders a name label plus a (read-only-in-effect) text field for the value.
        void sht(string name, string value, int level)
        {
            if (findvalue != "") return;
            GUI.Label(new Rect(5 + level * 30, h * 20, 140 - level * 30, 20), name);
            GUI.TextField(new Rect(140 + level * 30, h * 20, 450 - level * 30, 20), value);
            h++;
            if (isOut == true) File.AppendAllText("GameData/DTS_zh/Debug.txt", "".PadRight(level * 4) + name + "\t" + value + "\r\n");
        }

        // Renders a name/value pair as two labels (non-editable).
        void shlabel(string name, string value, int level)
        {
            if (findvalue != "") return;
            GUI.Label(new Rect(5 + level * 30, h * 20, 140 - level * 30, 20), name);
            GUI.Label(new Rect(140 + level * 30, h * 20, 450 - level * 30, 20), value);
            h++;
            if (isOut == true) File.AppendAllText("GameData/DTS_zh/Debug.txt", "".PadRight(level * 4) + name + "\t" + value + "\r\n");
        }

        // Renders a separator between top-level config entries.
        void shh1()
        {
            if (findvalue != "") return;
            GUI.Label(new Rect(0, h * 20, 200, 20), " ");
            h++;
            GUI.Label(new Rect(0, h * 20, 200, 20), "==========================");
            h++;
            if (isOut == true) File.AppendAllText("GameData/DTS_zh/Debug.txt", "==========================\r\n");
        }

        // Renders an editable ConfigNode.Value row; edits write straight back
        // into the live GameDatabase value.
        void shValue(ConfigNode.Value value, int level)
        {
            if (value.value.Contains(findvalue) == false) return;
            if (value.value.Contains(fvalue) == false) return;
            GUI.Label(new Rect(5 + level * 30, h * 20, 140 - level * 30, 20), value.name);
            value.value = GUI.TextField(new Rect(140 + level * 30, h * 20, 450 - level * 30, 20), value.value);
            h++;
            if (isOut == true) File.AppendAllText("GameData/DTS_zh/Debug.txt", "".PadRight(level * 4) + value.name + "\t" + value.value + "\r\n");
        }
    }
}
{ "pile_set_name": "Github" }
// CS0012: The type `A1' is defined in an assembly that is not referenced. Consider adding a reference to assembly `CS0012-lib-missing, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null'
// Line: 13
// Compiler options: -r:CS0012-lib.dll

using System.Threading.Tasks;

class Test
{
	public static void Main ()
	{
		var b = new B ();
		var t = Task.Factory.StartNew (() => {
			b.Test (); // the harness expects CS0012 to be reported on this line (13)
			b.Test ();
		});

		b.Test ();
	}
}
{ "pile_set_name": "Github" }
import {entity, MultiIndex, t} from '@deepkit/type';
import {ActiveRecord} from '@deepkit/orm';
import {Book} from './book';
import {Tag} from './tag';

/**
 * Join entity linking a Book to a Tag.
 *
 * Persisted under the name 'book-tag'; the (book, tag) pair is covered by a
 * multi-column index.
 */
@entity.name('book-tag')
@MultiIndex(['book', 'tag'])
export class BookTag extends ActiveRecord {
    // Auto-incremented surrogate primary key, assigned by the database.
    @t.primary.autoIncrement public id?: number;

    constructor(
        // Reference to the tagged book (lazy type to avoid circular imports).
        @t.type(() => Book).reference() public book: Book,
        // Reference to the applied tag.
        @t.reference() public tag: Tag,
    ) {
        super()
    }
}
{ "pile_set_name": "Github" }
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4

# MacPorts Portfile for EPUBCheck, installed from the prebuilt release zip.
PortSystem          1.0
PortGroup           java 1.0
PortGroup           github 1.0

github.setup        w3c epubcheck 4.2.2 v
revision            0
categories          textproc
platforms           darwin
license             BSD
maintainers         nomaintainer

# Pure Java bytecode, so the port is architecture-independent.
supported_archs     noarch

java.version        1.7+
java.fallback       openjdk11

description         EPUB validator

long_description    EPUBCheck is a tool to validate the \
                    conformance of EPUB publications against the \
                    EPUB specifications. EPUBCheck can be run as a \
                    standalone command-line tool or used as a Java \
                    library.

checksums           sha256  029ee9cda237a42e0d4c1a2b8771a440a4a6326449db843510b16a3c61045463 \
                    rmd160  cebf60866877013c2a9170605548d139fd16a232 \
                    size    11990465

# This Portfile downloads the Java bytecode for EPUBCheck from GitHub,
# rather than pulling together all the Java bits and pieces necessary
# to run EPUBCheck
github.tarball_from releases
use_zip             yes
extract.post_args   ""

# Since we're just dealing with downloaded bytecode, there is no configure
# and build
use_configure       no
build               {}

# Since this is a non-standard installation,
# the standard destroot phase is entirely overridden
destroot {
    # path to the epubcheck java files (without ${destroot}):
    set epubcheck ${prefix}/share/java/epubcheck

    # install the template epubcheck script from files:
    xinstall ${filespath}/epubcheck.in ${destroot}${prefix}/bin/epubcheck

    # set the correct path to epubcheck.jar in the epubcheck script:
    reinplace "s|@@EPUBCHECK@@|${epubcheck}|g" ${destroot}${prefix}/bin/epubcheck

    # install the epubcheck java files:
    xinstall -d ${destroot}${epubcheck}/lib
    xinstall ${worksrcpath}/${name}.jar ${destroot}${epubcheck}/${name}.jar
    xinstall {*}[glob ${worksrcpath}/*.txt] ${destroot}${epubcheck}
    xinstall {*}[glob ${worksrcpath}/lib/*] ${destroot}${epubcheck}/lib
}
{ "pile_set_name": "Github" }
using System;
using System.Linq;
using Revo.Core.Configuration;
using Revo.Infrastructure.DataAccess.Migrations;
using Revo.Infrastructure.Events.Async;
using Revo.Infrastructure.Jobs.InMemory;
using Revo.Infrastructure.Sagas;
using Revo.Infrastructure.Tenancy;

namespace Revo.Infrastructure
{
    /// <summary>
    /// Configuration extensions that wire up the Revo infrastructure layer
    /// (async event pipeline, database migrations, tenancy, sagas and
    /// in-memory jobs) on top of the core configuration.
    /// </summary>
    public static class InfrastructureConfigurationExtensions
    {
        /// <summary>
        /// Configures the infrastructure services. Core configuration and
        /// in-memory jobs are applied first; then <paramref name="action"/>
        /// may customize the <see cref="InfrastructureConfigurationSection"/>
        /// before the kernel bindings and modules are registered.
        /// </summary>
        /// <param name="configuration">The configuration being built (returned for chaining).</param>
        /// <param name="action">Optional callback to tweak the infrastructure section.</param>
        public static IRevoConfiguration ConfigureInfrastructure(this IRevoConfiguration configuration,
            Action<InfrastructureConfigurationSection> action = null)
        {
            configuration.ConfigureCore();
            configuration.UseInMemoryJobs(isActive: null); // activate only if not previously disabled

            var section = configuration.GetSection<InfrastructureConfigurationSection>();
            action?.Invoke(section);

            configuration.ConfigureKernel(c =>
            {
                // Bind defaults only when nothing else registered them already.
                if (!c.Kernel.GetBindings(typeof(IAsyncEventPipelineConfiguration)).Any())
                {
                    c.Kernel.Bind<IAsyncEventPipelineConfiguration>().ToConstant(section.AsyncEventPipeline);
                }

                if (!c.Kernel.GetBindings(typeof(DatabaseMigrationsConfiguration)).Any())
                {
                    c.Kernel.Bind<DatabaseMigrationsConfiguration>().ToConstant(section.DatabaseMigrations);
                }

                c.LoadModule(new DatabaseMigrationsModule(section.DatabaseMigrations));
                c.LoadModule(new TenancyModule(section.Tenancy));

                // Sagas are opt-in via the configuration section.
                if (section.EnableSagas)
                {
                    c.LoadModule<SagasModule>();
                }
            });

            return configuration;
        }
    }
}
{ "pile_set_name": "Github" }